diff --git a/logs_svd_gated/mode_0_param_gated_seed_41/config.json b/logs_svd_gated/mode_0_param_gated_seed_41/config.json new file mode 100644 index 0000000000000000000000000000000000000000..74a2e30bdca350468c2f65137d040d2f3fce9638 --- /dev/null +++ b/logs_svd_gated/mode_0_param_gated_seed_41/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 41, + "optimizer_mode": 0, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + "cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "e7d65e9f-ed35-408d-8280-2e7bd292a672", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_0_param_gated_seed_41/training_log_e7d65e9f-ed35-408d-8280-2e7bd292a672.txt b/logs_svd_gated/mode_0_param_gated_seed_41/training_log_e7d65e9f-ed35-408d-8280-2e7bd292a672.txt new file mode 100644 index 0000000000000000000000000000000000000000..8ba11fcb0955c9cb78d46bfa780e1aa65c320c7d --- /dev/null +++ b/logs_svd_gated/mode_0_param_gated_seed_41/training_log_e7d65e9f-ed35-408d-8280-2e7bd292a672.txt @@ -0,0 +1,2926 @@ +[2025-08-22 08:12:52] [Rank 0] PRINT: --- Script Start: Fri Aug 22 08:12:52 2025 --- +[2025-08-22 08:12:52] [Rank 0] PRINT: --- Script Start: Fri Aug 22 08:12:52 2025 --- +[2025-08-22 08:12:52] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=0, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 08:12:52] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=0, 
model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 08:12:52] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 08:12:52] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 08:12:52] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 08:12:52] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 08:12:52] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_0_param_gated_seed_41 +[2025-08-22 08:12:52] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_0_param_gated_seed_41 +[2025-08-22 08:12:52] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# 
Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 08:12:52] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 08:12:52] [Rank 0] PRINT: Constructing model... +[2025-08-22 08:12:52] [Rank 0] PRINT: Constructing model... +[2025-08-22 08:12:54] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 08:12:54] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 08:12:54] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 08:12:54] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 08:12:54] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 08:12:54] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 08:12:54] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 0 +[2025-08-22 08:12:54] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 0 +[2025-08-22 08:12:54] [Rank 0] PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices. +[2025-08-22 08:12:54] [Rank 0] PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices. +[2025-08-22 08:12:54] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 08:12:54] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 08:12:54] [Rank 0] PRINT: Muon optimizer is active with 80 parameters. +[2025-08-22 08:12:54] [Rank 0] PRINT: Muon optimizer is active with 80 parameters. +[2025-08-22 08:12:54] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 08:12:54] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 08:12:54] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 08:12:54] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 08:12:54] [Rank 0] PRINT: Starting warmup... +[2025-08-22 08:12:54] [Rank 0] PRINT: Starting warmup... +[2025-08-22 08:23:42] [Rank 0] PRINT: Warmup complete. +[2025-08-22 08:23:42] [Rank 0] PRINT: Warmup complete. +[2025-08-22 08:23:42] [Rank 0] PRINT: Starting training... +[2025-08-22 08:23:42] [Rank 0] PRINT: Starting training... 
+[2025-08-22 08:23:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:23:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:31:01] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 08:31:01] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 08:31:03] [Rank 0] step:21/10000 train_time:1950ms step_avg:92.87ms +[2025-08-22 08:31:03] [Rank 0] step:21/10000 train_time:1950ms step_avg:92.87ms +[2025-08-22 08:31:05] [Rank 0] step:41/10000 train_time:3807ms step_avg:92.86ms +[2025-08-22 08:31:05] [Rank 0] step:41/10000 train_time:3807ms step_avg:92.86ms +[2025-08-22 08:31:07] [Rank 0] step:61/10000 train_time:5804ms step_avg:95.14ms +[2025-08-22 08:31:07] [Rank 0] step:61/10000 train_time:5804ms step_avg:95.14ms +[2025-08-22 08:31:09] [Rank 0] step:81/10000 train_time:7745ms step_avg:95.61ms +[2025-08-22 08:31:09] [Rank 0] step:81/10000 train_time:7745ms step_avg:95.61ms +[2025-08-22 08:31:11] [Rank 0] step:101/10000 train_time:9602ms step_avg:95.07ms +[2025-08-22 08:31:11] [Rank 0] step:101/10000 train_time:9602ms step_avg:95.07ms +[2025-08-22 08:31:13] [Rank 0] step:121/10000 train_time:11459ms step_avg:94.71ms +[2025-08-22 08:31:13] [Rank 0] step:121/10000 
train_time:11459ms step_avg:94.71ms +[2025-08-22 08:31:15] [Rank 0] step:141/10000 train_time:13319ms step_avg:94.46ms +[2025-08-22 08:31:15] [Rank 0] step:141/10000 train_time:13319ms step_avg:94.46ms +[2025-08-22 08:31:17] [Rank 0] step:161/10000 train_time:15179ms step_avg:94.28ms +[2025-08-22 08:31:17] [Rank 0] step:161/10000 train_time:15179ms step_avg:94.28ms +[2025-08-22 08:31:18] [Rank 0] step:181/10000 train_time:17037ms step_avg:94.13ms +[2025-08-22 08:31:18] [Rank 0] step:181/10000 train_time:17037ms step_avg:94.13ms +[2025-08-22 08:31:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:31:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:31:34] [Rank 0] PRINT: step:200/10000 val_loss:5.2825 svd_entropy: attn_qk:H=0.9237,top10E=0.06,eRank=462.8,q75/q25=10.13 attn_vo:H=0.9191,top10E=0.06,eRank=448.9,q75/q25=10.87 mlp_w1:H=0.9745,top10E=0.03,eRank=648.4,q75/q25=2.74 mlp_w2:H=0.9729,top10E=0.04,eRank=641.3,q75/q25=2.83 vo_prod:H=0.8463,top10E=0.12,eRank=277.6,q75/q25=63.98 train_time:18856ms step_avg:94.28ms +[2025-08-22 08:31:34] [Rank 0] PRINT: step:200/10000 val_loss:5.2825 svd_entropy: attn_qk:H=0.9237,top10E=0.06,eRank=462.8,q75/q25=10.13 attn_vo:H=0.9191,top10E=0.06,eRank=448.9,q75/q25=10.87 mlp_w1:H=0.9745,top10E=0.03,eRank=648.4,q75/q25=2.74 mlp_w2:H=0.9729,top10E=0.04,eRank=641.3,q75/q25=2.83 vo_prod:H=0.8463,top10E=0.12,eRank=277.6,q75/q25=63.98 train_time:18856ms step_avg:94.28ms +[2025-08-22 08:31:34] [Rank 0] step:201/10000 train_time:18906ms step_avg:94.06ms +[2025-08-22 08:31:34] [Rank 0] step:201/10000 train_time:18906ms step_avg:94.06ms +[2025-08-22 08:31:36] [Rank 0] step:221/10000 train_time:20776ms step_avg:94.01ms +[2025-08-22 08:31:36] [Rank 0] step:221/10000 train_time:20776ms step_avg:94.01ms +[2025-08-22 08:31:38] [Rank 0] step:241/10000 
train_time:22633ms step_avg:93.91ms +[2025-08-22 08:31:38] [Rank 0] step:241/10000 train_time:22633ms step_avg:93.91ms +[2025-08-22 08:31:40] [Rank 0] step:261/10000 train_time:24490ms step_avg:93.83ms +[2025-08-22 08:31:40] [Rank 0] step:261/10000 train_time:24490ms step_avg:93.83ms +[2025-08-22 08:31:41] [Rank 0] step:281/10000 train_time:26348ms step_avg:93.76ms +[2025-08-22 08:31:41] [Rank 0] step:281/10000 train_time:26348ms step_avg:93.76ms +[2025-08-22 08:31:43] [Rank 0] step:301/10000 train_time:28205ms step_avg:93.70ms +[2025-08-22 08:31:43] [Rank 0] step:301/10000 train_time:28205ms step_avg:93.70ms +[2025-08-22 08:31:45] [Rank 0] step:321/10000 train_time:30062ms step_avg:93.65ms +[2025-08-22 08:31:45] [Rank 0] step:321/10000 train_time:30062ms step_avg:93.65ms +[2025-08-22 08:31:47] [Rank 0] step:341/10000 train_time:31922ms step_avg:93.61ms +[2025-08-22 08:31:47] [Rank 0] step:341/10000 train_time:31922ms step_avg:93.61ms +[2025-08-22 08:31:49] [Rank 0] step:361/10000 train_time:33783ms step_avg:93.58ms +[2025-08-22 08:31:49] [Rank 0] step:361/10000 train_time:33783ms step_avg:93.58ms +[2025-08-22 08:31:51] [Rank 0] step:381/10000 train_time:35644ms step_avg:93.55ms +[2025-08-22 08:31:51] [Rank 0] step:381/10000 train_time:35644ms step_avg:93.55ms +[2025-08-22 08:31:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:31:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:32:06] [Rank 0] PRINT: step:400/10000 val_loss:4.9148 svd_entropy: attn_qk:H=0.9239,top10E=0.06,eRank=463.2,q75/q25=9.91 attn_vo:H=0.9216,top10E=0.06,eRank=456.2,q75/q25=10.29 mlp_w1:H=0.9745,top10E=0.04,eRank=648.6,q75/q25=2.71 mlp_w2:H=0.9721,top10E=0.04,eRank=638.2,q75/q25=2.83 vo_prod:H=0.8505,top10E=0.12,eRank=285.6,q75/q25=56.72 train_time:37465ms step_avg:93.66ms +[2025-08-22 08:32:06] [Rank 0] PRINT: step:400/10000 val_loss:4.9148 svd_entropy: attn_qk:H=0.9239,top10E=0.06,eRank=463.2,q75/q25=9.91 attn_vo:H=0.9216,top10E=0.06,eRank=456.2,q75/q25=10.29 mlp_w1:H=0.9745,top10E=0.04,eRank=648.6,q75/q25=2.71 mlp_w2:H=0.9721,top10E=0.04,eRank=638.2,q75/q25=2.83 vo_prod:H=0.8505,top10E=0.12,eRank=285.6,q75/q25=56.72 train_time:37465ms step_avg:93.66ms +[2025-08-22 08:32:06] [Rank 0] step:401/10000 train_time:37515ms step_avg:93.55ms +[2025-08-22 08:32:06] [Rank 0] step:401/10000 train_time:37515ms step_avg:93.55ms +[2025-08-22 08:32:08] [Rank 0] step:421/10000 train_time:39377ms step_avg:93.53ms +[2025-08-22 08:32:08] [Rank 0] step:421/10000 train_time:39377ms step_avg:93.53ms +[2025-08-22 08:32:10] [Rank 0] step:441/10000 train_time:41367ms step_avg:93.80ms +[2025-08-22 08:32:10] [Rank 0] step:441/10000 train_time:41367ms step_avg:93.80ms +[2025-08-22 08:32:12] [Rank 0] step:461/10000 train_time:43381ms step_avg:94.10ms +[2025-08-22 08:32:12] [Rank 0] step:461/10000 train_time:43381ms step_avg:94.10ms +[2025-08-22 08:32:14] [Rank 0] step:481/10000 train_time:45238ms step_avg:94.05ms +[2025-08-22 08:32:14] [Rank 0] step:481/10000 train_time:45238ms step_avg:94.05ms +[2025-08-22 08:32:16] [Rank 0] step:501/10000 train_time:47097ms step_avg:94.01ms +[2025-08-22 08:32:16] [Rank 0] step:501/10000 train_time:47097ms step_avg:94.01ms +[2025-08-22 08:32:18] [Rank 0] step:521/10000 train_time:48955ms step_avg:93.96ms +[2025-08-22 08:32:18] [Rank 0] step:521/10000 train_time:48955ms step_avg:93.96ms +[2025-08-22 08:32:20] [Rank 0] step:541/10000 
train_time:50813ms step_avg:93.92ms +[2025-08-22 08:32:20] [Rank 0] step:541/10000 train_time:50813ms step_avg:93.92ms +[2025-08-22 08:32:22] [Rank 0] step:561/10000 train_time:52671ms step_avg:93.89ms +[2025-08-22 08:32:22] [Rank 0] step:561/10000 train_time:52671ms step_avg:93.89ms +[2025-08-22 08:32:23] [Rank 0] step:581/10000 train_time:54530ms step_avg:93.86ms +[2025-08-22 08:32:23] [Rank 0] step:581/10000 train_time:54530ms step_avg:93.86ms +[2025-08-22 08:32:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:32:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:32:39] [Rank 0] PRINT: step:600/10000 val_loss:4.7232 svd_entropy: attn_qk:H=0.9212,top10E=0.06,eRank=455.0,q75/q25=10.32 attn_vo:H=0.9195,top10E=0.06,eRank=450.2,q75/q25=10.69 mlp_w1:H=0.9741,top10E=0.04,eRank=646.6,q75/q25=2.72 mlp_w2:H=0.9712,top10E=0.04,eRank=634.2,q75/q25=2.84 vo_prod:H=0.8468,top10E=0.12,eRank=279.1,q75/q25=60.79 train_time:56350ms step_avg:93.92ms +[2025-08-22 08:32:39] [Rank 0] PRINT: step:600/10000 val_loss:4.7232 svd_entropy: attn_qk:H=0.9212,top10E=0.06,eRank=455.0,q75/q25=10.32 attn_vo:H=0.9195,top10E=0.06,eRank=450.2,q75/q25=10.69 mlp_w1:H=0.9741,top10E=0.04,eRank=646.6,q75/q25=2.72 mlp_w2:H=0.9712,top10E=0.04,eRank=634.2,q75/q25=2.84 vo_prod:H=0.8468,top10E=0.12,eRank=279.1,q75/q25=60.79 train_time:56350ms step_avg:93.92ms +[2025-08-22 08:32:39] [Rank 0] step:601/10000 train_time:56400ms step_avg:93.84ms +[2025-08-22 08:32:39] [Rank 0] step:601/10000 train_time:56400ms step_avg:93.84ms +[2025-08-22 08:32:41] [Rank 0] step:621/10000 train_time:58277ms step_avg:93.84ms +[2025-08-22 08:32:41] [Rank 0] step:621/10000 train_time:58277ms step_avg:93.84ms +[2025-08-22 08:32:43] [Rank 0] step:641/10000 train_time:60133ms step_avg:93.81ms +[2025-08-22 08:32:43] [Rank 0] step:641/10000 
train_time:60133ms step_avg:93.81ms +[2025-08-22 08:32:45] [Rank 0] step:661/10000 train_time:61990ms step_avg:93.78ms +[2025-08-22 08:32:45] [Rank 0] step:661/10000 train_time:61990ms step_avg:93.78ms +[2025-08-22 08:32:46] [Rank 0] step:681/10000 train_time:63848ms step_avg:93.76ms +[2025-08-22 08:32:46] [Rank 0] step:681/10000 train_time:63848ms step_avg:93.76ms +[2025-08-22 08:32:48] [Rank 0] step:701/10000 train_time:65705ms step_avg:93.73ms +[2025-08-22 08:32:48] [Rank 0] step:701/10000 train_time:65705ms step_avg:93.73ms +[2025-08-22 08:32:50] [Rank 0] step:721/10000 train_time:67564ms step_avg:93.71ms +[2025-08-22 08:32:50] [Rank 0] step:721/10000 train_time:67564ms step_avg:93.71ms +[2025-08-22 08:32:52] [Rank 0] step:741/10000 train_time:69423ms step_avg:93.69ms +[2025-08-22 08:32:52] [Rank 0] step:741/10000 train_time:69423ms step_avg:93.69ms +[2025-08-22 08:32:54] [Rank 0] step:761/10000 train_time:71296ms step_avg:93.69ms +[2025-08-22 08:32:54] [Rank 0] step:761/10000 train_time:71296ms step_avg:93.69ms +[2025-08-22 08:32:56] [Rank 0] step:781/10000 train_time:73170ms step_avg:93.69ms +[2025-08-22 08:32:56] [Rank 0] step:781/10000 train_time:73170ms step_avg:93.69ms +[2025-08-22 08:32:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:32:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:33:11] [Rank 0] PRINT: step:800/10000 val_loss:4.4917 svd_entropy: attn_qk:H=0.9195,top10E=0.06,eRank=450.0,q75/q25=10.50 attn_vo:H=0.9183,top10E=0.06,eRank=446.7,q75/q25=10.95 mlp_w1:H=0.9738,top10E=0.04,eRank=645.2,q75/q25=2.73 mlp_w2:H=0.9704,top10E=0.04,eRank=631.2,q75/q25=2.85 vo_prod:H=0.8451,top10E=0.12,eRank=276.0,q75/q25=63.05 train_time:75002ms step_avg:93.75ms +[2025-08-22 08:33:11] [Rank 0] PRINT: step:800/10000 val_loss:4.4917 svd_entropy: attn_qk:H=0.9195,top10E=0.06,eRank=450.0,q75/q25=10.50 attn_vo:H=0.9183,top10E=0.06,eRank=446.7,q75/q25=10.95 mlp_w1:H=0.9738,top10E=0.04,eRank=645.2,q75/q25=2.73 mlp_w2:H=0.9704,top10E=0.04,eRank=631.2,q75/q25=2.85 vo_prod:H=0.8451,top10E=0.12,eRank=276.0,q75/q25=63.05 train_time:75002ms step_avg:93.75ms +[2025-08-22 08:33:11] [Rank 0] step:801/10000 train_time:75053ms step_avg:93.70ms +[2025-08-22 08:33:11] [Rank 0] step:801/10000 train_time:75053ms step_avg:93.70ms +[2025-08-22 08:33:13] [Rank 0] step:821/10000 train_time:77059ms step_avg:93.86ms +[2025-08-22 08:33:13] [Rank 0] step:821/10000 train_time:77059ms step_avg:93.86ms +[2025-08-22 08:33:15] [Rank 0] step:841/10000 train_time:79057ms step_avg:94.00ms +[2025-08-22 08:33:15] [Rank 0] step:841/10000 train_time:79057ms step_avg:94.00ms +[2025-08-22 08:33:17] [Rank 0] step:861/10000 train_time:80931ms step_avg:94.00ms +[2025-08-22 08:33:17] [Rank 0] step:861/10000 train_time:80931ms step_avg:94.00ms +[2025-08-22 08:33:19] [Rank 0] step:881/10000 train_time:82807ms step_avg:93.99ms +[2025-08-22 08:33:19] [Rank 0] step:881/10000 train_time:82807ms step_avg:93.99ms +[2025-08-22 08:33:21] [Rank 0] step:901/10000 train_time:84683ms step_avg:93.99ms +[2025-08-22 08:33:21] [Rank 0] step:901/10000 train_time:84683ms step_avg:93.99ms +[2025-08-22 08:33:23] [Rank 0] step:921/10000 train_time:86560ms step_avg:93.98ms +[2025-08-22 08:33:23] [Rank 0] step:921/10000 train_time:86560ms step_avg:93.98ms +[2025-08-22 08:33:25] [Rank 0] step:941/10000 
train_time:88445ms step_avg:93.99ms +[2025-08-22 08:33:25] [Rank 0] step:941/10000 train_time:88445ms step_avg:93.99ms +[2025-08-22 08:33:27] [Rank 0] step:961/10000 train_time:90325ms step_avg:93.99ms +[2025-08-22 08:33:27] [Rank 0] step:961/10000 train_time:90325ms step_avg:93.99ms +[2025-08-22 08:33:29] [Rank 0] step:981/10000 train_time:92205ms step_avg:93.99ms +[2025-08-22 08:33:29] [Rank 0] step:981/10000 train_time:92205ms step_avg:93.99ms +[2025-08-22 08:33:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:33:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:33:44] [Rank 0] PRINT: step:1000/10000 val_loss:4.3583 svd_entropy: attn_qk:H=0.9182,top10E=0.06,eRank=446.2,q75/q25=10.66 attn_vo:H=0.9175,top10E=0.06,eRank=444.1,q75/q25=11.07 mlp_w1:H=0.9736,top10E=0.04,eRank=644.4,q75/q25=2.73 mlp_w2:H=0.9700,top10E=0.05,eRank=629.2,q75/q25=2.84 vo_prod:H=0.8438,top10E=0.12,eRank=273.8,q75/q25=65.04 train_time:94045ms step_avg:94.04ms +[2025-08-22 08:33:44] [Rank 0] PRINT: step:1000/10000 val_loss:4.3583 svd_entropy: attn_qk:H=0.9182,top10E=0.06,eRank=446.2,q75/q25=10.66 attn_vo:H=0.9175,top10E=0.06,eRank=444.1,q75/q25=11.07 mlp_w1:H=0.9736,top10E=0.04,eRank=644.4,q75/q25=2.73 mlp_w2:H=0.9700,top10E=0.05,eRank=629.2,q75/q25=2.84 vo_prod:H=0.8438,top10E=0.12,eRank=273.8,q75/q25=65.04 train_time:94045ms step_avg:94.04ms +[2025-08-22 08:33:44] [Rank 0] step:1001/10000 train_time:94091ms step_avg:94.00ms +[2025-08-22 08:33:44] [Rank 0] step:1001/10000 train_time:94091ms step_avg:94.00ms +[2025-08-22 08:33:46] [Rank 0] step:1021/10000 train_time:95964ms step_avg:93.99ms +[2025-08-22 08:33:46] [Rank 0] step:1021/10000 train_time:95964ms step_avg:93.99ms +[2025-08-22 08:33:48] [Rank 0] step:1041/10000 train_time:97837ms step_avg:93.98ms +[2025-08-22 08:33:48] [Rank 0] 
step:1041/10000 train_time:97837ms step_avg:93.98ms +[2025-08-22 08:33:50] [Rank 0] step:1061/10000 train_time:99708ms step_avg:93.98ms +[2025-08-22 08:33:50] [Rank 0] step:1061/10000 train_time:99708ms step_avg:93.98ms +[2025-08-22 08:33:52] [Rank 0] step:1081/10000 train_time:101590ms step_avg:93.98ms +[2025-08-22 08:33:52] [Rank 0] step:1081/10000 train_time:101590ms step_avg:93.98ms +[2025-08-22 08:33:54] [Rank 0] step:1101/10000 train_time:103466ms step_avg:93.97ms +[2025-08-22 08:33:54] [Rank 0] step:1101/10000 train_time:103466ms step_avg:93.97ms +[2025-08-22 08:33:55] [Rank 0] step:1121/10000 train_time:105341ms step_avg:93.97ms +[2025-08-22 08:33:55] [Rank 0] step:1121/10000 train_time:105341ms step_avg:93.97ms +[2025-08-22 08:33:57] [Rank 0] step:1141/10000 train_time:107215ms step_avg:93.97ms +[2025-08-22 08:33:57] [Rank 0] step:1141/10000 train_time:107215ms step_avg:93.97ms +[2025-08-22 08:33:59] [Rank 0] step:1161/10000 train_time:109097ms step_avg:93.97ms +[2025-08-22 08:33:59] [Rank 0] step:1161/10000 train_time:109097ms step_avg:93.97ms +[2025-08-22 08:34:01] [Rank 0] step:1181/10000 train_time:110971ms step_avg:93.96ms +[2025-08-22 08:34:01] [Rank 0] step:1181/10000 train_time:110971ms step_avg:93.96ms +[2025-08-22 08:34:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:34:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:34:17] [Rank 0] PRINT: step:1200/10000 val_loss:4.2632 svd_entropy: attn_qk:H=0.9174,top10E=0.06,eRank=443.7,q75/q25=10.76 attn_vo:H=0.9170,top10E=0.06,eRank=442.8,q75/q25=11.15 mlp_w1:H=0.9734,top10E=0.04,eRank=643.6,q75/q25=2.73 mlp_w2:H=0.9696,top10E=0.05,eRank=627.5,q75/q25=2.84 vo_prod:H=0.8433,top10E=0.12,eRank=273.0,q75/q25=65.69 train_time:112806ms step_avg:94.00ms +[2025-08-22 08:34:17] [Rank 0] PRINT: step:1200/10000 val_loss:4.2632 svd_entropy: attn_qk:H=0.9174,top10E=0.06,eRank=443.7,q75/q25=10.76 attn_vo:H=0.9170,top10E=0.06,eRank=442.8,q75/q25=11.15 mlp_w1:H=0.9734,top10E=0.04,eRank=643.6,q75/q25=2.73 mlp_w2:H=0.9696,top10E=0.05,eRank=627.5,q75/q25=2.84 vo_prod:H=0.8433,top10E=0.12,eRank=273.0,q75/q25=65.69 train_time:112806ms step_avg:94.00ms +[2025-08-22 08:34:17] [Rank 0] step:1201/10000 train_time:112856ms step_avg:93.97ms +[2025-08-22 08:34:17] [Rank 0] step:1201/10000 train_time:112856ms step_avg:93.97ms +[2025-08-22 08:34:19] [Rank 0] step:1221/10000 train_time:114775ms step_avg:94.00ms +[2025-08-22 08:34:19] [Rank 0] step:1221/10000 train_time:114775ms step_avg:94.00ms +[2025-08-22 08:34:21] [Rank 0] step:1241/10000 train_time:116644ms step_avg:93.99ms +[2025-08-22 08:34:21] [Rank 0] step:1241/10000 train_time:116644ms step_avg:93.99ms +[2025-08-22 08:34:23] [Rank 0] step:1261/10000 train_time:118516ms step_avg:93.99ms +[2025-08-22 08:34:23] [Rank 0] step:1261/10000 train_time:118516ms step_avg:93.99ms +[2025-08-22 08:34:25] [Rank 0] step:1281/10000 train_time:120385ms step_avg:93.98ms +[2025-08-22 08:34:25] [Rank 0] step:1281/10000 train_time:120385ms step_avg:93.98ms +[2025-08-22 08:34:27] [Rank 0] step:1301/10000 train_time:122260ms step_avg:93.97ms +[2025-08-22 08:34:27] [Rank 0] step:1301/10000 train_time:122260ms step_avg:93.97ms +[2025-08-22 08:34:29] [Rank 0] step:1321/10000 train_time:124134ms step_avg:93.97ms +[2025-08-22 08:34:29] [Rank 0] step:1321/10000 train_time:124134ms step_avg:93.97ms +[2025-08-22 08:34:30] 
[Rank 0] step:1341/10000 train_time:126009ms step_avg:93.97ms +[2025-08-22 08:34:30] [Rank 0] step:1341/10000 train_time:126009ms step_avg:93.97ms +[2025-08-22 08:34:32] [Rank 0] step:1361/10000 train_time:127883ms step_avg:93.96ms +[2025-08-22 08:34:32] [Rank 0] step:1361/10000 train_time:127883ms step_avg:93.96ms +[2025-08-22 08:34:34] [Rank 0] step:1381/10000 train_time:129764ms step_avg:93.96ms +[2025-08-22 08:34:34] [Rank 0] step:1381/10000 train_time:129764ms step_avg:93.96ms +[2025-08-22 08:34:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:34:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:34:50] [Rank 0] PRINT: step:1400/10000 val_loss:4.2119 svd_entropy: attn_qk:H=0.9167,top10E=0.07,eRank=441.8,q75/q25=10.87 attn_vo:H=0.9168,top10E=0.06,eRank=442.0,q75/q25=11.21 mlp_w1:H=0.9733,top10E=0.04,eRank=643.1,q75/q25=2.73 mlp_w2:H=0.9693,top10E=0.05,eRank=626.3,q75/q25=2.85 vo_prod:H=0.8431,top10E=0.12,eRank=272.7,q75/q25=66.69 train_time:131598ms step_avg:94.00ms +[2025-08-22 08:34:50] [Rank 0] PRINT: step:1400/10000 val_loss:4.2119 svd_entropy: attn_qk:H=0.9167,top10E=0.07,eRank=441.8,q75/q25=10.87 attn_vo:H=0.9168,top10E=0.06,eRank=442.0,q75/q25=11.21 mlp_w1:H=0.9733,top10E=0.04,eRank=643.1,q75/q25=2.73 mlp_w2:H=0.9693,top10E=0.05,eRank=626.3,q75/q25=2.85 vo_prod:H=0.8431,top10E=0.12,eRank=272.7,q75/q25=66.69 train_time:131598ms step_avg:94.00ms +[2025-08-22 08:34:50] [Rank 0] step:1401/10000 train_time:131648ms step_avg:93.97ms +[2025-08-22 08:34:50] [Rank 0] step:1401/10000 train_time:131648ms step_avg:93.97ms +[2025-08-22 08:34:52] [Rank 0] step:1421/10000 train_time:133528ms step_avg:93.97ms +[2025-08-22 08:34:52] [Rank 0] step:1421/10000 train_time:133528ms step_avg:93.97ms +[2025-08-22 08:34:54] [Rank 0] step:1441/10000 train_time:135399ms step_avg:93.96ms 
+[2025-08-22 08:34:54] [Rank 0] step:1441/10000 train_time:135399ms step_avg:93.96ms +[2025-08-22 08:34:56] [Rank 0] step:1461/10000 train_time:137270ms step_avg:93.96ms +[2025-08-22 08:34:56] [Rank 0] step:1461/10000 train_time:137270ms step_avg:93.96ms +[2025-08-22 08:34:58] [Rank 0] step:1481/10000 train_time:139142ms step_avg:93.95ms +[2025-08-22 08:34:58] [Rank 0] step:1481/10000 train_time:139142ms step_avg:93.95ms +[2025-08-22 08:34:59] [Rank 0] step:1501/10000 train_time:141023ms step_avg:93.95ms +[2025-08-22 08:34:59] [Rank 0] step:1501/10000 train_time:141023ms step_avg:93.95ms +[2025-08-22 08:35:01] [Rank 0] step:1521/10000 train_time:142907ms step_avg:93.96ms +[2025-08-22 08:35:01] [Rank 0] step:1521/10000 train_time:142907ms step_avg:93.96ms +[2025-08-22 08:35:03] [Rank 0] step:1541/10000 train_time:144793ms step_avg:93.96ms +[2025-08-22 08:35:03] [Rank 0] step:1541/10000 train_time:144793ms step_avg:93.96ms +[2025-08-22 08:35:05] [Rank 0] step:1561/10000 train_time:146681ms step_avg:93.97ms +[2025-08-22 08:35:05] [Rank 0] step:1561/10000 train_time:146681ms step_avg:93.97ms +[2025-08-22 08:35:07] [Rank 0] step:1581/10000 train_time:148569ms step_avg:93.97ms +[2025-08-22 08:35:07] [Rank 0] step:1581/10000 train_time:148569ms step_avg:93.97ms +[2025-08-22 08:35:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:35:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:35:23] [Rank 0] PRINT: step:1600/10000 val_loss:4.1202 svd_entropy: attn_qk:H=0.9162,top10E=0.07,eRank=440.3,q75/q25=10.90 attn_vo:H=0.9165,top10E=0.06,eRank=441.4,q75/q25=11.29 mlp_w1:H=0.9732,top10E=0.04,eRank=642.8,q75/q25=2.73 mlp_w2:H=0.9690,top10E=0.05,eRank=625.3,q75/q25=2.85 vo_prod:H=0.8430,top10E=0.12,eRank=272.5,q75/q25=66.57 train_time:150416ms step_avg:94.01ms +[2025-08-22 08:35:23] [Rank 0] PRINT: step:1600/10000 val_loss:4.1202 svd_entropy: attn_qk:H=0.9162,top10E=0.07,eRank=440.3,q75/q25=10.90 attn_vo:H=0.9165,top10E=0.06,eRank=441.4,q75/q25=11.29 mlp_w1:H=0.9732,top10E=0.04,eRank=642.8,q75/q25=2.73 mlp_w2:H=0.9690,top10E=0.05,eRank=625.3,q75/q25=2.85 vo_prod:H=0.8430,top10E=0.12,eRank=272.5,q75/q25=66.57 train_time:150416ms step_avg:94.01ms +[2025-08-22 08:35:23] [Rank 0] step:1601/10000 train_time:150466ms step_avg:93.98ms +[2025-08-22 08:35:23] [Rank 0] step:1601/10000 train_time:150466ms step_avg:93.98ms +[2025-08-22 08:35:25] [Rank 0] step:1621/10000 train_time:152374ms step_avg:94.00ms +[2025-08-22 08:35:25] [Rank 0] step:1621/10000 train_time:152374ms step_avg:94.00ms +[2025-08-22 08:35:27] [Rank 0] step:1641/10000 train_time:154259ms step_avg:94.00ms +[2025-08-22 08:35:27] [Rank 0] step:1641/10000 train_time:154259ms step_avg:94.00ms +[2025-08-22 08:35:29] [Rank 0] step:1661/10000 train_time:156145ms step_avg:94.01ms +[2025-08-22 08:35:29] [Rank 0] step:1661/10000 train_time:156145ms step_avg:94.01ms +[2025-08-22 08:35:30] [Rank 0] step:1681/10000 train_time:158033ms step_avg:94.01ms +[2025-08-22 08:35:30] [Rank 0] step:1681/10000 train_time:158033ms step_avg:94.01ms +[2025-08-22 08:35:32] [Rank 0] step:1701/10000 train_time:159920ms step_avg:94.02ms +[2025-08-22 08:35:32] [Rank 0] step:1701/10000 train_time:159920ms step_avg:94.02ms +[2025-08-22 08:35:34] [Rank 0] step:1721/10000 train_time:161812ms step_avg:94.02ms +[2025-08-22 08:35:34] [Rank 0] step:1721/10000 train_time:161812ms step_avg:94.02ms +[2025-08-22 08:35:36] 
[Rank 0] step:1741/10000 train_time:163701ms step_avg:94.03ms +[2025-08-22 08:35:36] [Rank 0] step:1741/10000 train_time:163701ms step_avg:94.03ms +[2025-08-22 08:35:38] [Rank 0] step:1761/10000 train_time:165594ms step_avg:94.03ms +[2025-08-22 08:35:38] [Rank 0] step:1761/10000 train_time:165594ms step_avg:94.03ms +[2025-08-22 08:35:40] [Rank 0] step:1781/10000 train_time:167487ms step_avg:94.04ms +[2025-08-22 08:35:40] [Rank 0] step:1781/10000 train_time:167487ms step_avg:94.04ms +[2025-08-22 08:35:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:35:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:35:55] [Rank 0] PRINT: step:1800/10000 val_loss:4.0659 svd_entropy: attn_qk:H=0.9157,top10E=0.07,eRank=439.0,q75/q25=10.90 attn_vo:H=0.9164,top10E=0.06,eRank=441.0,q75/q25=11.35 mlp_w1:H=0.9732,top10E=0.04,eRank=642.6,q75/q25=2.73 mlp_w2:H=0.9688,top10E=0.05,eRank=624.5,q75/q25=2.84 vo_prod:H=0.8430,top10E=0.12,eRank=272.5,q75/q25=66.95 train_time:169338ms step_avg:94.08ms +[2025-08-22 08:35:55] [Rank 0] PRINT: step:1800/10000 val_loss:4.0659 svd_entropy: attn_qk:H=0.9157,top10E=0.07,eRank=439.0,q75/q25=10.90 attn_vo:H=0.9164,top10E=0.06,eRank=441.0,q75/q25=11.35 mlp_w1:H=0.9732,top10E=0.04,eRank=642.6,q75/q25=2.73 mlp_w2:H=0.9688,top10E=0.05,eRank=624.5,q75/q25=2.84 vo_prod:H=0.8430,top10E=0.12,eRank=272.5,q75/q25=66.95 train_time:169338ms step_avg:94.08ms +[2025-08-22 08:35:56] [Rank 0] step:1801/10000 train_time:169389ms step_avg:94.05ms +[2025-08-22 08:35:56] [Rank 0] step:1801/10000 train_time:169389ms step_avg:94.05ms +[2025-08-22 08:35:57] [Rank 0] step:1821/10000 train_time:171293ms step_avg:94.07ms +[2025-08-22 08:35:57] [Rank 0] step:1821/10000 train_time:171293ms step_avg:94.07ms +[2025-08-22 08:35:59] [Rank 0] step:1841/10000 train_time:173177ms step_avg:94.07ms 
+[2025-08-22 08:35:59] [Rank 0] step:1841/10000 train_time:173177ms step_avg:94.07ms +[2025-08-22 08:36:01] [Rank 0] step:1861/10000 train_time:175062ms step_avg:94.07ms +[2025-08-22 08:36:01] [Rank 0] step:1861/10000 train_time:175062ms step_avg:94.07ms +[2025-08-22 08:36:03] [Rank 0] step:1881/10000 train_time:176948ms step_avg:94.07ms +[2025-08-22 08:36:03] [Rank 0] step:1881/10000 train_time:176948ms step_avg:94.07ms +[2025-08-22 08:36:05] [Rank 0] step:1901/10000 train_time:178835ms step_avg:94.07ms +[2025-08-22 08:36:05] [Rank 0] step:1901/10000 train_time:178835ms step_avg:94.07ms +[2025-08-22 08:36:07] [Rank 0] step:1921/10000 train_time:180720ms step_avg:94.08ms +[2025-08-22 08:36:07] [Rank 0] step:1921/10000 train_time:180720ms step_avg:94.08ms +[2025-08-22 08:36:09] [Rank 0] step:1941/10000 train_time:182608ms step_avg:94.08ms +[2025-08-22 08:36:09] [Rank 0] step:1941/10000 train_time:182608ms step_avg:94.08ms +[2025-08-22 08:36:11] [Rank 0] step:1961/10000 train_time:184495ms step_avg:94.08ms +[2025-08-22 08:36:11] [Rank 0] step:1961/10000 train_time:184495ms step_avg:94.08ms +[2025-08-22 08:36:13] [Rank 0] step:1981/10000 train_time:186382ms step_avg:94.08ms +[2025-08-22 08:36:13] [Rank 0] step:1981/10000 train_time:186382ms step_avg:94.08ms +[2025-08-22 08:36:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:36:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:36:28] [Rank 0] PRINT: step:2000/10000 val_loss:4.0371 svd_entropy: attn_qk:H=0.9153,top10E=0.07,eRank=437.8,q75/q25=11.00 attn_vo:H=0.9164,top10E=0.06,eRank=440.9,q75/q25=11.36 mlp_w1:H=0.9731,top10E=0.04,eRank=642.4,q75/q25=2.73 mlp_w2:H=0.9688,top10E=0.05,eRank=624.2,q75/q25=2.84 vo_prod:H=0.8432,top10E=0.12,eRank=272.8,q75/q25=67.25 train_time:188228ms step_avg:94.11ms +[2025-08-22 08:36:28] [Rank 0] PRINT: step:2000/10000 val_loss:4.0371 svd_entropy: attn_qk:H=0.9153,top10E=0.07,eRank=437.8,q75/q25=11.00 attn_vo:H=0.9164,top10E=0.06,eRank=440.9,q75/q25=11.36 mlp_w1:H=0.9731,top10E=0.04,eRank=642.4,q75/q25=2.73 mlp_w2:H=0.9688,top10E=0.05,eRank=624.2,q75/q25=2.84 vo_prod:H=0.8432,top10E=0.12,eRank=272.8,q75/q25=67.25 train_time:188228ms step_avg:94.11ms +[2025-08-22 08:36:28] [Rank 0] step:2001/10000 train_time:188280ms step_avg:94.09ms +[2025-08-22 08:36:28] [Rank 0] step:2001/10000 train_time:188280ms step_avg:94.09ms +[2025-08-22 08:36:30] [Rank 0] step:2021/10000 train_time:190162ms step_avg:94.09ms +[2025-08-22 08:36:30] [Rank 0] step:2021/10000 train_time:190162ms step_avg:94.09ms +[2025-08-22 08:36:32] [Rank 0] step:2041/10000 train_time:192331ms step_avg:94.23ms +[2025-08-22 08:36:32] [Rank 0] step:2041/10000 train_time:192331ms step_avg:94.23ms +[2025-08-22 08:36:34] [Rank 0] step:2061/10000 train_time:194211ms step_avg:94.23ms +[2025-08-22 08:36:34] [Rank 0] step:2061/10000 train_time:194211ms step_avg:94.23ms +[2025-08-22 08:36:36] [Rank 0] step:2081/10000 train_time:196092ms step_avg:94.23ms +[2025-08-22 08:36:36] [Rank 0] step:2081/10000 train_time:196092ms step_avg:94.23ms +[2025-08-22 08:36:38] [Rank 0] step:2101/10000 train_time:197975ms step_avg:94.23ms +[2025-08-22 08:36:38] [Rank 0] step:2101/10000 train_time:197975ms step_avg:94.23ms +[2025-08-22 08:36:40] [Rank 0] step:2121/10000 train_time:199860ms step_avg:94.23ms +[2025-08-22 08:36:40] [Rank 0] step:2121/10000 train_time:199860ms step_avg:94.23ms +[2025-08-22 08:36:42] 
[Rank 0] step:2141/10000 train_time:201746ms step_avg:94.23ms +[2025-08-22 08:36:42] [Rank 0] step:2141/10000 train_time:201746ms step_avg:94.23ms +[2025-08-22 08:36:44] [Rank 0] step:2161/10000 train_time:203641ms step_avg:94.23ms +[2025-08-22 08:36:44] [Rank 0] step:2161/10000 train_time:203641ms step_avg:94.23ms +[2025-08-22 08:36:46] [Rank 0] step:2181/10000 train_time:205530ms step_avg:94.24ms +[2025-08-22 08:36:46] [Rank 0] step:2181/10000 train_time:205530ms step_avg:94.24ms +[2025-08-22 08:36:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:36:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:37:01] [Rank 0] PRINT: step:2200/10000 val_loss:3.9963 svd_entropy: attn_qk:H=0.9150,top10E=0.07,eRank=436.9,q75/q25=10.97 attn_vo:H=0.9163,top10E=0.06,eRank=440.7,q75/q25=11.42 mlp_w1:H=0.9731,top10E=0.04,eRank=642.3,q75/q25=2.73 mlp_w2:H=0.9687,top10E=0.05,eRank=623.8,q75/q25=2.84 vo_prod:H=0.8433,top10E=0.12,eRank=273.1,q75/q25=66.87 train_time:207376ms step_avg:94.26ms +[2025-08-22 08:37:01] [Rank 0] PRINT: step:2200/10000 val_loss:3.9963 svd_entropy: attn_qk:H=0.9150,top10E=0.07,eRank=436.9,q75/q25=10.97 attn_vo:H=0.9163,top10E=0.06,eRank=440.7,q75/q25=11.42 mlp_w1:H=0.9731,top10E=0.04,eRank=642.3,q75/q25=2.73 mlp_w2:H=0.9687,top10E=0.05,eRank=623.8,q75/q25=2.84 vo_prod:H=0.8433,top10E=0.12,eRank=273.1,q75/q25=66.87 train_time:207376ms step_avg:94.26ms +[2025-08-22 08:37:01] [Rank 0] step:2201/10000 train_time:207428ms step_avg:94.24ms +[2025-08-22 08:37:01] [Rank 0] step:2201/10000 train_time:207428ms step_avg:94.24ms +[2025-08-22 08:37:03] [Rank 0] step:2221/10000 train_time:209335ms step_avg:94.25ms +[2025-08-22 08:37:03] [Rank 0] step:2221/10000 train_time:209335ms step_avg:94.25ms +[2025-08-22 08:37:05] [Rank 0] step:2241/10000 train_time:211253ms step_avg:94.27ms 
+[2025-08-22 08:37:05] [Rank 0] step:2241/10000 train_time:211253ms step_avg:94.27ms +[2025-08-22 08:37:07] [Rank 0] step:2261/10000 train_time:213181ms step_avg:94.29ms +[2025-08-22 08:37:07] [Rank 0] step:2261/10000 train_time:213181ms step_avg:94.29ms +[2025-08-22 08:37:09] [Rank 0] step:2281/10000 train_time:215110ms step_avg:94.31ms +[2025-08-22 08:37:09] [Rank 0] step:2281/10000 train_time:215110ms step_avg:94.31ms +[2025-08-22 08:37:11] [Rank 0] step:2301/10000 train_time:217039ms step_avg:94.32ms +[2025-08-22 08:37:11] [Rank 0] step:2301/10000 train_time:217039ms step_avg:94.32ms +[2025-08-22 08:37:13] [Rank 0] step:2321/10000 train_time:218967ms step_avg:94.34ms +[2025-08-22 08:37:13] [Rank 0] step:2321/10000 train_time:218967ms step_avg:94.34ms +[2025-08-22 08:37:15] [Rank 0] step:2341/10000 train_time:220897ms step_avg:94.36ms +[2025-08-22 08:37:15] [Rank 0] step:2341/10000 train_time:220897ms step_avg:94.36ms +[2025-08-22 08:37:17] [Rank 0] step:2361/10000 train_time:222827ms step_avg:94.38ms +[2025-08-22 08:37:17] [Rank 0] step:2361/10000 train_time:222827ms step_avg:94.38ms +[2025-08-22 08:37:19] [Rank 0] step:2381/10000 train_time:224758ms step_avg:94.40ms +[2025-08-22 08:37:19] [Rank 0] step:2381/10000 train_time:224758ms step_avg:94.40ms +[2025-08-22 08:37:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:37:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:37:35] [Rank 0] PRINT: step:2400/10000 val_loss:3.9300 svd_entropy: attn_qk:H=0.9146,top10E=0.07,eRank=435.8,q75/q25=11.06 attn_vo:H=0.9163,top10E=0.06,eRank=440.7,q75/q25=11.43 mlp_w1:H=0.9731,top10E=0.04,eRank=642.4,q75/q25=2.72 mlp_w2:H=0.9686,top10E=0.05,eRank=623.5,q75/q25=2.83 vo_prod:H=0.8433,top10E=0.12,eRank=273.1,q75/q25=67.03 train_time:226647ms step_avg:94.44ms +[2025-08-22 08:37:35] [Rank 0] PRINT: step:2400/10000 val_loss:3.9300 svd_entropy: attn_qk:H=0.9146,top10E=0.07,eRank=435.8,q75/q25=11.06 attn_vo:H=0.9163,top10E=0.06,eRank=440.7,q75/q25=11.43 mlp_w1:H=0.9731,top10E=0.04,eRank=642.4,q75/q25=2.72 mlp_w2:H=0.9686,top10E=0.05,eRank=623.5,q75/q25=2.83 vo_prod:H=0.8433,top10E=0.12,eRank=273.1,q75/q25=67.03 train_time:226647ms step_avg:94.44ms +[2025-08-22 08:37:35] [Rank 0] step:2401/10000 train_time:226698ms step_avg:94.42ms +[2025-08-22 08:37:35] [Rank 0] step:2401/10000 train_time:226698ms step_avg:94.42ms +[2025-08-22 08:37:37] [Rank 0] step:2421/10000 train_time:228636ms step_avg:94.44ms +[2025-08-22 08:37:37] [Rank 0] step:2421/10000 train_time:228636ms step_avg:94.44ms +[2025-08-22 08:37:39] [Rank 0] step:2441/10000 train_time:230563ms step_avg:94.45ms +[2025-08-22 08:37:39] [Rank 0] step:2441/10000 train_time:230563ms step_avg:94.45ms +[2025-08-22 08:37:41] [Rank 0] step:2461/10000 train_time:232496ms step_avg:94.47ms +[2025-08-22 08:37:41] [Rank 0] step:2461/10000 train_time:232496ms step_avg:94.47ms +[2025-08-22 08:37:42] [Rank 0] step:2481/10000 train_time:234427ms step_avg:94.49ms +[2025-08-22 08:37:42] [Rank 0] step:2481/10000 train_time:234427ms step_avg:94.49ms +[2025-08-22 08:37:44] [Rank 0] step:2501/10000 train_time:236358ms step_avg:94.51ms +[2025-08-22 08:37:44] [Rank 0] step:2501/10000 train_time:236358ms step_avg:94.51ms +[2025-08-22 08:37:46] [Rank 0] step:2521/10000 train_time:238291ms step_avg:94.52ms +[2025-08-22 08:37:46] [Rank 0] step:2521/10000 train_time:238291ms step_avg:94.52ms +[2025-08-22 08:37:48] 
[Rank 0] step:2541/10000 train_time:240225ms step_avg:94.54ms +[2025-08-22 08:37:48] [Rank 0] step:2541/10000 train_time:240225ms step_avg:94.54ms +[2025-08-22 08:37:50] [Rank 0] step:2561/10000 train_time:242160ms step_avg:94.56ms +[2025-08-22 08:37:50] [Rank 0] step:2561/10000 train_time:242160ms step_avg:94.56ms +[2025-08-22 08:37:52] [Rank 0] step:2581/10000 train_time:244096ms step_avg:94.57ms +[2025-08-22 08:37:52] [Rank 0] step:2581/10000 train_time:244096ms step_avg:94.57ms +[2025-08-22 08:37:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:37:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:38:08] [Rank 0] PRINT: step:2600/10000 val_loss:3.9064 svd_entropy: attn_qk:H=0.9143,top10E=0.07,eRank=434.9,q75/q25=11.08 attn_vo:H=0.9163,top10E=0.06,eRank=440.8,q75/q25=11.43 mlp_w1:H=0.9731,top10E=0.04,eRank=642.3,q75/q25=2.72 mlp_w2:H=0.9685,top10E=0.05,eRank=623.3,q75/q25=2.83 vo_prod:H=0.8436,top10E=0.12,eRank=273.7,q75/q25=66.78 train_time:245990ms step_avg:94.61ms +[2025-08-22 08:38:08] [Rank 0] PRINT: step:2600/10000 val_loss:3.9064 svd_entropy: attn_qk:H=0.9143,top10E=0.07,eRank=434.9,q75/q25=11.08 attn_vo:H=0.9163,top10E=0.06,eRank=440.8,q75/q25=11.43 mlp_w1:H=0.9731,top10E=0.04,eRank=642.3,q75/q25=2.72 mlp_w2:H=0.9685,top10E=0.05,eRank=623.3,q75/q25=2.83 vo_prod:H=0.8436,top10E=0.12,eRank=273.7,q75/q25=66.78 train_time:245990ms step_avg:94.61ms +[2025-08-22 08:38:08] [Rank 0] step:2601/10000 train_time:246041ms step_avg:94.59ms +[2025-08-22 08:38:08] [Rank 0] step:2601/10000 train_time:246041ms step_avg:94.59ms +[2025-08-22 08:38:10] [Rank 0] step:2621/10000 train_time:247979ms step_avg:94.61ms +[2025-08-22 08:38:10] [Rank 0] step:2621/10000 train_time:247979ms step_avg:94.61ms +[2025-08-22 08:38:12] [Rank 0] step:2641/10000 train_time:249906ms step_avg:94.63ms 
+[2025-08-22 08:38:12] [Rank 0] step:2641/10000 train_time:249906ms step_avg:94.63ms +[2025-08-22 08:38:14] [Rank 0] step:2661/10000 train_time:251833ms step_avg:94.64ms +[2025-08-22 08:38:14] [Rank 0] step:2661/10000 train_time:251833ms step_avg:94.64ms +[2025-08-22 08:38:16] [Rank 0] step:2681/10000 train_time:253761ms step_avg:94.65ms +[2025-08-22 08:38:16] [Rank 0] step:2681/10000 train_time:253761ms step_avg:94.65ms +[2025-08-22 08:38:18] [Rank 0] step:2701/10000 train_time:255692ms step_avg:94.67ms +[2025-08-22 08:38:18] [Rank 0] step:2701/10000 train_time:255692ms step_avg:94.67ms +[2025-08-22 08:38:20] [Rank 0] step:2721/10000 train_time:257620ms step_avg:94.68ms +[2025-08-22 08:38:20] [Rank 0] step:2721/10000 train_time:257620ms step_avg:94.68ms +[2025-08-22 08:38:22] [Rank 0] step:2741/10000 train_time:259551ms step_avg:94.69ms +[2025-08-22 08:38:22] [Rank 0] step:2741/10000 train_time:259551ms step_avg:94.69ms +[2025-08-22 08:38:24] [Rank 0] step:2761/10000 train_time:261482ms step_avg:94.71ms +[2025-08-22 08:38:24] [Rank 0] step:2761/10000 train_time:261482ms step_avg:94.71ms +[2025-08-22 08:38:25] [Rank 0] step:2781/10000 train_time:263414ms step_avg:94.72ms +[2025-08-22 08:38:25] [Rank 0] step:2781/10000 train_time:263414ms step_avg:94.72ms +[2025-08-22 08:38:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:38:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:38:41] [Rank 0] PRINT: step:2800/10000 val_loss:3.8878 svd_entropy: attn_qk:H=0.9140,top10E=0.07,eRank=434.2,q75/q25=11.12 attn_vo:H=0.9164,top10E=0.06,eRank=440.9,q75/q25=11.40 mlp_w1:H=0.9731,top10E=0.04,eRank=642.2,q75/q25=2.72 mlp_w2:H=0.9685,top10E=0.05,eRank=623.1,q75/q25=2.83 vo_prod:H=0.8438,top10E=0.12,eRank=274.1,q75/q25=67.21 train_time:265303ms step_avg:94.75ms +[2025-08-22 08:38:41] [Rank 0] PRINT: step:2800/10000 val_loss:3.8878 svd_entropy: attn_qk:H=0.9140,top10E=0.07,eRank=434.2,q75/q25=11.12 attn_vo:H=0.9164,top10E=0.06,eRank=440.9,q75/q25=11.40 mlp_w1:H=0.9731,top10E=0.04,eRank=642.2,q75/q25=2.72 mlp_w2:H=0.9685,top10E=0.05,eRank=623.1,q75/q25=2.83 vo_prod:H=0.8438,top10E=0.12,eRank=274.1,q75/q25=67.21 train_time:265303ms step_avg:94.75ms +[2025-08-22 08:38:41] [Rank 0] step:2801/10000 train_time:265354ms step_avg:94.74ms +[2025-08-22 08:38:41] [Rank 0] step:2801/10000 train_time:265354ms step_avg:94.74ms +[2025-08-22 08:38:43] [Rank 0] step:2821/10000 train_time:267289ms step_avg:94.75ms +[2025-08-22 08:38:43] [Rank 0] step:2821/10000 train_time:267289ms step_avg:94.75ms +[2025-08-22 08:38:45] [Rank 0] step:2841/10000 train_time:269214ms step_avg:94.76ms +[2025-08-22 08:38:45] [Rank 0] step:2841/10000 train_time:269214ms step_avg:94.76ms +[2025-08-22 08:38:47] [Rank 0] step:2861/10000 train_time:271140ms step_avg:94.77ms +[2025-08-22 08:38:47] [Rank 0] step:2861/10000 train_time:271140ms step_avg:94.77ms +[2025-08-22 08:38:49] [Rank 0] step:2881/10000 train_time:273066ms step_avg:94.78ms +[2025-08-22 08:38:49] [Rank 0] step:2881/10000 train_time:273066ms step_avg:94.78ms +[2025-08-22 08:38:51] [Rank 0] step:2901/10000 train_time:274993ms step_avg:94.79ms +[2025-08-22 08:38:51] [Rank 0] step:2901/10000 train_time:274993ms step_avg:94.79ms +[2025-08-22 08:38:53] [Rank 0] step:2921/10000 train_time:276919ms step_avg:94.80ms +[2025-08-22 08:38:53] [Rank 0] step:2921/10000 train_time:276919ms step_avg:94.80ms +[2025-08-22 08:38:55] 
[Rank 0] step:2941/10000 train_time:278849ms step_avg:94.81ms +[2025-08-22 08:38:55] [Rank 0] step:2941/10000 train_time:278849ms step_avg:94.81ms +[2025-08-22 08:38:57] [Rank 0] step:2961/10000 train_time:280778ms step_avg:94.83ms +[2025-08-22 08:38:57] [Rank 0] step:2961/10000 train_time:280778ms step_avg:94.83ms +[2025-08-22 08:38:59] [Rank 0] step:2981/10000 train_time:282715ms step_avg:94.84ms +[2025-08-22 08:38:59] [Rank 0] step:2981/10000 train_time:282715ms step_avg:94.84ms +[2025-08-22 08:39:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:39:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:39:14] [Rank 0] PRINT: step:3000/10000 val_loss:3.8551 svd_entropy: attn_qk:H=0.9138,top10E=0.07,eRank=433.4,q75/q25=11.17 attn_vo:H=0.9163,top10E=0.06,eRank=440.9,q75/q25=11.41 mlp_w1:H=0.9731,top10E=0.04,eRank=642.1,q75/q25=2.72 mlp_w2:H=0.9684,top10E=0.05,eRank=622.9,q75/q25=2.83 vo_prod:H=0.8439,top10E=0.12,eRank=274.4,q75/q25=66.32 train_time:284610ms step_avg:94.87ms +[2025-08-22 08:39:14] [Rank 0] PRINT: step:3000/10000 val_loss:3.8551 svd_entropy: attn_qk:H=0.9138,top10E=0.07,eRank=433.4,q75/q25=11.17 attn_vo:H=0.9163,top10E=0.06,eRank=440.9,q75/q25=11.41 mlp_w1:H=0.9731,top10E=0.04,eRank=642.1,q75/q25=2.72 mlp_w2:H=0.9684,top10E=0.05,eRank=622.9,q75/q25=2.83 vo_prod:H=0.8439,top10E=0.12,eRank=274.4,q75/q25=66.32 train_time:284610ms step_avg:94.87ms +[2025-08-22 08:39:14] [Rank 0] step:3001/10000 train_time:284662ms step_avg:94.86ms +[2025-08-22 08:39:14] [Rank 0] step:3001/10000 train_time:284662ms step_avg:94.86ms +[2025-08-22 08:39:16] [Rank 0] step:3021/10000 train_time:286601ms step_avg:94.87ms +[2025-08-22 08:39:16] [Rank 0] step:3021/10000 train_time:286601ms step_avg:94.87ms +[2025-08-22 08:39:18] [Rank 0] step:3041/10000 train_time:288531ms step_avg:94.88ms 
+[2025-08-22 08:39:18] [Rank 0] step:3041/10000 train_time:288531ms step_avg:94.88ms +[2025-08-22 08:39:20] [Rank 0] step:3061/10000 train_time:290463ms step_avg:94.89ms +[2025-08-22 08:39:20] [Rank 0] step:3061/10000 train_time:290463ms step_avg:94.89ms +[2025-08-22 08:39:22] [Rank 0] step:3081/10000 train_time:292398ms step_avg:94.90ms +[2025-08-22 08:39:22] [Rank 0] step:3081/10000 train_time:292398ms step_avg:94.90ms +[2025-08-22 08:39:24] [Rank 0] step:3101/10000 train_time:294334ms step_avg:94.92ms +[2025-08-22 08:39:24] [Rank 0] step:3101/10000 train_time:294334ms step_avg:94.92ms +[2025-08-22 08:39:26] [Rank 0] step:3121/10000 train_time:296269ms step_avg:94.93ms +[2025-08-22 08:39:26] [Rank 0] step:3121/10000 train_time:296269ms step_avg:94.93ms +[2025-08-22 08:39:28] [Rank 0] step:3141/10000 train_time:298207ms step_avg:94.94ms +[2025-08-22 08:39:28] [Rank 0] step:3141/10000 train_time:298207ms step_avg:94.94ms +[2025-08-22 08:39:30] [Rank 0] step:3161/10000 train_time:300145ms step_avg:94.95ms +[2025-08-22 08:39:30] [Rank 0] step:3161/10000 train_time:300145ms step_avg:94.95ms +[2025-08-22 08:39:32] [Rank 0] step:3181/10000 train_time:302227ms step_avg:95.01ms +[2025-08-22 08:39:32] [Rank 0] step:3181/10000 train_time:302227ms step_avg:95.01ms +[2025-08-22 08:39:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:39:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:39:48] [Rank 0] PRINT: step:3200/10000 val_loss:3.8294 svd_entropy: attn_qk:H=0.9135,top10E=0.07,eRank=432.7,q75/q25=11.17 attn_vo:H=0.9164,top10E=0.06,eRank=441.0,q75/q25=11.41 mlp_w1:H=0.9730,top10E=0.04,eRank=642.1,q75/q25=2.71 mlp_w2:H=0.9684,top10E=0.05,eRank=622.7,q75/q25=2.83 vo_prod:H=0.8441,top10E=0.12,eRank=274.6,q75/q25=66.51 train_time:304216ms step_avg:95.07ms +[2025-08-22 08:39:48] [Rank 0] PRINT: step:3200/10000 val_loss:3.8294 svd_entropy: attn_qk:H=0.9135,top10E=0.07,eRank=432.7,q75/q25=11.17 attn_vo:H=0.9164,top10E=0.06,eRank=441.0,q75/q25=11.41 mlp_w1:H=0.9730,top10E=0.04,eRank=642.1,q75/q25=2.71 mlp_w2:H=0.9684,top10E=0.05,eRank=622.7,q75/q25=2.83 vo_prod:H=0.8441,top10E=0.12,eRank=274.6,q75/q25=66.51 train_time:304216ms step_avg:95.07ms +[2025-08-22 08:39:48] [Rank 0] step:3201/10000 train_time:304268ms step_avg:95.05ms +[2025-08-22 08:39:48] [Rank 0] step:3201/10000 train_time:304268ms step_avg:95.05ms +[2025-08-22 08:39:50] [Rank 0] step:3221/10000 train_time:306207ms step_avg:95.07ms +[2025-08-22 08:39:50] [Rank 0] step:3221/10000 train_time:306207ms step_avg:95.07ms +[2025-08-22 08:39:52] [Rank 0] step:3241/10000 train_time:308140ms step_avg:95.08ms +[2025-08-22 08:39:52] [Rank 0] step:3241/10000 train_time:308140ms step_avg:95.08ms +[2025-08-22 08:39:54] [Rank 0] step:3261/10000 train_time:310075ms step_avg:95.09ms +[2025-08-22 08:39:54] [Rank 0] step:3261/10000 train_time:310075ms step_avg:95.09ms +[2025-08-22 08:39:56] [Rank 0] step:3281/10000 train_time:312012ms step_avg:95.10ms +[2025-08-22 08:39:56] [Rank 0] step:3281/10000 train_time:312012ms step_avg:95.10ms +[2025-08-22 08:39:57] [Rank 0] step:3301/10000 train_time:313950ms step_avg:95.11ms +[2025-08-22 08:39:57] [Rank 0] step:3301/10000 train_time:313950ms step_avg:95.11ms +[2025-08-22 08:39:59] [Rank 0] step:3321/10000 train_time:315889ms step_avg:95.12ms +[2025-08-22 08:39:59] [Rank 0] step:3321/10000 train_time:315889ms step_avg:95.12ms +[2025-08-22 08:40:01] 
[Rank 0] step:3341/10000 train_time:317830ms step_avg:95.13ms +[2025-08-22 08:40:01] [Rank 0] step:3341/10000 train_time:317830ms step_avg:95.13ms +[2025-08-22 08:40:03] [Rank 0] step:3361/10000 train_time:319770ms step_avg:95.14ms +[2025-08-22 08:40:03] [Rank 0] step:3361/10000 train_time:319770ms step_avg:95.14ms +[2025-08-22 08:40:05] [Rank 0] step:3381/10000 train_time:321710ms step_avg:95.15ms +[2025-08-22 08:40:05] [Rank 0] step:3381/10000 train_time:321710ms step_avg:95.15ms +[2025-08-22 08:40:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:40:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:40:21] [Rank 0] PRINT: step:3400/10000 val_loss:3.8086 svd_entropy: attn_qk:H=0.9133,top10E=0.07,eRank=432.1,q75/q25=11.16 attn_vo:H=0.9164,top10E=0.06,eRank=441.1,q75/q25=11.47 mlp_w1:H=0.9730,top10E=0.04,eRank=642.0,q75/q25=2.72 mlp_w2:H=0.9683,top10E=0.05,eRank=622.6,q75/q25=2.82 vo_prod:H=0.8442,top10E=0.12,eRank=274.8,q75/q25=66.27 train_time:323607ms step_avg:95.18ms +[2025-08-22 08:40:21] [Rank 0] PRINT: step:3400/10000 val_loss:3.8086 svd_entropy: attn_qk:H=0.9133,top10E=0.07,eRank=432.1,q75/q25=11.16 attn_vo:H=0.9164,top10E=0.06,eRank=441.1,q75/q25=11.47 mlp_w1:H=0.9730,top10E=0.04,eRank=642.0,q75/q25=2.72 mlp_w2:H=0.9683,top10E=0.05,eRank=622.6,q75/q25=2.82 vo_prod:H=0.8442,top10E=0.12,eRank=274.8,q75/q25=66.27 train_time:323607ms step_avg:95.18ms +[2025-08-22 08:40:21] [Rank 0] step:3401/10000 train_time:323659ms step_avg:95.17ms +[2025-08-22 08:40:21] [Rank 0] step:3401/10000 train_time:323659ms step_avg:95.17ms +[2025-08-22 08:40:23] [Rank 0] step:3421/10000 train_time:325609ms step_avg:95.18ms +[2025-08-22 08:40:23] [Rank 0] step:3421/10000 train_time:325609ms step_avg:95.18ms +[2025-08-22 08:40:25] [Rank 0] step:3441/10000 train_time:327542ms step_avg:95.19ms 
+[2025-08-22 08:40:25] [Rank 0] step:3441/10000 train_time:327542ms step_avg:95.19ms +[2025-08-22 08:40:27] [Rank 0] step:3461/10000 train_time:329475ms step_avg:95.20ms +[2025-08-22 08:40:27] [Rank 0] step:3461/10000 train_time:329475ms step_avg:95.20ms +[2025-08-22 08:40:29] [Rank 0] step:3481/10000 train_time:331407ms step_avg:95.20ms +[2025-08-22 08:40:29] [Rank 0] step:3481/10000 train_time:331407ms step_avg:95.20ms +[2025-08-22 08:40:31] [Rank 0] step:3501/10000 train_time:333348ms step_avg:95.22ms +[2025-08-22 08:40:31] [Rank 0] step:3501/10000 train_time:333348ms step_avg:95.22ms +[2025-08-22 08:40:33] [Rank 0] step:3521/10000 train_time:335284ms step_avg:95.22ms +[2025-08-22 08:40:33] [Rank 0] step:3521/10000 train_time:335284ms step_avg:95.22ms +[2025-08-22 08:40:35] [Rank 0] step:3541/10000 train_time:337304ms step_avg:95.26ms +[2025-08-22 08:40:35] [Rank 0] step:3541/10000 train_time:337304ms step_avg:95.26ms +[2025-08-22 08:40:37] [Rank 0] step:3561/10000 train_time:339319ms step_avg:95.29ms +[2025-08-22 08:40:37] [Rank 0] step:3561/10000 train_time:339319ms step_avg:95.29ms +[2025-08-22 08:40:39] [Rank 0] step:3581/10000 train_time:341253ms step_avg:95.30ms +[2025-08-22 08:40:39] [Rank 0] step:3581/10000 train_time:341253ms step_avg:95.30ms +[2025-08-22 08:40:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:40:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:40:55] [Rank 0] PRINT: step:3600/10000 val_loss:3.8042 svd_entropy: attn_qk:H=0.9130,top10E=0.07,eRank=431.4,q75/q25=11.22 attn_vo:H=0.9165,top10E=0.06,eRank=441.2,q75/q25=11.48 mlp_w1:H=0.9730,top10E=0.04,eRank=641.9,q75/q25=2.71 mlp_w2:H=0.9683,top10E=0.05,eRank=622.5,q75/q25=2.82 vo_prod:H=0.8444,top10E=0.12,eRank=275.2,q75/q25=66.57 train_time:343148ms step_avg:95.32ms +[2025-08-22 08:40:55] [Rank 0] PRINT: step:3600/10000 val_loss:3.8042 svd_entropy: attn_qk:H=0.9130,top10E=0.07,eRank=431.4,q75/q25=11.22 attn_vo:H=0.9165,top10E=0.06,eRank=441.2,q75/q25=11.48 mlp_w1:H=0.9730,top10E=0.04,eRank=641.9,q75/q25=2.71 mlp_w2:H=0.9683,top10E=0.05,eRank=622.5,q75/q25=2.82 vo_prod:H=0.8444,top10E=0.12,eRank=275.2,q75/q25=66.57 train_time:343148ms step_avg:95.32ms +[2025-08-22 08:40:55] [Rank 0] step:3601/10000 train_time:343200ms step_avg:95.31ms +[2025-08-22 08:40:55] [Rank 0] step:3601/10000 train_time:343200ms step_avg:95.31ms +[2025-08-22 08:40:57] [Rank 0] step:3621/10000 train_time:345141ms step_avg:95.32ms +[2025-08-22 08:40:57] [Rank 0] step:3621/10000 train_time:345141ms step_avg:95.32ms +[2025-08-22 08:40:59] [Rank 0] step:3641/10000 train_time:347073ms step_avg:95.32ms +[2025-08-22 08:40:59] [Rank 0] step:3641/10000 train_time:347073ms step_avg:95.32ms +[2025-08-22 08:41:01] [Rank 0] step:3661/10000 train_time:349005ms step_avg:95.33ms +[2025-08-22 08:41:01] [Rank 0] step:3661/10000 train_time:349005ms step_avg:95.33ms +[2025-08-22 08:41:03] [Rank 0] step:3681/10000 train_time:350938ms step_avg:95.34ms +[2025-08-22 08:41:03] [Rank 0] step:3681/10000 train_time:350938ms step_avg:95.34ms +[2025-08-22 08:41:04] [Rank 0] step:3701/10000 train_time:352875ms step_avg:95.35ms +[2025-08-22 08:41:04] [Rank 0] step:3701/10000 train_time:352875ms step_avg:95.35ms +[2025-08-22 08:41:06] [Rank 0] step:3721/10000 train_time:354841ms step_avg:95.36ms +[2025-08-22 08:41:06] [Rank 0] step:3721/10000 train_time:354841ms step_avg:95.36ms +[2025-08-22 08:41:08] 
[Rank 0] step:3741/10000 train_time:356813ms step_avg:95.38ms +[2025-08-22 08:41:08] [Rank 0] step:3741/10000 train_time:356813ms step_avg:95.38ms +[2025-08-22 08:41:10] [Rank 0] step:3761/10000 train_time:358787ms step_avg:95.40ms +[2025-08-22 08:41:10] [Rank 0] step:3761/10000 train_time:358787ms step_avg:95.40ms +[2025-08-22 08:41:12] [Rank 0] step:3781/10000 train_time:360764ms step_avg:95.41ms +[2025-08-22 08:41:12] [Rank 0] step:3781/10000 train_time:360764ms step_avg:95.41ms +[2025-08-22 08:41:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:41:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:41:28] [Rank 0] PRINT: step:3800/10000 val_loss:3.7730 svd_entropy: attn_qk:H=0.9128,top10E=0.07,eRank=430.8,q75/q25=11.27 attn_vo:H=0.9165,top10E=0.06,eRank=441.3,q75/q25=11.49 mlp_w1:H=0.9730,top10E=0.04,eRank=641.8,q75/q25=2.71 mlp_w2:H=0.9683,top10E=0.05,eRank=622.3,q75/q25=2.82 vo_prod:H=0.8444,top10E=0.12,eRank=275.3,q75/q25=66.77 train_time:362698ms step_avg:95.45ms +[2025-08-22 08:41:28] [Rank 0] PRINT: step:3800/10000 val_loss:3.7730 svd_entropy: attn_qk:H=0.9128,top10E=0.07,eRank=430.8,q75/q25=11.27 attn_vo:H=0.9165,top10E=0.06,eRank=441.3,q75/q25=11.49 mlp_w1:H=0.9730,top10E=0.04,eRank=641.8,q75/q25=2.71 mlp_w2:H=0.9683,top10E=0.05,eRank=622.3,q75/q25=2.82 vo_prod:H=0.8444,top10E=0.12,eRank=275.3,q75/q25=66.77 train_time:362698ms step_avg:95.45ms +[2025-08-22 08:41:28] [Rank 0] step:3801/10000 train_time:362751ms step_avg:95.44ms +[2025-08-22 08:41:28] [Rank 0] step:3801/10000 train_time:362751ms step_avg:95.44ms +[2025-08-22 08:41:30] [Rank 0] step:3821/10000 train_time:364746ms step_avg:95.46ms +[2025-08-22 08:41:30] [Rank 0] step:3821/10000 train_time:364746ms step_avg:95.46ms +[2025-08-22 08:41:32] [Rank 0] step:3841/10000 train_time:366719ms step_avg:95.47ms 
+[2025-08-22 08:41:32] [Rank 0] step:3841/10000 train_time:366719ms step_avg:95.47ms +[2025-08-22 08:41:34] [Rank 0] step:3861/10000 train_time:368691ms step_avg:95.49ms +[2025-08-22 08:41:34] [Rank 0] step:3861/10000 train_time:368691ms step_avg:95.49ms +[2025-08-22 08:41:36] [Rank 0] step:3881/10000 train_time:370661ms step_avg:95.51ms +[2025-08-22 08:41:36] [Rank 0] step:3881/10000 train_time:370661ms step_avg:95.51ms +[2025-08-22 08:41:38] [Rank 0] step:3901/10000 train_time:372760ms step_avg:95.56ms +[2025-08-22 08:41:38] [Rank 0] step:3901/10000 train_time:372760ms step_avg:95.56ms +[2025-08-22 08:41:40] [Rank 0] step:3921/10000 train_time:374862ms step_avg:95.60ms +[2025-08-22 08:41:40] [Rank 0] step:3921/10000 train_time:374862ms step_avg:95.60ms +[2025-08-22 08:41:42] [Rank 0] step:3941/10000 train_time:376836ms step_avg:95.62ms +[2025-08-22 08:41:42] [Rank 0] step:3941/10000 train_time:376836ms step_avg:95.62ms +[2025-08-22 08:41:44] [Rank 0] step:3961/10000 train_time:378811ms step_avg:95.64ms +[2025-08-22 08:41:44] [Rank 0] step:3961/10000 train_time:378811ms step_avg:95.64ms +[2025-08-22 08:41:46] [Rank 0] step:3981/10000 train_time:380791ms step_avg:95.65ms +[2025-08-22 08:41:46] [Rank 0] step:3981/10000 train_time:380791ms step_avg:95.65ms +[2025-08-22 08:41:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:41:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:42:02] [Rank 0] PRINT: step:4000/10000 val_loss:3.7572 svd_entropy: attn_qk:H=0.9125,top10E=0.07,eRank=430.0,q75/q25=11.28 attn_vo:H=0.9164,top10E=0.06,eRank=441.2,q75/q25=11.45 mlp_w1:H=0.9730,top10E=0.04,eRank=641.7,q75/q25=2.71 mlp_w2:H=0.9683,top10E=0.05,eRank=622.3,q75/q25=2.81 vo_prod:H=0.8445,top10E=0.13,eRank=275.5,q75/q25=65.44 train_time:382725ms step_avg:95.68ms +[2025-08-22 08:42:02] [Rank 0] PRINT: step:4000/10000 val_loss:3.7572 svd_entropy: attn_qk:H=0.9125,top10E=0.07,eRank=430.0,q75/q25=11.28 attn_vo:H=0.9164,top10E=0.06,eRank=441.2,q75/q25=11.45 mlp_w1:H=0.9730,top10E=0.04,eRank=641.7,q75/q25=2.71 mlp_w2:H=0.9683,top10E=0.05,eRank=622.3,q75/q25=2.81 vo_prod:H=0.8445,top10E=0.13,eRank=275.5,q75/q25=65.44 train_time:382725ms step_avg:95.68ms +[2025-08-22 08:42:02] [Rank 0] step:4001/10000 train_time:382778ms step_avg:95.67ms +[2025-08-22 08:42:02] [Rank 0] step:4001/10000 train_time:382778ms step_avg:95.67ms +[2025-08-22 08:42:04] [Rank 0] step:4021/10000 train_time:384758ms step_avg:95.69ms +[2025-08-22 08:42:04] [Rank 0] step:4021/10000 train_time:384758ms step_avg:95.69ms +[2025-08-22 08:42:06] [Rank 0] step:4041/10000 train_time:386733ms step_avg:95.70ms +[2025-08-22 08:42:06] [Rank 0] step:4041/10000 train_time:386733ms step_avg:95.70ms +[2025-08-22 08:42:08] [Rank 0] step:4061/10000 train_time:388707ms step_avg:95.72ms +[2025-08-22 08:42:08] [Rank 0] step:4061/10000 train_time:388707ms step_avg:95.72ms +[2025-08-22 08:42:11] [Rank 0] step:4081/10000 train_time:391389ms step_avg:95.91ms +[2025-08-22 08:42:11] [Rank 0] step:4081/10000 train_time:391389ms step_avg:95.91ms +[2025-08-22 08:42:13] [Rank 0] step:4101/10000 train_time:393364ms step_avg:95.92ms +[2025-08-22 08:42:13] [Rank 0] step:4101/10000 train_time:393364ms step_avg:95.92ms +[2025-08-22 08:42:15] [Rank 0] step:4121/10000 train_time:395341ms step_avg:95.93ms +[2025-08-22 08:42:15] [Rank 0] step:4121/10000 train_time:395341ms step_avg:95.93ms +[2025-08-22 08:42:17] 
[Rank 0] step:4141/10000 train_time:397319ms step_avg:95.95ms +[2025-08-22 08:42:17] [Rank 0] step:4141/10000 train_time:397319ms step_avg:95.95ms +[2025-08-22 08:42:19] [Rank 0] step:4161/10000 train_time:399296ms step_avg:95.96ms +[2025-08-22 08:42:19] [Rank 0] step:4161/10000 train_time:399296ms step_avg:95.96ms +[2025-08-22 08:42:21] [Rank 0] step:4181/10000 train_time:401275ms step_avg:95.98ms +[2025-08-22 08:42:21] [Rank 0] step:4181/10000 train_time:401275ms step_avg:95.98ms +[2025-08-22 08:42:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:42:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:42:36] [Rank 0] PRINT: step:4200/10000 val_loss:3.7452 svd_entropy: attn_qk:H=0.9123,top10E=0.07,eRank=429.3,q75/q25=11.27 attn_vo:H=0.9165,top10E=0.06,eRank=441.3,q75/q25=11.45 mlp_w1:H=0.9729,top10E=0.04,eRank=641.6,q75/q25=2.71 mlp_w2:H=0.9682,top10E=0.05,eRank=622.2,q75/q25=2.81 vo_prod:H=0.8447,top10E=0.13,eRank=275.8,q75/q25=65.82 train_time:403210ms step_avg:96.00ms +[2025-08-22 08:42:36] [Rank 0] PRINT: step:4200/10000 val_loss:3.7452 svd_entropy: attn_qk:H=0.9123,top10E=0.07,eRank=429.3,q75/q25=11.27 attn_vo:H=0.9165,top10E=0.06,eRank=441.3,q75/q25=11.45 mlp_w1:H=0.9729,top10E=0.04,eRank=641.6,q75/q25=2.71 mlp_w2:H=0.9682,top10E=0.05,eRank=622.2,q75/q25=2.81 vo_prod:H=0.8447,top10E=0.13,eRank=275.8,q75/q25=65.82 train_time:403210ms step_avg:96.00ms +[2025-08-22 08:42:36] [Rank 0] step:4201/10000 train_time:403262ms step_avg:95.99ms +[2025-08-22 08:42:36] [Rank 0] step:4201/10000 train_time:403262ms step_avg:95.99ms +[2025-08-22 08:42:38] [Rank 0] step:4221/10000 train_time:405245ms step_avg:96.01ms +[2025-08-22 08:42:38] [Rank 0] step:4221/10000 train_time:405245ms step_avg:96.01ms +[2025-08-22 08:42:40] [Rank 0] step:4241/10000 train_time:407218ms step_avg:96.02ms 
+[2025-08-22 08:42:40] [Rank 0] step:4241/10000 train_time:407218ms step_avg:96.02ms +[2025-08-22 08:42:43] [Rank 0] step:4261/10000 train_time:409303ms step_avg:96.06ms +[2025-08-22 08:42:43] [Rank 0] step:4261/10000 train_time:409303ms step_avg:96.06ms +[2025-08-22 08:42:45] [Rank 0] step:4281/10000 train_time:411409ms step_avg:96.10ms +[2025-08-22 08:42:45] [Rank 0] step:4281/10000 train_time:411409ms step_avg:96.10ms +[2025-08-22 08:42:47] [Rank 0] step:4301/10000 train_time:413379ms step_avg:96.11ms +[2025-08-22 08:42:47] [Rank 0] step:4301/10000 train_time:413379ms step_avg:96.11ms +[2025-08-22 08:42:49] [Rank 0] step:4321/10000 train_time:415351ms step_avg:96.12ms +[2025-08-22 08:42:49] [Rank 0] step:4321/10000 train_time:415351ms step_avg:96.12ms +[2025-08-22 08:42:51] [Rank 0] step:4341/10000 train_time:417324ms step_avg:96.14ms +[2025-08-22 08:42:51] [Rank 0] step:4341/10000 train_time:417324ms step_avg:96.14ms +[2025-08-22 08:42:53] [Rank 0] step:4361/10000 train_time:419297ms step_avg:96.15ms +[2025-08-22 08:42:53] [Rank 0] step:4361/10000 train_time:419297ms step_avg:96.15ms +[2025-08-22 08:42:54] [Rank 0] step:4381/10000 train_time:421272ms step_avg:96.16ms +[2025-08-22 08:42:54] [Rank 0] step:4381/10000 train_time:421272ms step_avg:96.16ms +[2025-08-22 08:42:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:42:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:43:10] [Rank 0] PRINT: step:4400/10000 val_loss:3.7299 svd_entropy: attn_qk:H=0.9121,top10E=0.07,eRank=428.8,q75/q25=11.30 attn_vo:H=0.9165,top10E=0.06,eRank=441.4,q75/q25=11.47 mlp_w1:H=0.9729,top10E=0.04,eRank=641.5,q75/q25=2.71 mlp_w2:H=0.9682,top10E=0.05,eRank=622.2,q75/q25=2.81 vo_prod:H=0.8447,top10E=0.13,eRank=275.9,q75/q25=65.73 train_time:423205ms step_avg:96.18ms +[2025-08-22 08:43:10] [Rank 0] PRINT: step:4400/10000 val_loss:3.7299 svd_entropy: attn_qk:H=0.9121,top10E=0.07,eRank=428.8,q75/q25=11.30 attn_vo:H=0.9165,top10E=0.06,eRank=441.4,q75/q25=11.47 mlp_w1:H=0.9729,top10E=0.04,eRank=641.5,q75/q25=2.71 mlp_w2:H=0.9682,top10E=0.05,eRank=622.2,q75/q25=2.81 vo_prod:H=0.8447,top10E=0.13,eRank=275.9,q75/q25=65.73 train_time:423205ms step_avg:96.18ms +[2025-08-22 08:43:10] [Rank 0] step:4401/10000 train_time:423258ms step_avg:96.17ms +[2025-08-22 08:43:10] [Rank 0] step:4401/10000 train_time:423258ms step_avg:96.17ms +[2025-08-22 08:43:12] [Rank 0] step:4421/10000 train_time:425227ms step_avg:96.18ms +[2025-08-22 08:43:12] [Rank 0] step:4421/10000 train_time:425227ms step_avg:96.18ms +[2025-08-22 08:43:14] [Rank 0] step:4441/10000 train_time:427196ms step_avg:96.19ms +[2025-08-22 08:43:14] [Rank 0] step:4441/10000 train_time:427196ms step_avg:96.19ms +[2025-08-22 08:43:16] [Rank 0] step:4461/10000 train_time:429171ms step_avg:96.21ms +[2025-08-22 08:43:16] [Rank 0] step:4461/10000 train_time:429171ms step_avg:96.21ms +[2025-08-22 08:43:18] [Rank 0] step:4481/10000 train_time:431148ms step_avg:96.22ms +[2025-08-22 08:43:18] [Rank 0] step:4481/10000 train_time:431148ms step_avg:96.22ms +[2025-08-22 08:43:20] [Rank 0] step:4501/10000 train_time:433123ms step_avg:96.23ms +[2025-08-22 08:43:20] [Rank 0] step:4501/10000 train_time:433123ms step_avg:96.23ms +[2025-08-22 08:43:22] [Rank 0] step:4521/10000 train_time:435099ms step_avg:96.24ms +[2025-08-22 08:43:22] [Rank 0] step:4521/10000 train_time:435099ms step_avg:96.24ms +[2025-08-22 08:43:24] 
[Rank 0] step:4541/10000 train_time:437079ms step_avg:96.25ms +[2025-08-22 08:43:24] [Rank 0] step:4541/10000 train_time:437079ms step_avg:96.25ms +[2025-08-22 08:43:26] [Rank 0] step:4561/10000 train_time:439057ms step_avg:96.26ms +[2025-08-22 08:43:26] [Rank 0] step:4561/10000 train_time:439057ms step_avg:96.26ms +[2025-08-22 08:43:28] [Rank 0] step:4581/10000 train_time:441039ms step_avg:96.28ms +[2025-08-22 08:43:28] [Rank 0] step:4581/10000 train_time:441039ms step_avg:96.28ms +[2025-08-22 08:43:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:43:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:43:44] [Rank 0] PRINT: step:4600/10000 val_loss:3.7140 svd_entropy: attn_qk:H=0.9119,top10E=0.07,eRank=428.3,q75/q25=11.30 attn_vo:H=0.9165,top10E=0.06,eRank=441.5,q75/q25=11.48 mlp_w1:H=0.9729,top10E=0.04,eRank=641.4,q75/q25=2.71 mlp_w2:H=0.9682,top10E=0.05,eRank=622.1,q75/q25=2.81 vo_prod:H=0.8449,top10E=0.13,eRank=276.2,q75/q25=65.47 train_time:442978ms step_avg:96.30ms +[2025-08-22 08:43:44] [Rank 0] PRINT: step:4600/10000 val_loss:3.7140 svd_entropy: attn_qk:H=0.9119,top10E=0.07,eRank=428.3,q75/q25=11.30 attn_vo:H=0.9165,top10E=0.06,eRank=441.5,q75/q25=11.48 mlp_w1:H=0.9729,top10E=0.04,eRank=641.4,q75/q25=2.71 mlp_w2:H=0.9682,top10E=0.05,eRank=622.1,q75/q25=2.81 vo_prod:H=0.8449,top10E=0.13,eRank=276.2,q75/q25=65.47 train_time:442978ms step_avg:96.30ms +[2025-08-22 08:43:44] [Rank 0] step:4601/10000 train_time:443031ms step_avg:96.29ms +[2025-08-22 08:43:44] [Rank 0] step:4601/10000 train_time:443031ms step_avg:96.29ms +[2025-08-22 08:43:46] [Rank 0] step:4621/10000 train_time:445140ms step_avg:96.33ms +[2025-08-22 08:43:46] [Rank 0] step:4621/10000 train_time:445140ms step_avg:96.33ms +[2025-08-22 08:43:48] [Rank 0] step:4641/10000 train_time:447273ms step_avg:96.37ms 
+[2025-08-22 08:43:48] [Rank 0] step:4641/10000 train_time:447273ms step_avg:96.37ms +[2025-08-22 08:43:50] [Rank 0] step:4661/10000 train_time:449250ms step_avg:96.38ms +[2025-08-22 08:43:50] [Rank 0] step:4661/10000 train_time:449250ms step_avg:96.38ms +[2025-08-22 08:43:52] [Rank 0] step:4681/10000 train_time:451229ms step_avg:96.40ms +[2025-08-22 08:43:52] [Rank 0] step:4681/10000 train_time:451229ms step_avg:96.40ms +[2025-08-22 08:43:54] [Rank 0] step:4701/10000 train_time:453210ms step_avg:96.41ms +[2025-08-22 08:43:54] [Rank 0] step:4701/10000 train_time:453210ms step_avg:96.41ms +[2025-08-22 08:43:56] [Rank 0] step:4721/10000 train_time:455189ms step_avg:96.42ms +[2025-08-22 08:43:56] [Rank 0] step:4721/10000 train_time:455189ms step_avg:96.42ms +[2025-08-22 08:43:58] [Rank 0] step:4741/10000 train_time:457174ms step_avg:96.43ms +[2025-08-22 08:43:58] [Rank 0] step:4741/10000 train_time:457174ms step_avg:96.43ms +[2025-08-22 08:44:00] [Rank 0] step:4761/10000 train_time:459155ms step_avg:96.44ms +[2025-08-22 08:44:00] [Rank 0] step:4761/10000 train_time:459155ms step_avg:96.44ms +[2025-08-22 08:44:02] [Rank 0] step:4781/10000 train_time:461136ms step_avg:96.45ms +[2025-08-22 08:44:02] [Rank 0] step:4781/10000 train_time:461136ms step_avg:96.45ms +[2025-08-22 08:44:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:44:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:44:18] [Rank 0] PRINT: step:4800/10000 val_loss:3.7055 svd_entropy: attn_qk:H=0.9117,top10E=0.07,eRank=427.9,q75/q25=11.34 attn_vo:H=0.9166,top10E=0.06,eRank=441.6,q75/q25=11.47 mlp_w1:H=0.9728,top10E=0.04,eRank=641.3,q75/q25=2.71 mlp_w2:H=0.9682,top10E=0.05,eRank=622.1,q75/q25=2.81 vo_prod:H=0.8450,top10E=0.13,eRank=276.5,q75/q25=65.27 train_time:463077ms step_avg:96.47ms +[2025-08-22 08:44:18] [Rank 0] PRINT: step:4800/10000 val_loss:3.7055 svd_entropy: attn_qk:H=0.9117,top10E=0.07,eRank=427.9,q75/q25=11.34 attn_vo:H=0.9166,top10E=0.06,eRank=441.6,q75/q25=11.47 mlp_w1:H=0.9728,top10E=0.04,eRank=641.3,q75/q25=2.71 mlp_w2:H=0.9682,top10E=0.05,eRank=622.1,q75/q25=2.81 vo_prod:H=0.8450,top10E=0.13,eRank=276.5,q75/q25=65.27 train_time:463077ms step_avg:96.47ms +[2025-08-22 08:44:18] [Rank 0] step:4801/10000 train_time:463130ms step_avg:96.47ms +[2025-08-22 08:44:18] [Rank 0] step:4801/10000 train_time:463130ms step_avg:96.47ms +[2025-08-22 08:44:20] [Rank 0] step:4821/10000 train_time:465111ms step_avg:96.48ms +[2025-08-22 08:44:20] [Rank 0] step:4821/10000 train_time:465111ms step_avg:96.48ms +[2025-08-22 08:44:22] [Rank 0] step:4841/10000 train_time:467088ms step_avg:96.49ms +[2025-08-22 08:44:22] [Rank 0] step:4841/10000 train_time:467088ms step_avg:96.49ms +[2025-08-22 08:44:24] [Rank 0] step:4861/10000 train_time:469067ms step_avg:96.50ms +[2025-08-22 08:44:24] [Rank 0] step:4861/10000 train_time:469067ms step_avg:96.50ms +[2025-08-22 08:44:26] [Rank 0] step:4881/10000 train_time:471047ms step_avg:96.51ms +[2025-08-22 08:44:26] [Rank 0] step:4881/10000 train_time:471047ms step_avg:96.51ms +[2025-08-22 08:44:28] [Rank 0] step:4901/10000 train_time:473026ms step_avg:96.52ms +[2025-08-22 08:44:28] [Rank 0] step:4901/10000 train_time:473026ms step_avg:96.52ms +[2025-08-22 08:44:30] [Rank 0] step:4921/10000 train_time:475011ms step_avg:96.53ms +[2025-08-22 08:44:30] [Rank 0] step:4921/10000 train_time:475011ms step_avg:96.53ms +[2025-08-22 08:44:32] 
[Rank 0] step:4941/10000 train_time:476994ms step_avg:96.54ms +[2025-08-22 08:44:32] [Rank 0] step:4941/10000 train_time:476994ms step_avg:96.54ms +[2025-08-22 08:44:34] [Rank 0] step:4961/10000 train_time:478977ms step_avg:96.55ms +[2025-08-22 08:44:34] [Rank 0] step:4961/10000 train_time:478977ms step_avg:96.55ms +[2025-08-22 08:44:36] [Rank 0] step:4981/10000 train_time:480962ms step_avg:96.56ms +[2025-08-22 08:44:36] [Rank 0] step:4981/10000 train_time:480962ms step_avg:96.56ms +[2025-08-22 08:44:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:44:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:44:51] [Rank 0] PRINT: step:5000/10000 val_loss:3.6948 svd_entropy: attn_qk:H=0.9115,top10E=0.07,eRank=427.3,q75/q25=11.35 attn_vo:H=0.9166,top10E=0.06,eRank=441.7,q75/q25=11.48 mlp_w1:H=0.9728,top10E=0.04,eRank=641.1,q75/q25=2.71 mlp_w2:H=0.9682,top10E=0.05,eRank=622.0,q75/q25=2.81 vo_prod:H=0.8451,top10E=0.13,eRank=276.8,q75/q25=65.39 train_time:482906ms step_avg:96.58ms +[2025-08-22 08:44:51] [Rank 0] PRINT: step:5000/10000 val_loss:3.6948 svd_entropy: attn_qk:H=0.9115,top10E=0.07,eRank=427.3,q75/q25=11.35 attn_vo:H=0.9166,top10E=0.06,eRank=441.7,q75/q25=11.48 mlp_w1:H=0.9728,top10E=0.04,eRank=641.1,q75/q25=2.71 mlp_w2:H=0.9682,top10E=0.05,eRank=622.0,q75/q25=2.81 vo_prod:H=0.8451,top10E=0.13,eRank=276.8,q75/q25=65.39 train_time:482906ms step_avg:96.58ms +[2025-08-22 08:44:51] [Rank 0] step:5001/10000 train_time:482959ms step_avg:96.57ms +[2025-08-22 08:44:51] [Rank 0] step:5001/10000 train_time:482959ms step_avg:96.57ms +[2025-08-22 08:44:53] [Rank 0] step:5021/10000 train_time:484972ms step_avg:96.59ms +[2025-08-22 08:44:53] [Rank 0] step:5021/10000 train_time:484972ms step_avg:96.59ms +[2025-08-22 08:44:55] [Rank 0] step:5041/10000 train_time:486957ms step_avg:96.60ms 
+[2025-08-22 08:44:55] [Rank 0] step:5041/10000 train_time:486957ms step_avg:96.60ms +[2025-08-22 08:44:57] [Rank 0] step:5061/10000 train_time:488939ms step_avg:96.61ms +[2025-08-22 08:44:57] [Rank 0] step:5061/10000 train_time:488939ms step_avg:96.61ms +[2025-08-22 08:44:59] [Rank 0] step:5081/10000 train_time:490926ms step_avg:96.62ms +[2025-08-22 08:44:59] [Rank 0] step:5081/10000 train_time:490926ms step_avg:96.62ms +[2025-08-22 08:45:01] [Rank 0] step:5101/10000 train_time:492910ms step_avg:96.63ms +[2025-08-22 08:45:01] [Rank 0] step:5101/10000 train_time:492910ms step_avg:96.63ms +[2025-08-22 08:45:03] [Rank 0] step:5121/10000 train_time:494903ms step_avg:96.64ms +[2025-08-22 08:45:03] [Rank 0] step:5121/10000 train_time:494903ms step_avg:96.64ms +[2025-08-22 08:45:05] [Rank 0] step:5141/10000 train_time:496893ms step_avg:96.65ms +[2025-08-22 08:45:05] [Rank 0] step:5141/10000 train_time:496893ms step_avg:96.65ms +[2025-08-22 08:45:07] [Rank 0] step:5161/10000 train_time:498879ms step_avg:96.66ms +[2025-08-22 08:45:07] [Rank 0] step:5161/10000 train_time:498879ms step_avg:96.66ms +[2025-08-22 08:45:09] [Rank 0] step:5181/10000 train_time:500869ms step_avg:96.67ms +[2025-08-22 08:45:09] [Rank 0] step:5181/10000 train_time:500869ms step_avg:96.67ms +[2025-08-22 08:45:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:45:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:45:25] [Rank 0] PRINT: step:5200/10000 val_loss:3.6836 svd_entropy: attn_qk:H=0.9113,top10E=0.07,eRank=426.8,q75/q25=11.34 attn_vo:H=0.9166,top10E=0.06,eRank=441.8,q75/q25=11.48 mlp_w1:H=0.9728,top10E=0.04,eRank=641.0,q75/q25=2.71 mlp_w2:H=0.9682,top10E=0.05,eRank=621.9,q75/q25=2.81 vo_prod:H=0.8453,top10E=0.13,eRank=277.0,q75/q25=65.09 train_time:502841ms step_avg:96.70ms +[2025-08-22 08:45:25] [Rank 0] PRINT: step:5200/10000 val_loss:3.6836 svd_entropy: attn_qk:H=0.9113,top10E=0.07,eRank=426.8,q75/q25=11.34 attn_vo:H=0.9166,top10E=0.06,eRank=441.8,q75/q25=11.48 mlp_w1:H=0.9728,top10E=0.04,eRank=641.0,q75/q25=2.71 mlp_w2:H=0.9682,top10E=0.05,eRank=621.9,q75/q25=2.81 vo_prod:H=0.8453,top10E=0.13,eRank=277.0,q75/q25=65.09 train_time:502841ms step_avg:96.70ms +[2025-08-22 08:45:25] [Rank 0] step:5201/10000 train_time:502895ms step_avg:96.69ms +[2025-08-22 08:45:25] [Rank 0] step:5201/10000 train_time:502895ms step_avg:96.69ms +[2025-08-22 08:45:27] [Rank 0] step:5221/10000 train_time:504919ms step_avg:96.71ms +[2025-08-22 08:45:27] [Rank 0] step:5221/10000 train_time:504919ms step_avg:96.71ms +[2025-08-22 08:45:29] [Rank 0] step:5241/10000 train_time:506927ms step_avg:96.72ms +[2025-08-22 08:45:29] [Rank 0] step:5241/10000 train_time:506927ms step_avg:96.72ms +[2025-08-22 08:45:31] [Rank 0] step:5261/10000 train_time:508937ms step_avg:96.74ms +[2025-08-22 08:45:31] [Rank 0] step:5261/10000 train_time:508937ms step_avg:96.74ms +[2025-08-22 08:45:33] [Rank 0] step:5281/10000 train_time:510946ms step_avg:96.75ms +[2025-08-22 08:45:33] [Rank 0] step:5281/10000 train_time:510946ms step_avg:96.75ms +[2025-08-22 08:45:35] [Rank 0] step:5301/10000 train_time:512964ms step_avg:96.77ms +[2025-08-22 08:45:35] [Rank 0] step:5301/10000 train_time:512964ms step_avg:96.77ms +[2025-08-22 08:45:37] [Rank 0] step:5321/10000 train_time:514978ms step_avg:96.78ms +[2025-08-22 08:45:37] [Rank 0] step:5321/10000 train_time:514978ms step_avg:96.78ms +[2025-08-22 08:45:39] 
[Rank 0] step:5341/10000 train_time:516990ms step_avg:96.80ms +[2025-08-22 08:45:39] [Rank 0] step:5341/10000 train_time:516990ms step_avg:96.80ms +[2025-08-22 08:45:41] [Rank 0] step:5361/10000 train_time:519003ms step_avg:96.81ms +[2025-08-22 08:45:41] [Rank 0] step:5361/10000 train_time:519003ms step_avg:96.81ms +[2025-08-22 08:45:43] [Rank 0] step:5381/10000 train_time:521020ms step_avg:96.83ms +[2025-08-22 08:45:43] [Rank 0] step:5381/10000 train_time:521020ms step_avg:96.83ms +[2025-08-22 08:45:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:45:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:45:59] [Rank 0] PRINT: step:5400/10000 val_loss:3.6721 svd_entropy: attn_qk:H=0.9111,top10E=0.07,eRank=426.2,q75/q25=11.41 attn_vo:H=0.9166,top10E=0.06,eRank=441.8,q75/q25=11.54 mlp_w1:H=0.9728,top10E=0.04,eRank=640.9,q75/q25=2.71 mlp_w2:H=0.9681,top10E=0.05,eRank=621.8,q75/q25=2.82 vo_prod:H=0.8453,top10E=0.13,eRank=277.1,q75/q25=64.60 train_time:522987ms step_avg:96.85ms +[2025-08-22 08:45:59] [Rank 0] PRINT: step:5400/10000 val_loss:3.6721 svd_entropy: attn_qk:H=0.9111,top10E=0.07,eRank=426.2,q75/q25=11.41 attn_vo:H=0.9166,top10E=0.06,eRank=441.8,q75/q25=11.54 mlp_w1:H=0.9728,top10E=0.04,eRank=640.9,q75/q25=2.71 mlp_w2:H=0.9681,top10E=0.05,eRank=621.8,q75/q25=2.82 vo_prod:H=0.8453,top10E=0.13,eRank=277.1,q75/q25=64.60 train_time:522987ms step_avg:96.85ms +[2025-08-22 08:45:59] [Rank 0] step:5401/10000 train_time:523041ms step_avg:96.84ms +[2025-08-22 08:45:59] [Rank 0] step:5401/10000 train_time:523041ms step_avg:96.84ms +[2025-08-22 08:46:01] [Rank 0] step:5421/10000 train_time:525068ms step_avg:96.86ms +[2025-08-22 08:46:01] [Rank 0] step:5421/10000 train_time:525068ms step_avg:96.86ms +[2025-08-22 08:46:03] [Rank 0] step:5441/10000 train_time:527074ms step_avg:96.87ms 
+[2025-08-22 08:46:03] [Rank 0] step:5441/10000 train_time:527074ms step_avg:96.87ms +[2025-08-22 08:46:05] [Rank 0] step:5461/10000 train_time:529087ms step_avg:96.88ms +[2025-08-22 08:46:05] [Rank 0] step:5461/10000 train_time:529087ms step_avg:96.88ms +[2025-08-22 08:46:07] [Rank 0] step:5481/10000 train_time:531098ms step_avg:96.90ms +[2025-08-22 08:46:07] [Rank 0] step:5481/10000 train_time:531098ms step_avg:96.90ms +[2025-08-22 08:46:09] [Rank 0] step:5501/10000 train_time:533114ms step_avg:96.91ms +[2025-08-22 08:46:09] [Rank 0] step:5501/10000 train_time:533114ms step_avg:96.91ms +[2025-08-22 08:46:11] [Rank 0] step:5521/10000 train_time:535129ms step_avg:96.93ms +[2025-08-22 08:46:11] [Rank 0] step:5521/10000 train_time:535129ms step_avg:96.93ms +[2025-08-22 08:46:13] [Rank 0] step:5541/10000 train_time:537146ms step_avg:96.94ms +[2025-08-22 08:46:13] [Rank 0] step:5541/10000 train_time:537146ms step_avg:96.94ms +[2025-08-22 08:46:15] [Rank 0] step:5561/10000 train_time:539158ms step_avg:96.95ms +[2025-08-22 08:46:15] [Rank 0] step:5561/10000 train_time:539158ms step_avg:96.95ms +[2025-08-22 08:46:17] [Rank 0] step:5581/10000 train_time:541174ms step_avg:96.97ms +[2025-08-22 08:46:17] [Rank 0] step:5581/10000 train_time:541174ms step_avg:96.97ms +[2025-08-22 08:46:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:46:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:46:32] [Rank 0] PRINT: step:5600/10000 val_loss:3.6637 svd_entropy: attn_qk:H=0.9109,top10E=0.07,eRank=425.7,q75/q25=11.44 attn_vo:H=0.9167,top10E=0.06,eRank=441.9,q75/q25=11.49 mlp_w1:H=0.9727,top10E=0.04,eRank=640.8,q75/q25=2.71 mlp_w2:H=0.9681,top10E=0.05,eRank=621.6,q75/q25=2.81 vo_prod:H=0.8454,top10E=0.13,eRank=277.3,q75/q25=64.47 train_time:543149ms step_avg:96.99ms +[2025-08-22 08:46:32] [Rank 0] PRINT: step:5600/10000 val_loss:3.6637 svd_entropy: attn_qk:H=0.9109,top10E=0.07,eRank=425.7,q75/q25=11.44 attn_vo:H=0.9167,top10E=0.06,eRank=441.9,q75/q25=11.49 mlp_w1:H=0.9727,top10E=0.04,eRank=640.8,q75/q25=2.71 mlp_w2:H=0.9681,top10E=0.05,eRank=621.6,q75/q25=2.81 vo_prod:H=0.8454,top10E=0.13,eRank=277.3,q75/q25=64.47 train_time:543149ms step_avg:96.99ms +[2025-08-22 08:46:33] [Rank 0] step:5601/10000 train_time:543202ms step_avg:96.98ms +[2025-08-22 08:46:33] [Rank 0] step:5601/10000 train_time:543202ms step_avg:96.98ms +[2025-08-22 08:46:35] [Rank 0] step:5621/10000 train_time:545234ms step_avg:97.00ms +[2025-08-22 08:46:35] [Rank 0] step:5621/10000 train_time:545234ms step_avg:97.00ms +[2025-08-22 08:46:37] [Rank 0] step:5641/10000 train_time:547247ms step_avg:97.01ms +[2025-08-22 08:46:37] [Rank 0] step:5641/10000 train_time:547247ms step_avg:97.01ms +[2025-08-22 08:46:39] [Rank 0] step:5661/10000 train_time:549255ms step_avg:97.02ms +[2025-08-22 08:46:39] [Rank 0] step:5661/10000 train_time:549255ms step_avg:97.02ms +[2025-08-22 08:46:41] [Rank 0] step:5681/10000 train_time:551269ms step_avg:97.04ms +[2025-08-22 08:46:41] [Rank 0] step:5681/10000 train_time:551269ms step_avg:97.04ms +[2025-08-22 08:46:43] [Rank 0] step:5701/10000 train_time:553283ms step_avg:97.05ms +[2025-08-22 08:46:43] [Rank 0] step:5701/10000 train_time:553283ms step_avg:97.05ms +[2025-08-22 08:46:45] [Rank 0] step:5721/10000 train_time:555303ms step_avg:97.06ms +[2025-08-22 08:46:45] [Rank 0] step:5721/10000 train_time:555303ms step_avg:97.06ms +[2025-08-22 08:46:47] 
[Rank 0] step:5741/10000 train_time:557317ms step_avg:97.08ms +[2025-08-22 08:46:47] [Rank 0] step:5741/10000 train_time:557317ms step_avg:97.08ms +[2025-08-22 08:46:49] [Rank 0] step:5761/10000 train_time:559332ms step_avg:97.09ms +[2025-08-22 08:46:49] [Rank 0] step:5761/10000 train_time:559332ms step_avg:97.09ms +[2025-08-22 08:46:51] [Rank 0] step:5781/10000 train_time:561349ms step_avg:97.10ms +[2025-08-22 08:46:51] [Rank 0] step:5781/10000 train_time:561349ms step_avg:97.10ms +[2025-08-22 08:46:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:46:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:47:06] [Rank 0] PRINT: step:5800/10000 val_loss:3.6629 svd_entropy: attn_qk:H=0.9108,top10E=0.07,eRank=425.3,q75/q25=11.43 attn_vo:H=0.9167,top10E=0.06,eRank=442.0,q75/q25=11.52 mlp_w1:H=0.9727,top10E=0.04,eRank=640.6,q75/q25=2.71 mlp_w2:H=0.9681,top10E=0.05,eRank=621.5,q75/q25=2.81 vo_prod:H=0.8456,top10E=0.13,eRank=277.6,q75/q25=64.04 train_time:563324ms step_avg:97.12ms +[2025-08-22 08:47:06] [Rank 0] PRINT: step:5800/10000 val_loss:3.6629 svd_entropy: attn_qk:H=0.9108,top10E=0.07,eRank=425.3,q75/q25=11.43 attn_vo:H=0.9167,top10E=0.06,eRank=442.0,q75/q25=11.52 mlp_w1:H=0.9727,top10E=0.04,eRank=640.6,q75/q25=2.71 mlp_w2:H=0.9681,top10E=0.05,eRank=621.5,q75/q25=2.81 vo_prod:H=0.8456,top10E=0.13,eRank=277.6,q75/q25=64.04 train_time:563324ms step_avg:97.12ms +[2025-08-22 08:47:07] [Rank 0] step:5801/10000 train_time:563378ms step_avg:97.12ms +[2025-08-22 08:47:07] [Rank 0] step:5801/10000 train_time:563378ms step_avg:97.12ms +[2025-08-22 08:47:09] [Rank 0] step:5821/10000 train_time:565412ms step_avg:97.13ms +[2025-08-22 08:47:09] [Rank 0] step:5821/10000 train_time:565412ms step_avg:97.13ms +[2025-08-22 08:47:11] [Rank 0] step:5841/10000 train_time:567423ms step_avg:97.14ms 
+[2025-08-22 08:47:11] [Rank 0] step:5841/10000 train_time:567423ms step_avg:97.14ms +[2025-08-22 08:47:13] [Rank 0] step:5861/10000 train_time:569440ms step_avg:97.16ms +[2025-08-22 08:47:13] [Rank 0] step:5861/10000 train_time:569440ms step_avg:97.16ms +[2025-08-22 08:47:15] [Rank 0] step:5881/10000 train_time:571456ms step_avg:97.17ms +[2025-08-22 08:47:15] [Rank 0] step:5881/10000 train_time:571456ms step_avg:97.17ms +[2025-08-22 08:47:17] [Rank 0] step:5901/10000 train_time:573478ms step_avg:97.18ms +[2025-08-22 08:47:17] [Rank 0] step:5901/10000 train_time:573478ms step_avg:97.18ms +[2025-08-22 08:47:19] [Rank 0] step:5921/10000 train_time:575496ms step_avg:97.20ms +[2025-08-22 08:47:19] [Rank 0] step:5921/10000 train_time:575496ms step_avg:97.20ms +[2025-08-22 08:47:21] [Rank 0] step:5941/10000 train_time:577517ms step_avg:97.21ms +[2025-08-22 08:47:21] [Rank 0] step:5941/10000 train_time:577517ms step_avg:97.21ms +[2025-08-22 08:47:23] [Rank 0] step:5961/10000 train_time:579537ms step_avg:97.22ms +[2025-08-22 08:47:23] [Rank 0] step:5961/10000 train_time:579537ms step_avg:97.22ms +[2025-08-22 08:47:25] [Rank 0] step:5981/10000 train_time:581559ms step_avg:97.23ms +[2025-08-22 08:47:25] [Rank 0] step:5981/10000 train_time:581559ms step_avg:97.23ms +[2025-08-22 08:47:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:47:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:47:40] [Rank 0] PRINT: step:6000/10000 val_loss:3.6433 svd_entropy: attn_qk:H=0.9106,top10E=0.07,eRank=424.9,q75/q25=11.46 attn_vo:H=0.9167,top10E=0.06,eRank=442.0,q75/q25=11.50 mlp_w1:H=0.9727,top10E=0.04,eRank=640.5,q75/q25=2.71 mlp_w2:H=0.9681,top10E=0.05,eRank=621.5,q75/q25=2.81 vo_prod:H=0.8457,top10E=0.13,eRank=277.9,q75/q25=64.23 train_time:583536ms step_avg:97.26ms +[2025-08-22 08:47:40] [Rank 0] PRINT: step:6000/10000 val_loss:3.6433 svd_entropy: attn_qk:H=0.9106,top10E=0.07,eRank=424.9,q75/q25=11.46 attn_vo:H=0.9167,top10E=0.06,eRank=442.0,q75/q25=11.50 mlp_w1:H=0.9727,top10E=0.04,eRank=640.5,q75/q25=2.71 mlp_w2:H=0.9681,top10E=0.05,eRank=621.5,q75/q25=2.81 vo_prod:H=0.8457,top10E=0.13,eRank=277.9,q75/q25=64.23 train_time:583536ms step_avg:97.26ms +[2025-08-22 08:47:41] [Rank 0] step:6001/10000 train_time:583589ms step_avg:97.25ms +[2025-08-22 08:47:41] [Rank 0] step:6001/10000 train_time:583589ms step_avg:97.25ms +[2025-08-22 08:47:43] [Rank 0] step:6021/10000 train_time:585605ms step_avg:97.26ms +[2025-08-22 08:47:43] [Rank 0] step:6021/10000 train_time:585605ms step_avg:97.26ms +[2025-08-22 08:47:45] [Rank 0] step:6041/10000 train_time:587623ms step_avg:97.27ms +[2025-08-22 08:47:45] [Rank 0] step:6041/10000 train_time:587623ms step_avg:97.27ms +[2025-08-22 08:47:47] [Rank 0] step:6061/10000 train_time:589639ms step_avg:97.28ms +[2025-08-22 08:47:47] [Rank 0] step:6061/10000 train_time:589639ms step_avg:97.28ms +[2025-08-22 08:47:49] [Rank 0] step:6081/10000 train_time:591652ms step_avg:97.30ms +[2025-08-22 08:47:49] [Rank 0] step:6081/10000 train_time:591652ms step_avg:97.30ms +[2025-08-22 08:47:51] [Rank 0] step:6101/10000 train_time:593673ms step_avg:97.31ms +[2025-08-22 08:47:51] [Rank 0] step:6101/10000 train_time:593673ms step_avg:97.31ms +[2025-08-22 08:47:53] [Rank 0] step:6121/10000 train_time:596086ms step_avg:97.38ms +[2025-08-22 08:47:53] [Rank 0] step:6121/10000 train_time:596086ms step_avg:97.38ms +[2025-08-22 08:47:55] 
[Rank 0] step:6141/10000 train_time:598115ms step_avg:97.40ms +[2025-08-22 08:47:55] [Rank 0] step:6141/10000 train_time:598115ms step_avg:97.40ms +[2025-08-22 08:47:57] [Rank 0] step:6161/10000 train_time:600134ms step_avg:97.41ms +[2025-08-22 08:47:57] [Rank 0] step:6161/10000 train_time:600134ms step_avg:97.41ms +[2025-08-22 08:47:59] [Rank 0] step:6181/10000 train_time:602219ms step_avg:97.43ms +[2025-08-22 08:47:59] [Rank 0] step:6181/10000 train_time:602219ms step_avg:97.43ms +[2025-08-22 08:48:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:48:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:48:15] [Rank 0] PRINT: step:6200/10000 val_loss:3.6298 svd_entropy: attn_qk:H=0.9105,top10E=0.07,eRank=424.5,q75/q25=11.46 attn_vo:H=0.9167,top10E=0.06,eRank=442.1,q75/q25=11.49 mlp_w1:H=0.9726,top10E=0.04,eRank=640.3,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.4,q75/q25=2.81 vo_prod:H=0.8458,top10E=0.13,eRank=278.0,q75/q25=64.44 train_time:604267ms step_avg:97.46ms +[2025-08-22 08:48:15] [Rank 0] PRINT: step:6200/10000 val_loss:3.6298 svd_entropy: attn_qk:H=0.9105,top10E=0.07,eRank=424.5,q75/q25=11.46 attn_vo:H=0.9167,top10E=0.06,eRank=442.1,q75/q25=11.49 mlp_w1:H=0.9726,top10E=0.04,eRank=640.3,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.4,q75/q25=2.81 vo_prod:H=0.8458,top10E=0.13,eRank=278.0,q75/q25=64.44 train_time:604267ms step_avg:97.46ms +[2025-08-22 08:48:15] [Rank 0] step:6201/10000 train_time:604321ms step_avg:97.46ms +[2025-08-22 08:48:15] [Rank 0] step:6201/10000 train_time:604321ms step_avg:97.46ms +[2025-08-22 08:48:17] [Rank 0] step:6221/10000 train_time:606342ms step_avg:97.47ms +[2025-08-22 08:48:17] [Rank 0] step:6221/10000 train_time:606342ms step_avg:97.47ms +[2025-08-22 08:48:19] [Rank 0] step:6241/10000 train_time:608353ms step_avg:97.48ms 
+[2025-08-22 08:48:19] [Rank 0] step:6241/10000 train_time:608353ms step_avg:97.48ms +[2025-08-22 08:48:21] [Rank 0] step:6261/10000 train_time:610368ms step_avg:97.49ms +[2025-08-22 08:48:21] [Rank 0] step:6261/10000 train_time:610368ms step_avg:97.49ms +[2025-08-22 08:48:23] [Rank 0] step:6281/10000 train_time:612386ms step_avg:97.50ms +[2025-08-22 08:48:23] [Rank 0] step:6281/10000 train_time:612386ms step_avg:97.50ms +[2025-08-22 08:48:25] [Rank 0] step:6301/10000 train_time:614404ms step_avg:97.51ms +[2025-08-22 08:48:25] [Rank 0] step:6301/10000 train_time:614404ms step_avg:97.51ms +[2025-08-22 08:48:27] [Rank 0] step:6321/10000 train_time:616424ms step_avg:97.52ms +[2025-08-22 08:48:27] [Rank 0] step:6321/10000 train_time:616424ms step_avg:97.52ms +[2025-08-22 08:48:29] [Rank 0] step:6341/10000 train_time:618441ms step_avg:97.53ms +[2025-08-22 08:48:29] [Rank 0] step:6341/10000 train_time:618441ms step_avg:97.53ms +[2025-08-22 08:48:31] [Rank 0] step:6361/10000 train_time:620463ms step_avg:97.54ms +[2025-08-22 08:48:31] [Rank 0] step:6361/10000 train_time:620463ms step_avg:97.54ms +[2025-08-22 08:48:33] [Rank 0] step:6381/10000 train_time:622481ms step_avg:97.55ms +[2025-08-22 08:48:33] [Rank 0] step:6381/10000 train_time:622481ms step_avg:97.55ms +[2025-08-22 08:48:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:48:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:48:49] [Rank 0] PRINT: step:6400/10000 val_loss:3.6181 svd_entropy: attn_qk:H=0.9103,top10E=0.07,eRank=424.0,q75/q25=11.47 attn_vo:H=0.9167,top10E=0.06,eRank=442.1,q75/q25=11.52 mlp_w1:H=0.9726,top10E=0.04,eRank=640.2,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.3,q75/q25=2.81 vo_prod:H=0.8459,top10E=0.13,eRank=278.1,q75/q25=64.28 train_time:624455ms step_avg:97.57ms +[2025-08-22 08:48:49] [Rank 0] PRINT: step:6400/10000 val_loss:3.6181 svd_entropy: attn_qk:H=0.9103,top10E=0.07,eRank=424.0,q75/q25=11.47 attn_vo:H=0.9167,top10E=0.06,eRank=442.1,q75/q25=11.52 mlp_w1:H=0.9726,top10E=0.04,eRank=640.2,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.3,q75/q25=2.81 vo_prod:H=0.8459,top10E=0.13,eRank=278.1,q75/q25=64.28 train_time:624455ms step_avg:97.57ms +[2025-08-22 08:48:49] [Rank 0] step:6401/10000 train_time:624509ms step_avg:97.56ms +[2025-08-22 08:48:49] [Rank 0] step:6401/10000 train_time:624509ms step_avg:97.56ms +[2025-08-22 08:48:51] [Rank 0] step:6421/10000 train_time:626552ms step_avg:97.58ms +[2025-08-22 08:48:51] [Rank 0] step:6421/10000 train_time:626552ms step_avg:97.58ms +[2025-08-22 08:48:53] [Rank 0] step:6441/10000 train_time:628571ms step_avg:97.59ms +[2025-08-22 08:48:53] [Rank 0] step:6441/10000 train_time:628571ms step_avg:97.59ms +[2025-08-22 08:48:55] [Rank 0] step:6461/10000 train_time:630592ms step_avg:97.60ms +[2025-08-22 08:48:55] [Rank 0] step:6461/10000 train_time:630592ms step_avg:97.60ms +[2025-08-22 08:48:57] [Rank 0] step:6481/10000 train_time:632617ms step_avg:97.61ms +[2025-08-22 08:48:57] [Rank 0] step:6481/10000 train_time:632617ms step_avg:97.61ms +[2025-08-22 08:48:59] [Rank 0] step:6501/10000 train_time:634634ms step_avg:97.62ms +[2025-08-22 08:48:59] [Rank 0] step:6501/10000 train_time:634634ms step_avg:97.62ms +[2025-08-22 08:49:01] [Rank 0] step:6521/10000 train_time:636715ms step_avg:97.64ms +[2025-08-22 08:49:01] [Rank 0] step:6521/10000 train_time:636715ms step_avg:97.64ms +[2025-08-22 08:49:03] 
[Rank 0] step:6541/10000 train_time:638812ms step_avg:97.66ms +[2025-08-22 08:49:03] [Rank 0] step:6541/10000 train_time:638812ms step_avg:97.66ms +[2025-08-22 08:49:05] [Rank 0] step:6561/10000 train_time:640834ms step_avg:97.67ms +[2025-08-22 08:49:05] [Rank 0] step:6561/10000 train_time:640834ms step_avg:97.67ms +[2025-08-22 08:49:07] [Rank 0] step:6581/10000 train_time:642850ms step_avg:97.68ms +[2025-08-22 08:49:07] [Rank 0] step:6581/10000 train_time:642850ms step_avg:97.68ms +[2025-08-22 08:49:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:49:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:49:23] [Rank 0] PRINT: step:6600/10000 val_loss:3.6045 svd_entropy: attn_qk:H=0.9101,top10E=0.08,eRank=423.6,q75/q25=11.50 attn_vo:H=0.9168,top10E=0.06,eRank=442.2,q75/q25=11.49 mlp_w1:H=0.9726,top10E=0.04,eRank=640.1,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.3,q75/q25=2.80 vo_prod:H=0.8460,top10E=0.13,eRank=278.4,q75/q25=63.85 train_time:644831ms step_avg:97.70ms +[2025-08-22 08:49:23] [Rank 0] PRINT: step:6600/10000 val_loss:3.6045 svd_entropy: attn_qk:H=0.9101,top10E=0.08,eRank=423.6,q75/q25=11.50 attn_vo:H=0.9168,top10E=0.06,eRank=442.2,q75/q25=11.49 mlp_w1:H=0.9726,top10E=0.04,eRank=640.1,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.3,q75/q25=2.80 vo_prod:H=0.8460,top10E=0.13,eRank=278.4,q75/q25=63.85 train_time:644831ms step_avg:97.70ms +[2025-08-22 08:49:23] [Rank 0] step:6601/10000 train_time:644884ms step_avg:97.69ms +[2025-08-22 08:49:23] [Rank 0] step:6601/10000 train_time:644884ms step_avg:97.69ms +[2025-08-22 08:49:25] [Rank 0] step:6621/10000 train_time:646908ms step_avg:97.71ms +[2025-08-22 08:49:25] [Rank 0] step:6621/10000 train_time:646908ms step_avg:97.71ms +[2025-08-22 08:49:27] [Rank 0] step:6641/10000 train_time:648940ms step_avg:97.72ms 
+[2025-08-22 08:49:27] [Rank 0] step:6641/10000 train_time:648940ms step_avg:97.72ms +[2025-08-22 08:49:29] [Rank 0] step:6661/10000 train_time:650963ms step_avg:97.73ms +[2025-08-22 08:49:29] [Rank 0] step:6661/10000 train_time:650963ms step_avg:97.73ms +[2025-08-22 08:49:32] [Rank 0] step:6681/10000 train_time:653000ms step_avg:97.74ms +[2025-08-22 08:49:32] [Rank 0] step:6681/10000 train_time:653000ms step_avg:97.74ms +[2025-08-22 08:49:34] [Rank 0] step:6701/10000 train_time:655061ms step_avg:97.76ms +[2025-08-22 08:49:34] [Rank 0] step:6701/10000 train_time:655061ms step_avg:97.76ms +[2025-08-22 08:49:36] [Rank 0] step:6721/10000 train_time:657113ms step_avg:97.77ms +[2025-08-22 08:49:36] [Rank 0] step:6721/10000 train_time:657113ms step_avg:97.77ms +[2025-08-22 08:49:38] [Rank 0] step:6741/10000 train_time:659162ms step_avg:97.78ms +[2025-08-22 08:49:38] [Rank 0] step:6741/10000 train_time:659162ms step_avg:97.78ms +[2025-08-22 08:49:40] [Rank 0] step:6761/10000 train_time:661210ms step_avg:97.80ms +[2025-08-22 08:49:40] [Rank 0] step:6761/10000 train_time:661210ms step_avg:97.80ms +[2025-08-22 08:49:42] [Rank 0] step:6781/10000 train_time:663262ms step_avg:97.81ms +[2025-08-22 08:49:42] [Rank 0] step:6781/10000 train_time:663262ms step_avg:97.81ms +[2025-08-22 08:49:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:49:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:49:58] [Rank 0] PRINT: step:6800/10000 val_loss:3.5896 svd_entropy: attn_qk:H=0.9099,top10E=0.08,eRank=423.0,q75/q25=11.55 attn_vo:H=0.9168,top10E=0.06,eRank=442.2,q75/q25=11.48 mlp_w1:H=0.9726,top10E=0.04,eRank=640.1,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.2,q75/q25=2.81 vo_prod:H=0.8461,top10E=0.13,eRank=278.6,q75/q25=63.90 train_time:665274ms step_avg:97.83ms +[2025-08-22 08:49:58] [Rank 0] PRINT: step:6800/10000 val_loss:3.5896 svd_entropy: attn_qk:H=0.9099,top10E=0.08,eRank=423.0,q75/q25=11.55 attn_vo:H=0.9168,top10E=0.06,eRank=442.2,q75/q25=11.48 mlp_w1:H=0.9726,top10E=0.04,eRank=640.1,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.2,q75/q25=2.81 vo_prod:H=0.8461,top10E=0.13,eRank=278.6,q75/q25=63.90 train_time:665274ms step_avg:97.83ms +[2025-08-22 08:49:58] [Rank 0] step:6801/10000 train_time:665328ms step_avg:97.83ms +[2025-08-22 08:49:58] [Rank 0] step:6801/10000 train_time:665328ms step_avg:97.83ms +[2025-08-22 08:50:00] [Rank 0] step:6821/10000 train_time:667384ms step_avg:97.84ms +[2025-08-22 08:50:00] [Rank 0] step:6821/10000 train_time:667384ms step_avg:97.84ms +[2025-08-22 08:50:02] [Rank 0] step:6841/10000 train_time:669426ms step_avg:97.85ms +[2025-08-22 08:50:02] [Rank 0] step:6841/10000 train_time:669426ms step_avg:97.85ms +[2025-08-22 08:50:04] [Rank 0] step:6861/10000 train_time:671654ms step_avg:97.89ms +[2025-08-22 08:50:04] [Rank 0] step:6861/10000 train_time:671654ms step_avg:97.89ms +[2025-08-22 08:50:06] [Rank 0] step:6881/10000 train_time:673680ms step_avg:97.90ms +[2025-08-22 08:50:06] [Rank 0] step:6881/10000 train_time:673680ms step_avg:97.90ms +[2025-08-22 08:50:08] [Rank 0] step:6901/10000 train_time:675760ms step_avg:97.92ms +[2025-08-22 08:50:08] [Rank 0] step:6901/10000 train_time:675760ms step_avg:97.92ms +[2025-08-22 08:50:10] [Rank 0] step:6921/10000 train_time:677794ms step_avg:97.93ms +[2025-08-22 08:50:10] [Rank 0] step:6921/10000 train_time:677794ms step_avg:97.93ms +[2025-08-22 08:50:12] 
[Rank 0] step:6941/10000 train_time:679844ms step_avg:97.95ms +[2025-08-22 08:50:12] [Rank 0] step:6941/10000 train_time:679844ms step_avg:97.95ms +[2025-08-22 08:50:14] [Rank 0] step:6961/10000 train_time:681906ms step_avg:97.96ms +[2025-08-22 08:50:14] [Rank 0] step:6961/10000 train_time:681906ms step_avg:97.96ms +[2025-08-22 08:50:17] [Rank 0] step:6981/10000 train_time:683954ms step_avg:97.97ms +[2025-08-22 08:50:17] [Rank 0] step:6981/10000 train_time:683954ms step_avg:97.97ms +[2025-08-22 08:50:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:50:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:50:33] [Rank 0] PRINT: step:7000/10000 val_loss:3.5761 svd_entropy: attn_qk:H=0.9097,top10E=0.08,eRank=422.6,q75/q25=11.57 attn_vo:H=0.9168,top10E=0.06,eRank=442.3,q75/q25=11.48 mlp_w1:H=0.9725,top10E=0.04,eRank=640.0,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.2,q75/q25=2.81 vo_prod:H=0.8462,top10E=0.13,eRank=278.8,q75/q25=63.81 train_time:685962ms step_avg:97.99ms +[2025-08-22 08:50:33] [Rank 0] PRINT: step:7000/10000 val_loss:3.5761 svd_entropy: attn_qk:H=0.9097,top10E=0.08,eRank=422.6,q75/q25=11.57 attn_vo:H=0.9168,top10E=0.06,eRank=442.3,q75/q25=11.48 mlp_w1:H=0.9725,top10E=0.04,eRank=640.0,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.2,q75/q25=2.81 vo_prod:H=0.8462,top10E=0.13,eRank=278.8,q75/q25=63.81 train_time:685962ms step_avg:97.99ms +[2025-08-22 08:50:33] [Rank 0] step:7001/10000 train_time:686017ms step_avg:97.99ms +[2025-08-22 08:50:33] [Rank 0] step:7001/10000 train_time:686017ms step_avg:97.99ms +[2025-08-22 08:50:35] [Rank 0] step:7021/10000 train_time:688065ms step_avg:98.00ms +[2025-08-22 08:50:35] [Rank 0] step:7021/10000 train_time:688065ms step_avg:98.00ms +[2025-08-22 08:50:37] [Rank 0] step:7041/10000 train_time:690105ms step_avg:98.01ms 
+[2025-08-22 08:50:37] [Rank 0] step:7041/10000 train_time:690105ms step_avg:98.01ms +[2025-08-22 08:50:39] [Rank 0] step:7061/10000 train_time:692149ms step_avg:98.02ms +[2025-08-22 08:50:39] [Rank 0] step:7061/10000 train_time:692149ms step_avg:98.02ms +[2025-08-22 08:50:41] [Rank 0] step:7081/10000 train_time:694193ms step_avg:98.04ms +[2025-08-22 08:50:41] [Rank 0] step:7081/10000 train_time:694193ms step_avg:98.04ms +[2025-08-22 08:50:43] [Rank 0] step:7101/10000 train_time:696242ms step_avg:98.05ms +[2025-08-22 08:50:43] [Rank 0] step:7101/10000 train_time:696242ms step_avg:98.05ms +[2025-08-22 08:50:45] [Rank 0] step:7121/10000 train_time:698281ms step_avg:98.06ms +[2025-08-22 08:50:45] [Rank 0] step:7121/10000 train_time:698281ms step_avg:98.06ms +[2025-08-22 08:50:47] [Rank 0] step:7141/10000 train_time:700326ms step_avg:98.07ms +[2025-08-22 08:50:47] [Rank 0] step:7141/10000 train_time:700326ms step_avg:98.07ms +[2025-08-22 08:50:49] [Rank 0] step:7161/10000 train_time:702375ms step_avg:98.08ms +[2025-08-22 08:50:49] [Rank 0] step:7161/10000 train_time:702375ms step_avg:98.08ms +[2025-08-22 08:50:51] [Rank 0] step:7181/10000 train_time:704424ms step_avg:98.10ms +[2025-08-22 08:50:51] [Rank 0] step:7181/10000 train_time:704424ms step_avg:98.10ms +[2025-08-22 08:50:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:50:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:51:07] [Rank 0] PRINT: step:7200/10000 val_loss:3.5636 svd_entropy: attn_qk:H=0.9096,top10E=0.08,eRank=422.2,q75/q25=11.64 attn_vo:H=0.9168,top10E=0.06,eRank=442.4,q75/q25=11.44 mlp_w1:H=0.9725,top10E=0.04,eRank=639.9,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.2,q75/q25=2.81 vo_prod:H=0.8464,top10E=0.13,eRank=279.1,q75/q25=63.80 train_time:706431ms step_avg:98.12ms +[2025-08-22 08:51:07] [Rank 0] PRINT: step:7200/10000 val_loss:3.5636 svd_entropy: attn_qk:H=0.9096,top10E=0.08,eRank=422.2,q75/q25=11.64 attn_vo:H=0.9168,top10E=0.06,eRank=442.4,q75/q25=11.44 mlp_w1:H=0.9725,top10E=0.04,eRank=639.9,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.2,q75/q25=2.81 vo_prod:H=0.8464,top10E=0.13,eRank=279.1,q75/q25=63.80 train_time:706431ms step_avg:98.12ms +[2025-08-22 08:51:07] [Rank 0] step:7201/10000 train_time:706485ms step_avg:98.11ms +[2025-08-22 08:51:07] [Rank 0] step:7201/10000 train_time:706485ms step_avg:98.11ms +[2025-08-22 08:51:09] [Rank 0] step:7221/10000 train_time:708627ms step_avg:98.13ms +[2025-08-22 08:51:09] [Rank 0] step:7221/10000 train_time:708627ms step_avg:98.13ms +[2025-08-22 08:51:11] [Rank 0] step:7241/10000 train_time:710703ms step_avg:98.15ms +[2025-08-22 08:51:11] [Rank 0] step:7241/10000 train_time:710703ms step_avg:98.15ms +[2025-08-22 08:51:13] [Rank 0] step:7261/10000 train_time:712742ms step_avg:98.16ms +[2025-08-22 08:51:13] [Rank 0] step:7261/10000 train_time:712742ms step_avg:98.16ms +[2025-08-22 08:51:15] [Rank 0] step:7281/10000 train_time:714795ms step_avg:98.17ms +[2025-08-22 08:51:15] [Rank 0] step:7281/10000 train_time:714795ms step_avg:98.17ms +[2025-08-22 08:51:18] [Rank 0] step:7301/10000 train_time:716843ms step_avg:98.18ms +[2025-08-22 08:51:18] [Rank 0] step:7301/10000 train_time:716843ms step_avg:98.18ms +[2025-08-22 08:51:20] [Rank 0] step:7321/10000 train_time:718900ms step_avg:98.20ms +[2025-08-22 08:51:20] [Rank 0] step:7321/10000 train_time:718900ms step_avg:98.20ms +[2025-08-22 08:51:22] 
[Rank 0] step:7341/10000 train_time:720944ms step_avg:98.21ms +[2025-08-22 08:51:22] [Rank 0] step:7341/10000 train_time:720944ms step_avg:98.21ms +[2025-08-22 08:51:24] [Rank 0] step:7361/10000 train_time:723003ms step_avg:98.22ms +[2025-08-22 08:51:24] [Rank 0] step:7361/10000 train_time:723003ms step_avg:98.22ms +[2025-08-22 08:51:26] [Rank 0] step:7381/10000 train_time:725061ms step_avg:98.23ms +[2025-08-22 08:51:26] [Rank 0] step:7381/10000 train_time:725061ms step_avg:98.23ms +[2025-08-22 08:51:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:51:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:51:42] [Rank 0] PRINT: step:7400/10000 val_loss:3.5482 svd_entropy: attn_qk:H=0.9094,top10E=0.08,eRank=421.8,q75/q25=11.65 attn_vo:H=0.9168,top10E=0.06,eRank=442.5,q75/q25=11.47 mlp_w1:H=0.9725,top10E=0.04,eRank=639.9,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.1,q75/q25=2.81 vo_prod:H=0.8465,top10E=0.13,eRank=279.3,q75/q25=63.34 train_time:727056ms step_avg:98.25ms +[2025-08-22 08:51:42] [Rank 0] PRINT: step:7400/10000 val_loss:3.5482 svd_entropy: attn_qk:H=0.9094,top10E=0.08,eRank=421.8,q75/q25=11.65 attn_vo:H=0.9168,top10E=0.06,eRank=442.5,q75/q25=11.47 mlp_w1:H=0.9725,top10E=0.04,eRank=639.9,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.1,q75/q25=2.81 vo_prod:H=0.8465,top10E=0.13,eRank=279.3,q75/q25=63.34 train_time:727056ms step_avg:98.25ms +[2025-08-22 08:51:42] [Rank 0] step:7401/10000 train_time:727110ms step_avg:98.24ms +[2025-08-22 08:51:42] [Rank 0] step:7401/10000 train_time:727110ms step_avg:98.24ms +[2025-08-22 08:51:44] [Rank 0] step:7421/10000 train_time:729173ms step_avg:98.26ms +[2025-08-22 08:51:44] [Rank 0] step:7421/10000 train_time:729173ms step_avg:98.26ms +[2025-08-22 08:51:46] [Rank 0] step:7441/10000 train_time:731224ms step_avg:98.27ms 
+[2025-08-22 08:51:46] [Rank 0] step:7441/10000 train_time:731224ms step_avg:98.27ms +[2025-08-22 08:51:48] [Rank 0] step:7461/10000 train_time:733276ms step_avg:98.28ms +[2025-08-22 08:51:48] [Rank 0] step:7461/10000 train_time:733276ms step_avg:98.28ms +[2025-08-22 08:51:50] [Rank 0] step:7481/10000 train_time:735335ms step_avg:98.29ms +[2025-08-22 08:51:50] [Rank 0] step:7481/10000 train_time:735335ms step_avg:98.29ms +[2025-08-22 08:51:52] [Rank 0] step:7501/10000 train_time:737393ms step_avg:98.31ms +[2025-08-22 08:51:52] [Rank 0] step:7501/10000 train_time:737393ms step_avg:98.31ms +[2025-08-22 08:51:54] [Rank 0] step:7521/10000 train_time:739451ms step_avg:98.32ms +[2025-08-22 08:51:54] [Rank 0] step:7521/10000 train_time:739451ms step_avg:98.32ms +[2025-08-22 08:51:56] [Rank 0] step:7541/10000 train_time:741518ms step_avg:98.33ms +[2025-08-22 08:51:56] [Rank 0] step:7541/10000 train_time:741518ms step_avg:98.33ms +[2025-08-22 08:51:58] [Rank 0] step:7561/10000 train_time:743565ms step_avg:98.34ms +[2025-08-22 08:51:58] [Rank 0] step:7561/10000 train_time:743565ms step_avg:98.34ms +[2025-08-22 08:52:00] [Rank 0] step:7581/10000 train_time:745640ms step_avg:98.36ms +[2025-08-22 08:52:00] [Rank 0] step:7581/10000 train_time:745640ms step_avg:98.36ms +[2025-08-22 08:52:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:52:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:52:16] [Rank 0] PRINT: step:7600/10000 val_loss:3.5382 svd_entropy: attn_qk:H=0.9093,top10E=0.08,eRank=421.5,q75/q25=11.69 attn_vo:H=0.9169,top10E=0.06,eRank=442.5,q75/q25=11.46 mlp_w1:H=0.9725,top10E=0.04,eRank=639.9,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.2,q75/q25=2.80 vo_prod:H=0.8466,top10E=0.13,eRank=279.5,q75/q25=63.56 train_time:747663ms step_avg:98.38ms +[2025-08-22 08:52:16] [Rank 0] PRINT: step:7600/10000 val_loss:3.5382 svd_entropy: attn_qk:H=0.9093,top10E=0.08,eRank=421.5,q75/q25=11.69 attn_vo:H=0.9169,top10E=0.06,eRank=442.5,q75/q25=11.46 mlp_w1:H=0.9725,top10E=0.04,eRank=639.9,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.2,q75/q25=2.80 vo_prod:H=0.8466,top10E=0.13,eRank=279.5,q75/q25=63.56 train_time:747663ms step_avg:98.38ms +[2025-08-22 08:52:16] [Rank 0] step:7601/10000 train_time:747717ms step_avg:98.37ms +[2025-08-22 08:52:16] [Rank 0] step:7601/10000 train_time:747717ms step_avg:98.37ms +[2025-08-22 08:52:19] [Rank 0] step:7621/10000 train_time:749786ms step_avg:98.38ms +[2025-08-22 08:52:19] [Rank 0] step:7621/10000 train_time:749786ms step_avg:98.38ms +[2025-08-22 08:52:21] [Rank 0] step:7641/10000 train_time:751832ms step_avg:98.39ms +[2025-08-22 08:52:21] [Rank 0] step:7641/10000 train_time:751832ms step_avg:98.39ms +[2025-08-22 08:52:23] [Rank 0] step:7661/10000 train_time:753886ms step_avg:98.41ms +[2025-08-22 08:52:23] [Rank 0] step:7661/10000 train_time:753886ms step_avg:98.41ms +[2025-08-22 08:52:25] [Rank 0] step:7681/10000 train_time:755937ms step_avg:98.42ms +[2025-08-22 08:52:25] [Rank 0] step:7681/10000 train_time:755937ms step_avg:98.42ms +[2025-08-22 08:52:27] [Rank 0] step:7701/10000 train_time:757988ms step_avg:98.43ms +[2025-08-22 08:52:27] [Rank 0] step:7701/10000 train_time:757988ms step_avg:98.43ms +[2025-08-22 08:52:29] [Rank 0] step:7721/10000 train_time:760056ms step_avg:98.44ms +[2025-08-22 08:52:29] [Rank 0] step:7721/10000 train_time:760056ms step_avg:98.44ms +[2025-08-22 08:52:31] 
[Rank 0] step:7741/10000 train_time:762112ms step_avg:98.45ms +[2025-08-22 08:52:31] [Rank 0] step:7741/10000 train_time:762112ms step_avg:98.45ms +[2025-08-22 08:52:33] [Rank 0] step:7761/10000 train_time:764176ms step_avg:98.46ms +[2025-08-22 08:52:33] [Rank 0] step:7761/10000 train_time:764176ms step_avg:98.46ms +[2025-08-22 08:52:35] [Rank 0] step:7781/10000 train_time:766231ms step_avg:98.47ms +[2025-08-22 08:52:35] [Rank 0] step:7781/10000 train_time:766231ms step_avg:98.47ms +[2025-08-22 08:52:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:52:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:52:51] [Rank 0] PRINT: step:7800/10000 val_loss:3.5252 svd_entropy: attn_qk:H=0.9092,top10E=0.08,eRank=421.1,q75/q25=11.70 attn_vo:H=0.9169,top10E=0.06,eRank=442.6,q75/q25=11.47 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.2,q75/q25=2.80 vo_prod:H=0.8466,top10E=0.13,eRank=279.6,q75/q25=63.14 train_time:768252ms step_avg:98.49ms +[2025-08-22 08:52:51] [Rank 0] PRINT: step:7800/10000 val_loss:3.5252 svd_entropy: attn_qk:H=0.9092,top10E=0.08,eRank=421.1,q75/q25=11.70 attn_vo:H=0.9169,top10E=0.06,eRank=442.6,q75/q25=11.47 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.2,q75/q25=2.80 vo_prod:H=0.8466,top10E=0.13,eRank=279.6,q75/q25=63.14 train_time:768252ms step_avg:98.49ms +[2025-08-22 08:52:51] [Rank 0] step:7801/10000 train_time:768307ms step_avg:98.49ms +[2025-08-22 08:52:51] [Rank 0] step:7801/10000 train_time:768307ms step_avg:98.49ms +[2025-08-22 08:52:53] [Rank 0] step:7821/10000 train_time:770374ms step_avg:98.50ms +[2025-08-22 08:52:53] [Rank 0] step:7821/10000 train_time:770374ms step_avg:98.50ms +[2025-08-22 08:52:55] [Rank 0] step:7841/10000 train_time:772421ms step_avg:98.51ms 
+[2025-08-22 08:52:55] [Rank 0] step:7841/10000 train_time:772421ms step_avg:98.51ms +[2025-08-22 08:52:57] [Rank 0] step:7861/10000 train_time:774474ms step_avg:98.52ms +[2025-08-22 08:52:57] [Rank 0] step:7861/10000 train_time:774474ms step_avg:98.52ms +[2025-08-22 08:52:59] [Rank 0] step:7881/10000 train_time:776533ms step_avg:98.53ms +[2025-08-22 08:52:59] [Rank 0] step:7881/10000 train_time:776533ms step_avg:98.53ms +[2025-08-22 08:53:01] [Rank 0] step:7901/10000 train_time:778581ms step_avg:98.54ms +[2025-08-22 08:53:01] [Rank 0] step:7901/10000 train_time:778581ms step_avg:98.54ms +[2025-08-22 08:53:03] [Rank 0] step:7921/10000 train_time:780639ms step_avg:98.55ms +[2025-08-22 08:53:03] [Rank 0] step:7921/10000 train_time:780639ms step_avg:98.55ms +[2025-08-22 08:53:06] [Rank 0] step:7941/10000 train_time:782697ms step_avg:98.56ms +[2025-08-22 08:53:06] [Rank 0] step:7941/10000 train_time:782697ms step_avg:98.56ms +[2025-08-22 08:53:08] [Rank 0] step:7961/10000 train_time:784754ms step_avg:98.57ms +[2025-08-22 08:53:08] [Rank 0] step:7961/10000 train_time:784754ms step_avg:98.57ms +[2025-08-22 08:53:10] [Rank 0] step:7981/10000 train_time:786799ms step_avg:98.58ms +[2025-08-22 08:53:10] [Rank 0] step:7981/10000 train_time:786799ms step_avg:98.58ms +[2025-08-22 08:53:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:53:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:53:26] [Rank 0] PRINT: step:8000/10000 val_loss:3.5116 svd_entropy: attn_qk:H=0.9090,top10E=0.08,eRank=420.8,q75/q25=11.69 attn_vo:H=0.9169,top10E=0.06,eRank=442.6,q75/q25=11.45 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.2,q75/q25=2.80 vo_prod:H=0.8468,top10E=0.13,eRank=279.8,q75/q25=62.77 train_time:788815ms step_avg:98.60ms +[2025-08-22 08:53:26] [Rank 0] PRINT: step:8000/10000 val_loss:3.5116 svd_entropy: attn_qk:H=0.9090,top10E=0.08,eRank=420.8,q75/q25=11.69 attn_vo:H=0.9169,top10E=0.06,eRank=442.6,q75/q25=11.45 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.2,q75/q25=2.80 vo_prod:H=0.8468,top10E=0.13,eRank=279.8,q75/q25=62.77 train_time:788815ms step_avg:98.60ms +[2025-08-22 08:53:26] [Rank 0] step:8001/10000 train_time:788868ms step_avg:98.60ms +[2025-08-22 08:53:26] [Rank 0] step:8001/10000 train_time:788868ms step_avg:98.60ms +[2025-08-22 08:53:28] [Rank 0] step:8021/10000 train_time:790932ms step_avg:98.61ms +[2025-08-22 08:53:28] [Rank 0] step:8021/10000 train_time:790932ms step_avg:98.61ms +[2025-08-22 08:53:30] [Rank 0] step:8041/10000 train_time:792986ms step_avg:98.62ms +[2025-08-22 08:53:30] [Rank 0] step:8041/10000 train_time:792986ms step_avg:98.62ms +[2025-08-22 08:53:32] [Rank 0] step:8061/10000 train_time:795036ms step_avg:98.63ms +[2025-08-22 08:53:32] [Rank 0] step:8061/10000 train_time:795036ms step_avg:98.63ms +[2025-08-22 08:53:34] [Rank 0] step:8081/10000 train_time:797073ms step_avg:98.64ms +[2025-08-22 08:53:34] [Rank 0] step:8081/10000 train_time:797073ms step_avg:98.64ms +[2025-08-22 08:53:36] [Rank 0] step:8101/10000 train_time:799131ms step_avg:98.65ms +[2025-08-22 08:53:36] [Rank 0] step:8101/10000 train_time:799131ms step_avg:98.65ms +[2025-08-22 08:53:38] [Rank 0] step:8121/10000 train_time:801181ms step_avg:98.66ms +[2025-08-22 08:53:38] [Rank 0] step:8121/10000 train_time:801181ms step_avg:98.66ms +[2025-08-22 08:53:41] 
[Rank 0] step:8141/10000 train_time:803943ms step_avg:98.75ms +[2025-08-22 08:53:41] [Rank 0] step:8141/10000 train_time:803943ms step_avg:98.75ms +[2025-08-22 08:53:43] [Rank 0] step:8161/10000 train_time:806012ms step_avg:98.76ms +[2025-08-22 08:53:43] [Rank 0] step:8161/10000 train_time:806012ms step_avg:98.76ms +[2025-08-22 08:53:45] [Rank 0] step:8181/10000 train_time:808097ms step_avg:98.78ms +[2025-08-22 08:53:45] [Rank 0] step:8181/10000 train_time:808097ms step_avg:98.78ms +[2025-08-22 08:53:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:53:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:54:01] [Rank 0] PRINT: step:8200/10000 val_loss:3.4997 svd_entropy: attn_qk:H=0.9089,top10E=0.08,eRank=420.4,q75/q25=11.72 attn_vo:H=0.9169,top10E=0.06,eRank=442.7,q75/q25=11.44 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.3,q75/q25=2.81 vo_prod:H=0.8468,top10E=0.13,eRank=280.0,q75/q25=62.60 train_time:810154ms step_avg:98.80ms +[2025-08-22 08:54:01] [Rank 0] PRINT: step:8200/10000 val_loss:3.4997 svd_entropy: attn_qk:H=0.9089,top10E=0.08,eRank=420.4,q75/q25=11.72 attn_vo:H=0.9169,top10E=0.06,eRank=442.7,q75/q25=11.44 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.3,q75/q25=2.81 vo_prod:H=0.8468,top10E=0.13,eRank=280.0,q75/q25=62.60 train_time:810154ms step_avg:98.80ms +[2025-08-22 08:54:01] [Rank 0] step:8201/10000 train_time:810210ms step_avg:98.79ms +[2025-08-22 08:54:01] [Rank 0] step:8201/10000 train_time:810210ms step_avg:98.79ms +[2025-08-22 08:54:03] [Rank 0] step:8221/10000 train_time:812302ms step_avg:98.81ms +[2025-08-22 08:54:03] [Rank 0] step:8221/10000 train_time:812302ms step_avg:98.81ms +[2025-08-22 08:54:05] [Rank 0] step:8241/10000 train_time:814388ms step_avg:98.82ms 
+[2025-08-22 08:54:05] [Rank 0] step:8241/10000 train_time:814388ms step_avg:98.82ms +[2025-08-22 08:54:07] [Rank 0] step:8261/10000 train_time:816478ms step_avg:98.84ms +[2025-08-22 08:54:07] [Rank 0] step:8261/10000 train_time:816478ms step_avg:98.84ms +[2025-08-22 08:54:09] [Rank 0] step:8281/10000 train_time:818552ms step_avg:98.85ms +[2025-08-22 08:54:09] [Rank 0] step:8281/10000 train_time:818552ms step_avg:98.85ms +[2025-08-22 08:54:11] [Rank 0] step:8301/10000 train_time:820629ms step_avg:98.86ms +[2025-08-22 08:54:11] [Rank 0] step:8301/10000 train_time:820629ms step_avg:98.86ms +[2025-08-22 08:54:13] [Rank 0] step:8321/10000 train_time:822707ms step_avg:98.87ms +[2025-08-22 08:54:13] [Rank 0] step:8321/10000 train_time:822707ms step_avg:98.87ms +[2025-08-22 08:54:16] [Rank 0] step:8341/10000 train_time:824796ms step_avg:98.88ms +[2025-08-22 08:54:16] [Rank 0] step:8341/10000 train_time:824796ms step_avg:98.88ms +[2025-08-22 08:54:18] [Rank 0] step:8361/10000 train_time:826929ms step_avg:98.90ms +[2025-08-22 08:54:18] [Rank 0] step:8361/10000 train_time:826929ms step_avg:98.90ms +[2025-08-22 08:54:20] [Rank 0] step:8381/10000 train_time:829146ms step_avg:98.93ms +[2025-08-22 08:54:20] [Rank 0] step:8381/10000 train_time:829146ms step_avg:98.93ms +[2025-08-22 08:54:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:54:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:54:36] [Rank 0] PRINT: step:8400/10000 val_loss:3.4873 svd_entropy: attn_qk:H=0.9088,top10E=0.08,eRank=420.1,q75/q25=11.72 attn_vo:H=0.9169,top10E=0.06,eRank=442.7,q75/q25=11.40 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.4,q75/q25=2.80 vo_prod:H=0.8470,top10E=0.13,eRank=280.2,q75/q25=62.64 train_time:831186ms step_avg:98.95ms +[2025-08-22 08:54:36] [Rank 0] PRINT: step:8400/10000 val_loss:3.4873 svd_entropy: attn_qk:H=0.9088,top10E=0.08,eRank=420.1,q75/q25=11.72 attn_vo:H=0.9169,top10E=0.06,eRank=442.7,q75/q25=11.40 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.4,q75/q25=2.80 vo_prod:H=0.8470,top10E=0.13,eRank=280.2,q75/q25=62.64 train_time:831186ms step_avg:98.95ms +[2025-08-22 08:54:36] [Rank 0] step:8401/10000 train_time:831242ms step_avg:98.95ms +[2025-08-22 08:54:36] [Rank 0] step:8401/10000 train_time:831242ms step_avg:98.95ms +[2025-08-22 08:54:38] [Rank 0] step:8421/10000 train_time:833346ms step_avg:98.96ms +[2025-08-22 08:54:38] [Rank 0] step:8421/10000 train_time:833346ms step_avg:98.96ms +[2025-08-22 08:54:40] [Rank 0] step:8441/10000 train_time:835426ms step_avg:98.97ms +[2025-08-22 08:54:40] [Rank 0] step:8441/10000 train_time:835426ms step_avg:98.97ms +[2025-08-22 08:54:42] [Rank 0] step:8461/10000 train_time:837504ms step_avg:98.98ms +[2025-08-22 08:54:42] [Rank 0] step:8461/10000 train_time:837504ms step_avg:98.98ms +[2025-08-22 08:54:44] [Rank 0] step:8481/10000 train_time:839591ms step_avg:99.00ms +[2025-08-22 08:54:44] [Rank 0] step:8481/10000 train_time:839591ms step_avg:99.00ms +[2025-08-22 08:54:47] [Rank 0] step:8501/10000 train_time:841695ms step_avg:99.01ms +[2025-08-22 08:54:47] [Rank 0] step:8501/10000 train_time:841695ms step_avg:99.01ms +[2025-08-22 08:54:49] [Rank 0] step:8521/10000 train_time:843781ms step_avg:99.02ms +[2025-08-22 08:54:49] [Rank 0] step:8521/10000 train_time:843781ms step_avg:99.02ms +[2025-08-22 08:54:51] 
[Rank 0] step:8541/10000 train_time:845881ms step_avg:99.04ms +[2025-08-22 08:54:51] [Rank 0] step:8541/10000 train_time:845881ms step_avg:99.04ms +[2025-08-22 08:54:53] [Rank 0] step:8561/10000 train_time:847970ms step_avg:99.05ms +[2025-08-22 08:54:53] [Rank 0] step:8561/10000 train_time:847970ms step_avg:99.05ms +[2025-08-22 08:54:55] [Rank 0] step:8581/10000 train_time:850058ms step_avg:99.06ms +[2025-08-22 08:54:55] [Rank 0] step:8581/10000 train_time:850058ms step_avg:99.06ms +[2025-08-22 08:54:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:54:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:55:11] [Rank 0] PRINT: step:8600/10000 val_loss:3.4792 svd_entropy: attn_qk:H=0.9086,top10E=0.08,eRank=419.8,q75/q25=11.73 attn_vo:H=0.9169,top10E=0.06,eRank=442.7,q75/q25=11.42 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.4,q75/q25=2.80 vo_prod:H=0.8470,top10E=0.13,eRank=280.3,q75/q25=62.25 train_time:852095ms step_avg:99.08ms +[2025-08-22 08:55:11] [Rank 0] PRINT: step:8600/10000 val_loss:3.4792 svd_entropy: attn_qk:H=0.9086,top10E=0.08,eRank=419.8,q75/q25=11.73 attn_vo:H=0.9169,top10E=0.06,eRank=442.7,q75/q25=11.42 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.71 mlp_w2:H=0.9680,top10E=0.05,eRank=621.4,q75/q25=2.80 vo_prod:H=0.8470,top10E=0.13,eRank=280.3,q75/q25=62.25 train_time:852095ms step_avg:99.08ms +[2025-08-22 08:55:11] [Rank 0] step:8601/10000 train_time:852152ms step_avg:99.08ms +[2025-08-22 08:55:11] [Rank 0] step:8601/10000 train_time:852152ms step_avg:99.08ms +[2025-08-22 08:55:13] [Rank 0] step:8621/10000 train_time:854253ms step_avg:99.09ms +[2025-08-22 08:55:13] [Rank 0] step:8621/10000 train_time:854253ms step_avg:99.09ms +[2025-08-22 08:55:15] [Rank 0] step:8641/10000 train_time:856331ms step_avg:99.10ms 
+[2025-08-22 08:55:15] [Rank 0] step:8641/10000 train_time:856331ms step_avg:99.10ms +[2025-08-22 08:55:17] [Rank 0] step:8661/10000 train_time:858409ms step_avg:99.11ms +[2025-08-22 08:55:17] [Rank 0] step:8661/10000 train_time:858409ms step_avg:99.11ms +[2025-08-22 08:55:19] [Rank 0] step:8681/10000 train_time:860492ms step_avg:99.12ms +[2025-08-22 08:55:19] [Rank 0] step:8681/10000 train_time:860492ms step_avg:99.12ms +[2025-08-22 08:55:21] [Rank 0] step:8701/10000 train_time:862657ms step_avg:99.14ms +[2025-08-22 08:55:21] [Rank 0] step:8701/10000 train_time:862657ms step_avg:99.14ms +[2025-08-22 08:55:24] [Rank 0] step:8721/10000 train_time:864862ms step_avg:99.17ms +[2025-08-22 08:55:24] [Rank 0] step:8721/10000 train_time:864862ms step_avg:99.17ms +[2025-08-22 08:55:26] [Rank 0] step:8741/10000 train_time:866933ms step_avg:99.18ms +[2025-08-22 08:55:26] [Rank 0] step:8741/10000 train_time:866933ms step_avg:99.18ms +[2025-08-22 08:55:28] [Rank 0] step:8761/10000 train_time:869012ms step_avg:99.19ms +[2025-08-22 08:55:28] [Rank 0] step:8761/10000 train_time:869012ms step_avg:99.19ms +[2025-08-22 08:55:30] [Rank 0] step:8781/10000 train_time:871097ms step_avg:99.20ms +[2025-08-22 08:55:30] [Rank 0] step:8781/10000 train_time:871097ms step_avg:99.20ms +[2025-08-22 08:55:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:55:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:55:45] [Rank 0] PRINT: step:8800/10000 val_loss:3.4672 svd_entropy: attn_qk:H=0.9085,top10E=0.08,eRank=419.5,q75/q25=11.79 attn_vo:H=0.9170,top10E=0.06,eRank=442.8,q75/q25=11.45 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.71 mlp_w2:H=0.9681,top10E=0.05,eRank=621.5,q75/q25=2.80 vo_prod:H=0.8471,top10E=0.13,eRank=280.4,q75/q25=62.20 train_time:873132ms step_avg:99.22ms +[2025-08-22 08:55:45] [Rank 0] PRINT: step:8800/10000 val_loss:3.4672 svd_entropy: attn_qk:H=0.9085,top10E=0.08,eRank=419.5,q75/q25=11.79 attn_vo:H=0.9170,top10E=0.06,eRank=442.8,q75/q25=11.45 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.71 mlp_w2:H=0.9681,top10E=0.05,eRank=621.5,q75/q25=2.80 vo_prod:H=0.8471,top10E=0.13,eRank=280.4,q75/q25=62.20 train_time:873132ms step_avg:99.22ms +[2025-08-22 08:55:46] [Rank 0] step:8801/10000 train_time:873186ms step_avg:99.21ms +[2025-08-22 08:55:46] [Rank 0] step:8801/10000 train_time:873186ms step_avg:99.21ms +[2025-08-22 08:55:48] [Rank 0] step:8821/10000 train_time:875268ms step_avg:99.23ms +[2025-08-22 08:55:48] [Rank 0] step:8821/10000 train_time:875268ms step_avg:99.23ms +[2025-08-22 08:55:50] [Rank 0] step:8841/10000 train_time:877367ms step_avg:99.24ms +[2025-08-22 08:55:50] [Rank 0] step:8841/10000 train_time:877367ms step_avg:99.24ms +[2025-08-22 08:55:52] [Rank 0] step:8861/10000 train_time:879438ms step_avg:99.25ms +[2025-08-22 08:55:52] [Rank 0] step:8861/10000 train_time:879438ms step_avg:99.25ms +[2025-08-22 08:55:54] [Rank 0] step:8881/10000 train_time:881513ms step_avg:99.26ms +[2025-08-22 08:55:54] [Rank 0] step:8881/10000 train_time:881513ms step_avg:99.26ms +[2025-08-22 08:55:56] [Rank 0] step:8901/10000 train_time:883593ms step_avg:99.27ms +[2025-08-22 08:55:56] [Rank 0] step:8901/10000 train_time:883593ms step_avg:99.27ms +[2025-08-22 08:55:58] [Rank 0] step:8921/10000 train_time:885692ms step_avg:99.28ms +[2025-08-22 08:55:58] [Rank 0] step:8921/10000 train_time:885692ms step_avg:99.28ms +[2025-08-22 08:56:00] 
[Rank 0] step:8941/10000 train_time:887775ms step_avg:99.29ms +[2025-08-22 08:56:00] [Rank 0] step:8941/10000 train_time:887775ms step_avg:99.29ms +[2025-08-22 08:56:02] [Rank 0] step:8961/10000 train_time:889856ms step_avg:99.30ms +[2025-08-22 08:56:02] [Rank 0] step:8961/10000 train_time:889856ms step_avg:99.30ms +[2025-08-22 08:56:04] [Rank 0] step:8981/10000 train_time:891937ms step_avg:99.31ms +[2025-08-22 08:56:04] [Rank 0] step:8981/10000 train_time:891937ms step_avg:99.31ms +[2025-08-22 08:56:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:56:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:56:20] [Rank 0] PRINT: step:9000/10000 val_loss:3.4567 svd_entropy: attn_qk:H=0.9084,top10E=0.08,eRank=419.2,q75/q25=11.81 attn_vo:H=0.9170,top10E=0.06,eRank=442.9,q75/q25=11.47 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.71 mlp_w2:H=0.9681,top10E=0.05,eRank=621.5,q75/q25=2.80 vo_prod:H=0.8472,top10E=0.13,eRank=280.6,q75/q25=61.85 train_time:893973ms step_avg:99.33ms +[2025-08-22 08:56:20] [Rank 0] PRINT: step:9000/10000 val_loss:3.4567 svd_entropy: attn_qk:H=0.9084,top10E=0.08,eRank=419.2,q75/q25=11.81 attn_vo:H=0.9170,top10E=0.06,eRank=442.9,q75/q25=11.47 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.71 mlp_w2:H=0.9681,top10E=0.05,eRank=621.5,q75/q25=2.80 vo_prod:H=0.8472,top10E=0.13,eRank=280.6,q75/q25=61.85 train_time:893973ms step_avg:99.33ms +[2025-08-22 08:56:20] [Rank 0] step:9001/10000 train_time:894029ms step_avg:99.33ms +[2025-08-22 08:56:20] [Rank 0] step:9001/10000 train_time:894029ms step_avg:99.33ms +[2025-08-22 08:56:22] [Rank 0] step:9021/10000 train_time:896128ms step_avg:99.34ms +[2025-08-22 08:56:22] [Rank 0] step:9021/10000 train_time:896128ms step_avg:99.34ms +[2025-08-22 08:56:24] [Rank 0] step:9041/10000 train_time:898286ms step_avg:99.36ms 
+[2025-08-22 08:56:24] [Rank 0] step:9041/10000 train_time:898286ms step_avg:99.36ms +[2025-08-22 08:56:27] [Rank 0] step:9061/10000 train_time:900454ms step_avg:99.38ms +[2025-08-22 08:56:27] [Rank 0] step:9061/10000 train_time:900454ms step_avg:99.38ms +[2025-08-22 08:56:29] [Rank 0] step:9081/10000 train_time:902537ms step_avg:99.39ms +[2025-08-22 08:56:29] [Rank 0] step:9081/10000 train_time:902537ms step_avg:99.39ms +[2025-08-22 08:56:31] [Rank 0] step:9101/10000 train_time:904633ms step_avg:99.40ms +[2025-08-22 08:56:31] [Rank 0] step:9101/10000 train_time:904633ms step_avg:99.40ms +[2025-08-22 08:56:33] [Rank 0] step:9121/10000 train_time:906720ms step_avg:99.41ms +[2025-08-22 08:56:33] [Rank 0] step:9121/10000 train_time:906720ms step_avg:99.41ms +[2025-08-22 08:56:35] [Rank 0] step:9141/10000 train_time:908791ms step_avg:99.42ms +[2025-08-22 08:56:35] [Rank 0] step:9141/10000 train_time:908791ms step_avg:99.42ms +[2025-08-22 08:56:37] [Rank 0] step:9161/10000 train_time:910867ms step_avg:99.43ms +[2025-08-22 08:56:37] [Rank 0] step:9161/10000 train_time:910867ms step_avg:99.43ms +[2025-08-22 08:56:39] [Rank 0] step:9181/10000 train_time:912982ms step_avg:99.44ms +[2025-08-22 08:56:39] [Rank 0] step:9181/10000 train_time:912982ms step_avg:99.44ms +[2025-08-22 08:56:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:56:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:56:55] [Rank 0] PRINT: step:9200/10000 val_loss:3.4476 svd_entropy: attn_qk:H=0.9083,top10E=0.08,eRank=419.0,q75/q25=11.80 attn_vo:H=0.9170,top10E=0.06,eRank=442.9,q75/q25=11.44 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.70 mlp_w2:H=0.9681,top10E=0.05,eRank=621.6,q75/q25=2.80 vo_prod:H=0.8473,top10E=0.13,eRank=280.7,q75/q25=61.71 train_time:915016ms step_avg:99.46ms +[2025-08-22 08:56:55] [Rank 0] PRINT: step:9200/10000 val_loss:3.4476 svd_entropy: attn_qk:H=0.9083,top10E=0.08,eRank=419.0,q75/q25=11.80 attn_vo:H=0.9170,top10E=0.06,eRank=442.9,q75/q25=11.44 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.70 mlp_w2:H=0.9681,top10E=0.05,eRank=621.6,q75/q25=2.80 vo_prod:H=0.8473,top10E=0.13,eRank=280.7,q75/q25=61.71 train_time:915016ms step_avg:99.46ms +[2025-08-22 08:56:55] [Rank 0] step:9201/10000 train_time:915070ms step_avg:99.45ms +[2025-08-22 08:56:55] [Rank 0] step:9201/10000 train_time:915070ms step_avg:99.45ms +[2025-08-22 08:56:57] [Rank 0] step:9221/10000 train_time:917175ms step_avg:99.47ms +[2025-08-22 08:56:57] [Rank 0] step:9221/10000 train_time:917175ms step_avg:99.47ms +[2025-08-22 08:56:59] [Rank 0] step:9241/10000 train_time:919264ms step_avg:99.48ms +[2025-08-22 08:56:59] [Rank 0] step:9241/10000 train_time:919264ms step_avg:99.48ms +[2025-08-22 08:57:01] [Rank 0] step:9261/10000 train_time:921351ms step_avg:99.49ms +[2025-08-22 08:57:01] [Rank 0] step:9261/10000 train_time:921351ms step_avg:99.49ms +[2025-08-22 08:57:03] [Rank 0] step:9281/10000 train_time:923421ms step_avg:99.50ms +[2025-08-22 08:57:03] [Rank 0] step:9281/10000 train_time:923421ms step_avg:99.50ms +[2025-08-22 08:57:05] [Rank 0] step:9301/10000 train_time:925497ms step_avg:99.51ms +[2025-08-22 08:57:05] [Rank 0] step:9301/10000 train_time:925497ms step_avg:99.51ms +[2025-08-22 08:57:08] [Rank 0] step:9321/10000 train_time:927583ms step_avg:99.52ms +[2025-08-22 08:57:08] [Rank 0] step:9321/10000 train_time:927583ms step_avg:99.52ms +[2025-08-22 08:57:10] 
[Rank 0] step:9341/10000 train_time:929669ms step_avg:99.53ms +[2025-08-22 08:57:10] [Rank 0] step:9341/10000 train_time:929669ms step_avg:99.53ms +[2025-08-22 08:57:12] [Rank 0] step:9361/10000 train_time:931758ms step_avg:99.54ms +[2025-08-22 08:57:12] [Rank 0] step:9361/10000 train_time:931758ms step_avg:99.54ms +[2025-08-22 08:57:14] [Rank 0] step:9381/10000 train_time:933859ms step_avg:99.55ms +[2025-08-22 08:57:14] [Rank 0] step:9381/10000 train_time:933859ms step_avg:99.55ms +[2025-08-22 08:57:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:57:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:57:30] [Rank 0] PRINT: step:9400/10000 val_loss:3.4390 svd_entropy: attn_qk:H=0.9083,top10E=0.08,eRank=418.8,q75/q25=11.82 attn_vo:H=0.9170,top10E=0.06,eRank=442.9,q75/q25=11.43 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.70 mlp_w2:H=0.9681,top10E=0.05,eRank=621.6,q75/q25=2.80 vo_prod:H=0.8473,top10E=0.13,eRank=280.9,q75/q25=61.44 train_time:935908ms step_avg:99.56ms +[2025-08-22 08:57:30] [Rank 0] PRINT: step:9400/10000 val_loss:3.4390 svd_entropy: attn_qk:H=0.9083,top10E=0.08,eRank=418.8,q75/q25=11.82 attn_vo:H=0.9170,top10E=0.06,eRank=442.9,q75/q25=11.43 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.70 mlp_w2:H=0.9681,top10E=0.05,eRank=621.6,q75/q25=2.80 vo_prod:H=0.8473,top10E=0.13,eRank=280.9,q75/q25=61.44 train_time:935908ms step_avg:99.56ms +[2025-08-22 08:57:30] [Rank 0] step:9401/10000 train_time:935964ms step_avg:99.56ms +[2025-08-22 08:57:30] [Rank 0] step:9401/10000 train_time:935964ms step_avg:99.56ms +[2025-08-22 08:57:32] [Rank 0] step:9421/10000 train_time:938058ms step_avg:99.57ms +[2025-08-22 08:57:32] [Rank 0] step:9421/10000 train_time:938058ms step_avg:99.57ms +[2025-08-22 08:57:34] [Rank 0] step:9441/10000 train_time:940141ms step_avg:99.58ms 
+[2025-08-22 08:57:34] [Rank 0] step:9441/10000 train_time:940141ms step_avg:99.58ms +[2025-08-22 08:57:36] [Rank 0] step:9461/10000 train_time:942233ms step_avg:99.59ms +[2025-08-22 08:57:36] [Rank 0] step:9461/10000 train_time:942233ms step_avg:99.59ms +[2025-08-22 08:57:38] [Rank 0] step:9481/10000 train_time:944328ms step_avg:99.60ms +[2025-08-22 08:57:38] [Rank 0] step:9481/10000 train_time:944328ms step_avg:99.60ms +[2025-08-22 08:57:40] [Rank 0] step:9501/10000 train_time:946421ms step_avg:99.61ms +[2025-08-22 08:57:40] [Rank 0] step:9501/10000 train_time:946421ms step_avg:99.61ms +[2025-08-22 08:57:42] [Rank 0] step:9521/10000 train_time:948500ms step_avg:99.62ms +[2025-08-22 08:57:42] [Rank 0] step:9521/10000 train_time:948500ms step_avg:99.62ms +[2025-08-22 08:57:44] [Rank 0] step:9541/10000 train_time:950589ms step_avg:99.63ms +[2025-08-22 08:57:44] [Rank 0] step:9541/10000 train_time:950589ms step_avg:99.63ms +[2025-08-22 08:57:46] [Rank 0] step:9561/10000 train_time:952675ms step_avg:99.64ms +[2025-08-22 08:57:46] [Rank 0] step:9561/10000 train_time:952675ms step_avg:99.64ms +[2025-08-22 08:57:48] [Rank 0] step:9581/10000 train_time:954763ms step_avg:99.65ms +[2025-08-22 08:57:48] [Rank 0] step:9581/10000 train_time:954763ms step_avg:99.65ms +[2025-08-22 08:57:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:57:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:58:04] [Rank 0] PRINT: step:9600/10000 val_loss:3.4299 svd_entropy: attn_qk:H=0.9082,top10E=0.08,eRank=418.7,q75/q25=11.82 attn_vo:H=0.9170,top10E=0.06,eRank=443.0,q75/q25=11.43 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.70 mlp_w2:H=0.9681,top10E=0.05,eRank=621.7,q75/q25=2.80 vo_prod:H=0.8474,top10E=0.13,eRank=281.0,q75/q25=61.40 train_time:956821ms step_avg:99.67ms +[2025-08-22 08:58:04] [Rank 0] PRINT: step:9600/10000 val_loss:3.4299 svd_entropy: attn_qk:H=0.9082,top10E=0.08,eRank=418.7,q75/q25=11.82 attn_vo:H=0.9170,top10E=0.06,eRank=443.0,q75/q25=11.43 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.70 mlp_w2:H=0.9681,top10E=0.05,eRank=621.7,q75/q25=2.80 vo_prod:H=0.8474,top10E=0.13,eRank=281.0,q75/q25=61.40 train_time:956821ms step_avg:99.67ms +[2025-08-22 08:58:04] [Rank 0] step:9601/10000 train_time:956876ms step_avg:99.66ms +[2025-08-22 08:58:04] [Rank 0] step:9601/10000 train_time:956876ms step_avg:99.66ms +[2025-08-22 08:58:06] [Rank 0] step:9621/10000 train_time:958978ms step_avg:99.68ms +[2025-08-22 08:58:06] [Rank 0] step:9621/10000 train_time:958978ms step_avg:99.68ms +[2025-08-22 08:58:09] [Rank 0] step:9641/10000 train_time:961070ms step_avg:99.69ms +[2025-08-22 08:58:09] [Rank 0] step:9641/10000 train_time:961070ms step_avg:99.69ms +[2025-08-22 08:58:11] [Rank 0] step:9661/10000 train_time:963182ms step_avg:99.70ms +[2025-08-22 08:58:11] [Rank 0] step:9661/10000 train_time:963182ms step_avg:99.70ms +[2025-08-22 08:58:13] [Rank 0] step:9681/10000 train_time:965284ms step_avg:99.71ms +[2025-08-22 08:58:13] [Rank 0] step:9681/10000 train_time:965284ms step_avg:99.71ms +[2025-08-22 08:58:15] [Rank 0] step:9701/10000 train_time:967406ms step_avg:99.72ms +[2025-08-22 08:58:15] [Rank 0] step:9701/10000 train_time:967406ms step_avg:99.72ms +[2025-08-22 08:58:17] [Rank 0] step:9721/10000 train_time:969513ms step_avg:99.73ms +[2025-08-22 08:58:17] [Rank 0] step:9721/10000 train_time:969513ms step_avg:99.73ms +[2025-08-22 08:58:19] 
[Rank 0] step:9741/10000 train_time:971635ms step_avg:99.75ms +[2025-08-22 08:58:19] [Rank 0] step:9741/10000 train_time:971635ms step_avg:99.75ms +[2025-08-22 08:58:21] [Rank 0] step:9761/10000 train_time:973748ms step_avg:99.76ms +[2025-08-22 08:58:21] [Rank 0] step:9761/10000 train_time:973748ms step_avg:99.76ms +[2025-08-22 08:58:23] [Rank 0] step:9781/10000 train_time:975866ms step_avg:99.77ms +[2025-08-22 08:58:23] [Rank 0] step:9781/10000 train_time:975866ms step_avg:99.77ms +[2025-08-22 08:58:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:58:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:58:39] [Rank 0] PRINT: step:9800/10000 val_loss:3.4221 svd_entropy: attn_qk:H=0.9081,top10E=0.08,eRank=418.5,q75/q25=11.84 attn_vo:H=0.9170,top10E=0.06,eRank=443.0,q75/q25=11.45 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.70 mlp_w2:H=0.9681,top10E=0.05,eRank=621.7,q75/q25=2.80 vo_prod:H=0.8474,top10E=0.13,eRank=281.0,q75/q25=61.23 train_time:977948ms step_avg:99.79ms +[2025-08-22 08:58:39] [Rank 0] PRINT: step:9800/10000 val_loss:3.4221 svd_entropy: attn_qk:H=0.9081,top10E=0.08,eRank=418.5,q75/q25=11.84 attn_vo:H=0.9170,top10E=0.06,eRank=443.0,q75/q25=11.45 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.70 mlp_w2:H=0.9681,top10E=0.05,eRank=621.7,q75/q25=2.80 vo_prod:H=0.8474,top10E=0.13,eRank=281.0,q75/q25=61.23 train_time:977948ms step_avg:99.79ms +[2025-08-22 08:58:39] [Rank 0] step:9801/10000 train_time:978003ms step_avg:99.79ms +[2025-08-22 08:58:39] [Rank 0] step:9801/10000 train_time:978003ms step_avg:99.79ms +[2025-08-22 08:58:41] [Rank 0] step:9821/10000 train_time:980130ms step_avg:99.80ms +[2025-08-22 08:58:41] [Rank 0] step:9821/10000 train_time:980130ms step_avg:99.80ms +[2025-08-22 08:58:43] [Rank 0] step:9841/10000 train_time:982243ms step_avg:99.81ms 
+[2025-08-22 08:58:43] [Rank 0] step:9841/10000 train_time:982243ms step_avg:99.81ms +[2025-08-22 08:58:46] [Rank 0] step:9861/10000 train_time:984340ms step_avg:99.82ms +[2025-08-22 08:58:46] [Rank 0] step:9861/10000 train_time:984340ms step_avg:99.82ms +[2025-08-22 08:58:48] [Rank 0] step:9881/10000 train_time:986436ms step_avg:99.83ms +[2025-08-22 08:58:48] [Rank 0] step:9881/10000 train_time:986436ms step_avg:99.83ms +[2025-08-22 08:58:50] [Rank 0] step:9901/10000 train_time:988557ms step_avg:99.84ms +[2025-08-22 08:58:50] [Rank 0] step:9901/10000 train_time:988557ms step_avg:99.84ms +[2025-08-22 08:58:52] [Rank 0] step:9921/10000 train_time:990658ms step_avg:99.85ms +[2025-08-22 08:58:52] [Rank 0] step:9921/10000 train_time:990658ms step_avg:99.85ms +[2025-08-22 08:58:54] [Rank 0] step:9941/10000 train_time:992776ms step_avg:99.87ms +[2025-08-22 08:58:54] [Rank 0] step:9941/10000 train_time:992776ms step_avg:99.87ms +[2025-08-22 08:58:56] [Rank 0] step:9961/10000 train_time:994876ms step_avg:99.88ms +[2025-08-22 08:58:56] [Rank 0] step:9961/10000 train_time:994876ms step_avg:99.88ms +[2025-08-22 08:58:58] [Rank 0] step:9981/10000 train_time:996992ms step_avg:99.89ms +[2025-08-22 08:58:58] [Rank 0] step:9981/10000 train_time:996992ms step_avg:99.89ms +[2025-08-22 08:59:00] [Rank 0] step:10000/10000 train_time:999003ms step_avg:99.90ms +[2025-08-22 08:59:00] [Rank 0] step:10000/10000 train_time:999003ms step_avg:99.90ms +[2025-08-22 08:59:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 08:59:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 08:59:14] [Rank 0] PRINT: step:10000/10000 val_loss:3.4158 svd_entropy: attn_qk:H=0.9081,top10E=0.08,eRank=418.4,q75/q25=11.85 attn_vo:H=0.9170,top10E=0.06,eRank=443.0,q75/q25=11.44 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.70 mlp_w2:H=0.9681,top10E=0.05,eRank=621.7,q75/q25=2.80 vo_prod:H=0.8474,top10E=0.13,eRank=281.1,q75/q25=61.20 train_time:999069ms step_avg:99.91ms +[2025-08-22 08:59:14] [Rank 0] PRINT: step:10000/10000 val_loss:3.4158 svd_entropy: attn_qk:H=0.9081,top10E=0.08,eRank=418.4,q75/q25=11.85 attn_vo:H=0.9170,top10E=0.06,eRank=443.0,q75/q25=11.44 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.70 mlp_w2:H=0.9681,top10E=0.05,eRank=621.7,q75/q25=2.80 vo_prod:H=0.8474,top10E=0.13,eRank=281.1,q75/q25=61.20 train_time:999069ms step_avg:99.91ms +[2025-08-22 08:59:14] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 08:59:14 2025 --- +[2025-08-22 08:59:14] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 08:59:14 2025 --- +[2025-08-22 08:59:14] [Rank 0] PRINT: Peak memory allocated: 11035 MiB reserved: 16478 MiB +[2025-08-22 08:59:14] [Rank 0] PRINT: Peak memory allocated: 11035 MiB reserved: 16478 MiB diff --git a/logs_svd_gated/mode_0_param_gated_seed_42/config.json b/logs_svd_gated/mode_0_param_gated_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..4ef79be2b33e17b3584e9cba9e4a43d94c146417 --- /dev/null +++ b/logs_svd_gated/mode_0_param_gated_seed_42/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 0, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "6a688052-2830-4750-8670-883583eaa56a", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_0_param_gated_seed_42/training_log_6a688052-2830-4750-8670-883583eaa56a.txt b/logs_svd_gated/mode_0_param_gated_seed_42/training_log_6a688052-2830-4750-8670-883583eaa56a.txt new file mode 100644 index 0000000000000000000000000000000000000000..0a7fd0fdff762d7000620187bc175beabfa08b08 --- /dev/null +++ b/logs_svd_gated/mode_0_param_gated_seed_42/training_log_6a688052-2830-4750-8670-883583eaa56a.txt @@ -0,0 +1,2926 @@ +[2025-08-22 13:51:49] [Rank 0] PRINT: --- Script Start: Fri Aug 22 13:51:49 2025 --- +[2025-08-22 13:51:49] [Rank 0] PRINT: --- Script Start: Fri Aug 22 13:51:49 2025 --- +[2025-08-22 13:51:49] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=0, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 13:51:49] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=0, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 13:51:49] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 13:51:49] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 13:51:49] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 13:51:49] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 13:51:49] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_0_param_gated_seed_42 +[2025-08-22 13:51:49] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_0_param_gated_seed_42 +[2025-08-22 13:51:49] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 13:51:49] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 13:51:49] [Rank 0] PRINT: Constructing model... +[2025-08-22 13:51:49] [Rank 0] PRINT: Constructing model... +[2025-08-22 13:51:51] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 13:51:51] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 13:51:51] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 13:51:51] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 13:51:51] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 13:51:51] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 13:51:51] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 0 +[2025-08-22 13:51:51] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 0 +[2025-08-22 13:51:51] [Rank 0] PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices. +[2025-08-22 13:51:51] [Rank 0] PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices. +[2025-08-22 13:51:51] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 13:51:51] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 13:51:51] [Rank 0] PRINT: Muon optimizer is active with 80 parameters. +[2025-08-22 13:51:51] [Rank 0] PRINT: Muon optimizer is active with 80 parameters. +[2025-08-22 13:51:51] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 13:51:51] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 13:51:51] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 13:51:51] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 13:51:51] [Rank 0] PRINT: Starting warmup... +[2025-08-22 13:51:51] [Rank 0] PRINT: Starting warmup... +[2025-08-22 13:52:36] [Rank 0] PRINT: Warmup complete. +[2025-08-22 13:52:36] [Rank 0] PRINT: Warmup complete. +[2025-08-22 13:52:36] [Rank 0] PRINT: Starting training... +[2025-08-22 13:52:36] [Rank 0] PRINT: Starting training... 
+[2025-08-22 13:52:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:52:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:52:54] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 13:52:54] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 13:52:56] [Rank 0] step:21/10000 train_time:1930ms step_avg:91.93ms +[2025-08-22 13:52:56] [Rank 0] step:21/10000 train_time:1930ms step_avg:91.93ms +[2025-08-22 13:52:58] [Rank 0] step:41/10000 train_time:3766ms step_avg:91.85ms +[2025-08-22 13:52:58] [Rank 0] step:41/10000 train_time:3766ms step_avg:91.85ms +[2025-08-22 13:53:00] [Rank 0] step:61/10000 train_time:5604ms step_avg:91.87ms +[2025-08-22 13:53:00] [Rank 0] step:61/10000 train_time:5604ms step_avg:91.87ms +[2025-08-22 13:53:01] [Rank 0] step:81/10000 train_time:7439ms step_avg:91.83ms +[2025-08-22 13:53:01] [Rank 0] step:81/10000 train_time:7439ms step_avg:91.83ms +[2025-08-22 13:53:03] [Rank 0] step:101/10000 train_time:9274ms step_avg:91.82ms +[2025-08-22 13:53:03] [Rank 0] step:101/10000 train_time:9274ms step_avg:91.82ms +[2025-08-22 13:53:05] [Rank 0] step:121/10000 train_time:11112ms step_avg:91.83ms +[2025-08-22 13:53:05] [Rank 0] step:121/10000 
train_time:11112ms step_avg:91.83ms +[2025-08-22 13:53:07] [Rank 0] step:141/10000 train_time:12951ms step_avg:91.85ms +[2025-08-22 13:53:07] [Rank 0] step:141/10000 train_time:12951ms step_avg:91.85ms +[2025-08-22 13:53:09] [Rank 0] step:161/10000 train_time:14790ms step_avg:91.86ms +[2025-08-22 13:53:09] [Rank 0] step:161/10000 train_time:14790ms step_avg:91.86ms +[2025-08-22 13:53:11] [Rank 0] step:181/10000 train_time:16629ms step_avg:91.88ms +[2025-08-22 13:53:11] [Rank 0] step:181/10000 train_time:16629ms step_avg:91.88ms +[2025-08-22 13:53:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:53:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:53:26] [Rank 0] PRINT: step:200/10000 val_loss:5.3093 svd_entropy: attn_qk:H=0.9233,top10E=0.06,eRank=461.4,q75/q25=10.14 attn_vo:H=0.9148,top10E=0.07,eRank=436.6,q75/q25=11.63 mlp_w1:H=0.9719,top10E=0.04,eRank=637.4,q75/q25=2.91 mlp_w2:H=0.9698,top10E=0.04,eRank=628.5,q75/q25=3.04 vo_prod:H=0.8365,top10E=0.14,eRank=260.9,q75/q25=77.75 train_time:18430ms step_avg:92.15ms +[2025-08-22 13:53:26] [Rank 0] PRINT: step:200/10000 val_loss:5.3093 svd_entropy: attn_qk:H=0.9233,top10E=0.06,eRank=461.4,q75/q25=10.14 attn_vo:H=0.9148,top10E=0.07,eRank=436.6,q75/q25=11.63 mlp_w1:H=0.9719,top10E=0.04,eRank=637.4,q75/q25=2.91 mlp_w2:H=0.9698,top10E=0.04,eRank=628.5,q75/q25=3.04 vo_prod:H=0.8365,top10E=0.14,eRank=260.9,q75/q25=77.75 train_time:18430ms step_avg:92.15ms +[2025-08-22 13:53:26] [Rank 0] step:201/10000 train_time:18480ms step_avg:91.94ms +[2025-08-22 13:53:26] [Rank 0] step:201/10000 train_time:18480ms step_avg:91.94ms +[2025-08-22 13:53:28] [Rank 0] step:221/10000 train_time:20329ms step_avg:91.99ms +[2025-08-22 13:53:28] [Rank 0] step:221/10000 train_time:20329ms step_avg:91.99ms +[2025-08-22 13:53:30] [Rank 0] step:241/10000 
train_time:22164ms step_avg:91.97ms +[2025-08-22 13:53:30] [Rank 0] step:241/10000 train_time:22164ms step_avg:91.97ms +[2025-08-22 13:53:32] [Rank 0] step:261/10000 train_time:24001ms step_avg:91.96ms +[2025-08-22 13:53:32] [Rank 0] step:261/10000 train_time:24001ms step_avg:91.96ms +[2025-08-22 13:53:34] [Rank 0] step:281/10000 train_time:25840ms step_avg:91.96ms +[2025-08-22 13:53:34] [Rank 0] step:281/10000 train_time:25840ms step_avg:91.96ms +[2025-08-22 13:53:36] [Rank 0] step:301/10000 train_time:27679ms step_avg:91.96ms +[2025-08-22 13:53:36] [Rank 0] step:301/10000 train_time:27679ms step_avg:91.96ms +[2025-08-22 13:53:37] [Rank 0] step:321/10000 train_time:29521ms step_avg:91.96ms +[2025-08-22 13:53:37] [Rank 0] step:321/10000 train_time:29521ms step_avg:91.96ms +[2025-08-22 13:53:39] [Rank 0] step:341/10000 train_time:31487ms step_avg:92.34ms +[2025-08-22 13:53:39] [Rank 0] step:341/10000 train_time:31487ms step_avg:92.34ms +[2025-08-22 13:53:41] [Rank 0] step:361/10000 train_time:33487ms step_avg:92.76ms +[2025-08-22 13:53:41] [Rank 0] step:361/10000 train_time:33487ms step_avg:92.76ms +[2025-08-22 13:53:43] [Rank 0] step:381/10000 train_time:35330ms step_avg:92.73ms +[2025-08-22 13:53:43] [Rank 0] step:381/10000 train_time:35330ms step_avg:92.73ms +[2025-08-22 13:53:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:53:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:53:59] [Rank 0] PRINT: step:400/10000 val_loss:4.9322 svd_entropy: attn_qk:H=0.9237,top10E=0.06,eRank=462.6,q75/q25=9.94 attn_vo:H=0.9204,top10E=0.06,eRank=452.9,q75/q25=10.42 mlp_w1:H=0.9721,top10E=0.04,eRank=638.3,q75/q25=2.87 mlp_w2:H=0.9699,top10E=0.04,eRank=628.7,q75/q25=2.98 vo_prod:H=0.8481,top10E=0.12,eRank=281.2,q75/q25=58.69 train_time:37133ms step_avg:92.83ms +[2025-08-22 13:53:59] [Rank 0] PRINT: step:400/10000 val_loss:4.9322 svd_entropy: attn_qk:H=0.9237,top10E=0.06,eRank=462.6,q75/q25=9.94 attn_vo:H=0.9204,top10E=0.06,eRank=452.9,q75/q25=10.42 mlp_w1:H=0.9721,top10E=0.04,eRank=638.3,q75/q25=2.87 mlp_w2:H=0.9699,top10E=0.04,eRank=628.7,q75/q25=2.98 vo_prod:H=0.8481,top10E=0.12,eRank=281.2,q75/q25=58.69 train_time:37133ms step_avg:92.83ms +[2025-08-22 13:53:59] [Rank 0] step:401/10000 train_time:37183ms step_avg:92.72ms +[2025-08-22 13:53:59] [Rank 0] step:401/10000 train_time:37183ms step_avg:92.72ms +[2025-08-22 13:54:01] [Rank 0] step:421/10000 train_time:39027ms step_avg:92.70ms +[2025-08-22 13:54:01] [Rank 0] step:421/10000 train_time:39027ms step_avg:92.70ms +[2025-08-22 13:54:03] [Rank 0] step:441/10000 train_time:40870ms step_avg:92.68ms +[2025-08-22 13:54:03] [Rank 0] step:441/10000 train_time:40870ms step_avg:92.68ms +[2025-08-22 13:54:05] [Rank 0] step:461/10000 train_time:42711ms step_avg:92.65ms +[2025-08-22 13:54:05] [Rank 0] step:461/10000 train_time:42711ms step_avg:92.65ms +[2025-08-22 13:54:06] [Rank 0] step:481/10000 train_time:44553ms step_avg:92.63ms +[2025-08-22 13:54:06] [Rank 0] step:481/10000 train_time:44553ms step_avg:92.63ms +[2025-08-22 13:54:08] [Rank 0] step:501/10000 train_time:46396ms step_avg:92.61ms +[2025-08-22 13:54:08] [Rank 0] step:501/10000 train_time:46396ms step_avg:92.61ms +[2025-08-22 13:54:10] [Rank 0] step:521/10000 train_time:48241ms step_avg:92.59ms +[2025-08-22 13:54:10] [Rank 0] step:521/10000 train_time:48241ms step_avg:92.59ms +[2025-08-22 13:54:12] [Rank 0] step:541/10000 
train_time:50084ms step_avg:92.58ms +[2025-08-22 13:54:12] [Rank 0] step:541/10000 train_time:50084ms step_avg:92.58ms +[2025-08-22 13:54:14] [Rank 0] step:561/10000 train_time:51928ms step_avg:92.56ms +[2025-08-22 13:54:14] [Rank 0] step:561/10000 train_time:51928ms step_avg:92.56ms +[2025-08-22 13:54:16] [Rank 0] step:581/10000 train_time:53773ms step_avg:92.55ms +[2025-08-22 13:54:16] [Rank 0] step:581/10000 train_time:53773ms step_avg:92.55ms +[2025-08-22 13:54:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:54:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:54:31] [Rank 0] PRINT: step:600/10000 val_loss:4.7319 svd_entropy: attn_qk:H=0.9211,top10E=0.06,eRank=454.8,q75/q25=10.30 attn_vo:H=0.9187,top10E=0.06,eRank=447.6,q75/q25=10.75 mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.87 mlp_w2:H=0.9690,top10E=0.04,eRank=624.9,q75/q25=2.99 vo_prod:H=0.8451,top10E=0.13,eRank=275.9,q75/q25=62.06 train_time:55577ms step_avg:92.63ms +[2025-08-22 13:54:31] [Rank 0] PRINT: step:600/10000 val_loss:4.7319 svd_entropy: attn_qk:H=0.9211,top10E=0.06,eRank=454.8,q75/q25=10.30 attn_vo:H=0.9187,top10E=0.06,eRank=447.6,q75/q25=10.75 mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.87 mlp_w2:H=0.9690,top10E=0.04,eRank=624.9,q75/q25=2.99 vo_prod:H=0.8451,top10E=0.13,eRank=275.9,q75/q25=62.06 train_time:55577ms step_avg:92.63ms +[2025-08-22 13:54:31] [Rank 0] step:601/10000 train_time:55627ms step_avg:92.56ms +[2025-08-22 13:54:31] [Rank 0] step:601/10000 train_time:55627ms step_avg:92.56ms +[2025-08-22 13:54:33] [Rank 0] step:621/10000 train_time:57480ms step_avg:92.56ms +[2025-08-22 13:54:33] [Rank 0] step:621/10000 train_time:57480ms step_avg:92.56ms +[2025-08-22 13:54:35] [Rank 0] step:641/10000 train_time:59316ms step_avg:92.54ms +[2025-08-22 13:54:35] [Rank 0] step:641/10000 
train_time:59316ms step_avg:92.54ms +[2025-08-22 13:54:37] [Rank 0] step:661/10000 train_time:61154ms step_avg:92.52ms +[2025-08-22 13:54:37] [Rank 0] step:661/10000 train_time:61154ms step_avg:92.52ms +[2025-08-22 13:54:39] [Rank 0] step:681/10000 train_time:62992ms step_avg:92.50ms +[2025-08-22 13:54:39] [Rank 0] step:681/10000 train_time:62992ms step_avg:92.50ms +[2025-08-22 13:54:41] [Rank 0] step:701/10000 train_time:64833ms step_avg:92.49ms +[2025-08-22 13:54:41] [Rank 0] step:701/10000 train_time:64833ms step_avg:92.49ms +[2025-08-22 13:54:43] [Rank 0] step:721/10000 train_time:66817ms step_avg:92.67ms +[2025-08-22 13:54:43] [Rank 0] step:721/10000 train_time:66817ms step_avg:92.67ms +[2025-08-22 13:54:45] [Rank 0] step:741/10000 train_time:68800ms step_avg:92.85ms +[2025-08-22 13:54:45] [Rank 0] step:741/10000 train_time:68800ms step_avg:92.85ms +[2025-08-22 13:54:46] [Rank 0] step:761/10000 train_time:70653ms step_avg:92.84ms +[2025-08-22 13:54:46] [Rank 0] step:761/10000 train_time:70653ms step_avg:92.84ms +[2025-08-22 13:54:48] [Rank 0] step:781/10000 train_time:72508ms step_avg:92.84ms +[2025-08-22 13:54:48] [Rank 0] step:781/10000 train_time:72508ms step_avg:92.84ms +[2025-08-22 13:54:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:54:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:55:04] [Rank 0] PRINT: step:800/10000 val_loss:4.4742 svd_entropy: attn_qk:H=0.9193,top10E=0.06,eRank=449.3,q75/q25=10.50 attn_vo:H=0.9174,top10E=0.06,eRank=443.9,q75/q25=11.00 mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.86 mlp_w2:H=0.9684,top10E=0.05,eRank=622.6,q75/q25=2.98 vo_prod:H=0.8433,top10E=0.13,eRank=272.8,q75/q25=64.20 train_time:74321ms step_avg:92.90ms +[2025-08-22 13:55:04] [Rank 0] PRINT: step:800/10000 val_loss:4.4742 svd_entropy: attn_qk:H=0.9193,top10E=0.06,eRank=449.3,q75/q25=10.50 attn_vo:H=0.9174,top10E=0.06,eRank=443.9,q75/q25=11.00 mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.86 mlp_w2:H=0.9684,top10E=0.05,eRank=622.6,q75/q25=2.98 vo_prod:H=0.8433,top10E=0.13,eRank=272.8,q75/q25=64.20 train_time:74321ms step_avg:92.90ms +[2025-08-22 13:55:04] [Rank 0] step:801/10000 train_time:74371ms step_avg:92.85ms +[2025-08-22 13:55:04] [Rank 0] step:801/10000 train_time:74371ms step_avg:92.85ms +[2025-08-22 13:55:06] [Rank 0] step:821/10000 train_time:76258ms step_avg:92.88ms +[2025-08-22 13:55:06] [Rank 0] step:821/10000 train_time:76258ms step_avg:92.88ms +[2025-08-22 13:55:08] [Rank 0] step:841/10000 train_time:78107ms step_avg:92.87ms +[2025-08-22 13:55:08] [Rank 0] step:841/10000 train_time:78107ms step_avg:92.87ms +[2025-08-22 13:55:09] [Rank 0] step:861/10000 train_time:79958ms step_avg:92.87ms +[2025-08-22 13:55:09] [Rank 0] step:861/10000 train_time:79958ms step_avg:92.87ms +[2025-08-22 13:55:11] [Rank 0] step:881/10000 train_time:81810ms step_avg:92.86ms +[2025-08-22 13:55:11] [Rank 0] step:881/10000 train_time:81810ms step_avg:92.86ms +[2025-08-22 13:55:13] [Rank 0] step:901/10000 train_time:83662ms step_avg:92.85ms +[2025-08-22 13:55:13] [Rank 0] step:901/10000 train_time:83662ms step_avg:92.85ms +[2025-08-22 13:55:15] [Rank 0] step:921/10000 train_time:85515ms step_avg:92.85ms +[2025-08-22 13:55:15] [Rank 0] step:921/10000 train_time:85515ms step_avg:92.85ms +[2025-08-22 13:55:17] [Rank 0] step:941/10000 
train_time:87370ms step_avg:92.85ms +[2025-08-22 13:55:17] [Rank 0] step:941/10000 train_time:87370ms step_avg:92.85ms +[2025-08-22 13:55:19] [Rank 0] step:961/10000 train_time:89224ms step_avg:92.85ms +[2025-08-22 13:55:19] [Rank 0] step:961/10000 train_time:89224ms step_avg:92.85ms +[2025-08-22 13:55:21] [Rank 0] step:981/10000 train_time:91077ms step_avg:92.84ms +[2025-08-22 13:55:21] [Rank 0] step:981/10000 train_time:91077ms step_avg:92.84ms +[2025-08-22 13:55:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:55:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:55:36] [Rank 0] PRINT: step:1000/10000 val_loss:4.3439 svd_entropy: attn_qk:H=0.9179,top10E=0.06,eRank=445.2,q75/q25=10.69 attn_vo:H=0.9166,top10E=0.06,eRank=441.5,q75/q25=11.16 mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.84 mlp_w2:H=0.9680,top10E=0.05,eRank=621.1,q75/q25=2.97 vo_prod:H=0.8424,top10E=0.13,eRank=271.1,q75/q25=65.18 train_time:92891ms step_avg:92.89ms +[2025-08-22 13:55:36] [Rank 0] PRINT: step:1000/10000 val_loss:4.3439 svd_entropy: attn_qk:H=0.9179,top10E=0.06,eRank=445.2,q75/q25=10.69 attn_vo:H=0.9166,top10E=0.06,eRank=441.5,q75/q25=11.16 mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.84 mlp_w2:H=0.9680,top10E=0.05,eRank=621.1,q75/q25=2.97 vo_prod:H=0.8424,top10E=0.13,eRank=271.1,q75/q25=65.18 train_time:92891ms step_avg:92.89ms +[2025-08-22 13:55:36] [Rank 0] step:1001/10000 train_time:92942ms step_avg:92.85ms +[2025-08-22 13:55:36] [Rank 0] step:1001/10000 train_time:92942ms step_avg:92.85ms +[2025-08-22 13:55:38] [Rank 0] step:1021/10000 train_time:94818ms step_avg:92.87ms +[2025-08-22 13:55:38] [Rank 0] step:1021/10000 train_time:94818ms step_avg:92.87ms +[2025-08-22 13:55:40] [Rank 0] step:1041/10000 train_time:96666ms step_avg:92.86ms +[2025-08-22 13:55:40] [Rank 0] 
step:1041/10000 train_time:96666ms step_avg:92.86ms +[2025-08-22 13:55:42] [Rank 0] step:1061/10000 train_time:98517ms step_avg:92.85ms +[2025-08-22 13:55:42] [Rank 0] step:1061/10000 train_time:98517ms step_avg:92.85ms +[2025-08-22 13:55:44] [Rank 0] step:1081/10000 train_time:100370ms step_avg:92.85ms +[2025-08-22 13:55:44] [Rank 0] step:1081/10000 train_time:100370ms step_avg:92.85ms +[2025-08-22 13:55:46] [Rank 0] step:1101/10000 train_time:102380ms step_avg:92.99ms +[2025-08-22 13:55:46] [Rank 0] step:1101/10000 train_time:102380ms step_avg:92.99ms +[2025-08-22 13:55:48] [Rank 0] step:1121/10000 train_time:104353ms step_avg:93.09ms +[2025-08-22 13:55:48] [Rank 0] step:1121/10000 train_time:104353ms step_avg:93.09ms +[2025-08-22 13:55:49] [Rank 0] step:1141/10000 train_time:106208ms step_avg:93.08ms +[2025-08-22 13:55:49] [Rank 0] step:1141/10000 train_time:106208ms step_avg:93.08ms +[2025-08-22 13:55:51] [Rank 0] step:1161/10000 train_time:108062ms step_avg:93.08ms +[2025-08-22 13:55:51] [Rank 0] step:1161/10000 train_time:108062ms step_avg:93.08ms +[2025-08-22 13:55:53] [Rank 0] step:1181/10000 train_time:109916ms step_avg:93.07ms +[2025-08-22 13:55:53] [Rank 0] step:1181/10000 train_time:109916ms step_avg:93.07ms +[2025-08-22 13:55:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:55:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:56:09] [Rank 0] PRINT: step:1200/10000 val_loss:4.2479 svd_entropy: attn_qk:H=0.9170,top10E=0.07,eRank=442.5,q75/q25=10.77 attn_vo:H=0.9162,top10E=0.06,eRank=440.4,q75/q25=11.22 mlp_w1:H=0.9721,top10E=0.04,eRank=638.0,q75/q25=2.83 mlp_w2:H=0.9677,top10E=0.05,eRank=619.7,q75/q25=2.96 vo_prod:H=0.8422,top10E=0.13,eRank=270.8,q75/q25=66.18 train_time:111731ms step_avg:93.11ms +[2025-08-22 13:56:09] [Rank 0] PRINT: step:1200/10000 val_loss:4.2479 svd_entropy: attn_qk:H=0.9170,top10E=0.07,eRank=442.5,q75/q25=10.77 attn_vo:H=0.9162,top10E=0.06,eRank=440.4,q75/q25=11.22 mlp_w1:H=0.9721,top10E=0.04,eRank=638.0,q75/q25=2.83 mlp_w2:H=0.9677,top10E=0.05,eRank=619.7,q75/q25=2.96 vo_prod:H=0.8422,top10E=0.13,eRank=270.8,q75/q25=66.18 train_time:111731ms step_avg:93.11ms +[2025-08-22 13:56:09] [Rank 0] step:1201/10000 train_time:111781ms step_avg:93.07ms +[2025-08-22 13:56:09] [Rank 0] step:1201/10000 train_time:111781ms step_avg:93.07ms +[2025-08-22 13:56:11] [Rank 0] step:1221/10000 train_time:113659ms step_avg:93.09ms +[2025-08-22 13:56:11] [Rank 0] step:1221/10000 train_time:113659ms step_avg:93.09ms +[2025-08-22 13:56:12] [Rank 0] step:1241/10000 train_time:115514ms step_avg:93.08ms +[2025-08-22 13:56:12] [Rank 0] step:1241/10000 train_time:115514ms step_avg:93.08ms +[2025-08-22 13:56:14] [Rank 0] step:1261/10000 train_time:117367ms step_avg:93.07ms +[2025-08-22 13:56:14] [Rank 0] step:1261/10000 train_time:117367ms step_avg:93.07ms +[2025-08-22 13:56:16] [Rank 0] step:1281/10000 train_time:119224ms step_avg:93.07ms +[2025-08-22 13:56:16] [Rank 0] step:1281/10000 train_time:119224ms step_avg:93.07ms +[2025-08-22 13:56:18] [Rank 0] step:1301/10000 train_time:121080ms step_avg:93.07ms +[2025-08-22 13:56:18] [Rank 0] step:1301/10000 train_time:121080ms step_avg:93.07ms +[2025-08-22 13:56:20] [Rank 0] step:1321/10000 train_time:122937ms step_avg:93.06ms +[2025-08-22 13:56:20] [Rank 0] step:1321/10000 train_time:122937ms step_avg:93.06ms +[2025-08-22 13:56:22] 
[Rank 0] step:1341/10000 train_time:124794ms step_avg:93.06ms +[2025-08-22 13:56:22] [Rank 0] step:1341/10000 train_time:124794ms step_avg:93.06ms +[2025-08-22 13:56:24] [Rank 0] step:1361/10000 train_time:126653ms step_avg:93.06ms +[2025-08-22 13:56:24] [Rank 0] step:1361/10000 train_time:126653ms step_avg:93.06ms +[2025-08-22 13:56:25] [Rank 0] step:1381/10000 train_time:128513ms step_avg:93.06ms +[2025-08-22 13:56:25] [Rank 0] step:1381/10000 train_time:128513ms step_avg:93.06ms +[2025-08-22 13:56:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:56:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:56:41] [Rank 0] PRINT: step:1400/10000 val_loss:4.1977 svd_entropy: attn_qk:H=0.9163,top10E=0.07,eRank=440.7,q75/q25=10.82 attn_vo:H=0.9160,top10E=0.06,eRank=439.8,q75/q25=11.29 mlp_w1:H=0.9721,top10E=0.04,eRank=638.0,q75/q25=2.82 mlp_w2:H=0.9674,top10E=0.05,eRank=618.7,q75/q25=2.96 vo_prod:H=0.8421,top10E=0.13,eRank=270.8,q75/q25=66.47 train_time:130333ms step_avg:93.09ms +[2025-08-22 13:56:41] [Rank 0] PRINT: step:1400/10000 val_loss:4.1977 svd_entropy: attn_qk:H=0.9163,top10E=0.07,eRank=440.7,q75/q25=10.82 attn_vo:H=0.9160,top10E=0.06,eRank=439.8,q75/q25=11.29 mlp_w1:H=0.9721,top10E=0.04,eRank=638.0,q75/q25=2.82 mlp_w2:H=0.9674,top10E=0.05,eRank=618.7,q75/q25=2.96 vo_prod:H=0.8421,top10E=0.13,eRank=270.8,q75/q25=66.47 train_time:130333ms step_avg:93.09ms +[2025-08-22 13:56:41] [Rank 0] step:1401/10000 train_time:130383ms step_avg:93.06ms +[2025-08-22 13:56:41] [Rank 0] step:1401/10000 train_time:130383ms step_avg:93.06ms +[2025-08-22 13:56:43] [Rank 0] step:1421/10000 train_time:132251ms step_avg:93.07ms +[2025-08-22 13:56:43] [Rank 0] step:1421/10000 train_time:132251ms step_avg:93.07ms +[2025-08-22 13:56:45] [Rank 0] step:1441/10000 train_time:134103ms step_avg:93.06ms 
+[2025-08-22 13:56:45] [Rank 0] step:1441/10000 train_time:134103ms step_avg:93.06ms +[2025-08-22 13:56:47] [Rank 0] step:1461/10000 train_time:135955ms step_avg:93.06ms +[2025-08-22 13:56:47] [Rank 0] step:1461/10000 train_time:135955ms step_avg:93.06ms +[2025-08-22 13:56:49] [Rank 0] step:1481/10000 train_time:137930ms step_avg:93.13ms +[2025-08-22 13:56:49] [Rank 0] step:1481/10000 train_time:137930ms step_avg:93.13ms +[2025-08-22 13:56:51] [Rank 0] step:1501/10000 train_time:139942ms step_avg:93.23ms +[2025-08-22 13:56:51] [Rank 0] step:1501/10000 train_time:139942ms step_avg:93.23ms +[2025-08-22 13:56:52] [Rank 0] step:1521/10000 train_time:141807ms step_avg:93.23ms +[2025-08-22 13:56:52] [Rank 0] step:1521/10000 train_time:141807ms step_avg:93.23ms +[2025-08-22 13:56:54] [Rank 0] step:1541/10000 train_time:143673ms step_avg:93.23ms +[2025-08-22 13:56:54] [Rank 0] step:1541/10000 train_time:143673ms step_avg:93.23ms +[2025-08-22 13:56:56] [Rank 0] step:1561/10000 train_time:145539ms step_avg:93.23ms +[2025-08-22 13:56:56] [Rank 0] step:1561/10000 train_time:145539ms step_avg:93.23ms +[2025-08-22 13:56:58] [Rank 0] step:1581/10000 train_time:147406ms step_avg:93.24ms +[2025-08-22 13:56:58] [Rank 0] step:1581/10000 train_time:147406ms step_avg:93.24ms +[2025-08-22 13:57:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:57:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:57:14] [Rank 0] PRINT: step:1600/10000 val_loss:4.1081 svd_entropy: attn_qk:H=0.9157,top10E=0.07,eRank=439.0,q75/q25=10.88 attn_vo:H=0.9158,top10E=0.06,eRank=439.3,q75/q25=11.35 mlp_w1:H=0.9721,top10E=0.04,eRank=638.1,q75/q25=2.82 mlp_w2:H=0.9672,top10E=0.05,eRank=617.8,q75/q25=2.95 vo_prod:H=0.8419,top10E=0.13,eRank=270.5,q75/q25=67.31 train_time:149231ms step_avg:93.27ms +[2025-08-22 13:57:14] [Rank 0] PRINT: step:1600/10000 val_loss:4.1081 svd_entropy: attn_qk:H=0.9157,top10E=0.07,eRank=439.0,q75/q25=10.88 attn_vo:H=0.9158,top10E=0.06,eRank=439.3,q75/q25=11.35 mlp_w1:H=0.9721,top10E=0.04,eRank=638.1,q75/q25=2.82 mlp_w2:H=0.9672,top10E=0.05,eRank=617.8,q75/q25=2.95 vo_prod:H=0.8419,top10E=0.13,eRank=270.5,q75/q25=67.31 train_time:149231ms step_avg:93.27ms +[2025-08-22 13:57:14] [Rank 0] step:1601/10000 train_time:149281ms step_avg:93.24ms +[2025-08-22 13:57:14] [Rank 0] step:1601/10000 train_time:149281ms step_avg:93.24ms +[2025-08-22 13:57:16] [Rank 0] step:1621/10000 train_time:151177ms step_avg:93.26ms +[2025-08-22 13:57:16] [Rank 0] step:1621/10000 train_time:151177ms step_avg:93.26ms +[2025-08-22 13:57:17] [Rank 0] step:1641/10000 train_time:153038ms step_avg:93.26ms +[2025-08-22 13:57:17] [Rank 0] step:1641/10000 train_time:153038ms step_avg:93.26ms +[2025-08-22 13:57:19] [Rank 0] step:1661/10000 train_time:154900ms step_avg:93.26ms +[2025-08-22 13:57:19] [Rank 0] step:1661/10000 train_time:154900ms step_avg:93.26ms +[2025-08-22 13:57:21] [Rank 0] step:1681/10000 train_time:156764ms step_avg:93.26ms +[2025-08-22 13:57:21] [Rank 0] step:1681/10000 train_time:156764ms step_avg:93.26ms +[2025-08-22 13:57:23] [Rank 0] step:1701/10000 train_time:158628ms step_avg:93.26ms +[2025-08-22 13:57:23] [Rank 0] step:1701/10000 train_time:158628ms step_avg:93.26ms +[2025-08-22 13:57:25] [Rank 0] step:1721/10000 train_time:160494ms step_avg:93.26ms +[2025-08-22 13:57:25] [Rank 0] step:1721/10000 train_time:160494ms step_avg:93.26ms +[2025-08-22 13:57:27] 
[Rank 0] step:1741/10000 train_time:162361ms step_avg:93.26ms +[2025-08-22 13:57:27] [Rank 0] step:1741/10000 train_time:162361ms step_avg:93.26ms +[2025-08-22 13:57:29] [Rank 0] step:1761/10000 train_time:164227ms step_avg:93.26ms +[2025-08-22 13:57:29] [Rank 0] step:1761/10000 train_time:164227ms step_avg:93.26ms +[2025-08-22 13:57:31] [Rank 0] step:1781/10000 train_time:166094ms step_avg:93.26ms +[2025-08-22 13:57:31] [Rank 0] step:1781/10000 train_time:166094ms step_avg:93.26ms +[2025-08-22 13:57:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:57:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:57:46] [Rank 0] PRINT: step:1800/10000 val_loss:4.0560 svd_entropy: attn_qk:H=0.9152,top10E=0.07,eRank=437.5,q75/q25=10.96 attn_vo:H=0.9157,top10E=0.06,eRank=438.9,q75/q25=11.42 mlp_w1:H=0.9721,top10E=0.04,eRank=638.2,q75/q25=2.81 mlp_w2:H=0.9671,top10E=0.05,eRank=617.1,q75/q25=2.94 vo_prod:H=0.8418,top10E=0.13,eRank=270.4,q75/q25=67.15 train_time:167920ms step_avg:93.29ms +[2025-08-22 13:57:46] [Rank 0] PRINT: step:1800/10000 val_loss:4.0560 svd_entropy: attn_qk:H=0.9152,top10E=0.07,eRank=437.5,q75/q25=10.96 attn_vo:H=0.9157,top10E=0.06,eRank=438.9,q75/q25=11.42 mlp_w1:H=0.9721,top10E=0.04,eRank=638.2,q75/q25=2.81 mlp_w2:H=0.9671,top10E=0.05,eRank=617.1,q75/q25=2.94 vo_prod:H=0.8418,top10E=0.13,eRank=270.4,q75/q25=67.15 train_time:167920ms step_avg:93.29ms +[2025-08-22 13:57:46] [Rank 0] step:1801/10000 train_time:167971ms step_avg:93.27ms +[2025-08-22 13:57:46] [Rank 0] step:1801/10000 train_time:167971ms step_avg:93.27ms +[2025-08-22 13:57:48] [Rank 0] step:1821/10000 train_time:169848ms step_avg:93.27ms +[2025-08-22 13:57:48] [Rank 0] step:1821/10000 train_time:169848ms step_avg:93.27ms +[2025-08-22 13:57:50] [Rank 0] step:1841/10000 train_time:171713ms step_avg:93.27ms 
+[2025-08-22 13:57:50] [Rank 0] step:1841/10000 train_time:171713ms step_avg:93.27ms +[2025-08-22 13:57:52] [Rank 0] step:1861/10000 train_time:173677ms step_avg:93.32ms +[2025-08-22 13:57:52] [Rank 0] step:1861/10000 train_time:173677ms step_avg:93.32ms +[2025-08-22 13:57:54] [Rank 0] step:1881/10000 train_time:175708ms step_avg:93.41ms +[2025-08-22 13:57:54] [Rank 0] step:1881/10000 train_time:175708ms step_avg:93.41ms +[2025-08-22 13:57:56] [Rank 0] step:1901/10000 train_time:177575ms step_avg:93.41ms +[2025-08-22 13:57:56] [Rank 0] step:1901/10000 train_time:177575ms step_avg:93.41ms +[2025-08-22 13:57:58] [Rank 0] step:1921/10000 train_time:179442ms step_avg:93.41ms +[2025-08-22 13:57:58] [Rank 0] step:1921/10000 train_time:179442ms step_avg:93.41ms +[2025-08-22 13:57:59] [Rank 0] step:1941/10000 train_time:181312ms step_avg:93.41ms +[2025-08-22 13:57:59] [Rank 0] step:1941/10000 train_time:181312ms step_avg:93.41ms +[2025-08-22 13:58:01] [Rank 0] step:1961/10000 train_time:183181ms step_avg:93.41ms +[2025-08-22 13:58:01] [Rank 0] step:1961/10000 train_time:183181ms step_avg:93.41ms +[2025-08-22 13:58:03] [Rank 0] step:1981/10000 train_time:185050ms step_avg:93.41ms +[2025-08-22 13:58:03] [Rank 0] step:1981/10000 train_time:185050ms step_avg:93.41ms +[2025-08-22 13:58:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:58:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:58:19] [Rank 0] PRINT: step:2000/10000 val_loss:4.0288 svd_entropy: attn_qk:H=0.9148,top10E=0.07,eRank=436.2,q75/q25=10.98 attn_vo:H=0.9156,top10E=0.06,eRank=438.7,q75/q25=11.46 mlp_w1:H=0.9721,top10E=0.04,eRank=638.3,q75/q25=2.80 mlp_w2:H=0.9670,top10E=0.05,eRank=616.7,q75/q25=2.94 vo_prod:H=0.8420,top10E=0.13,eRank=270.8,q75/q25=67.70 train_time:186880ms step_avg:93.44ms +[2025-08-22 13:58:19] [Rank 0] PRINT: step:2000/10000 val_loss:4.0288 svd_entropy: attn_qk:H=0.9148,top10E=0.07,eRank=436.2,q75/q25=10.98 attn_vo:H=0.9156,top10E=0.06,eRank=438.7,q75/q25=11.46 mlp_w1:H=0.9721,top10E=0.04,eRank=638.3,q75/q25=2.80 mlp_w2:H=0.9670,top10E=0.05,eRank=616.7,q75/q25=2.94 vo_prod:H=0.8420,top10E=0.13,eRank=270.8,q75/q25=67.70 train_time:186880ms step_avg:93.44ms +[2025-08-22 13:58:19] [Rank 0] step:2001/10000 train_time:186931ms step_avg:93.42ms +[2025-08-22 13:58:19] [Rank 0] step:2001/10000 train_time:186931ms step_avg:93.42ms +[2025-08-22 13:58:21] [Rank 0] step:2021/10000 train_time:188809ms step_avg:93.42ms +[2025-08-22 13:58:21] [Rank 0] step:2021/10000 train_time:188809ms step_avg:93.42ms +[2025-08-22 13:58:23] [Rank 0] step:2041/10000 train_time:191391ms step_avg:93.77ms +[2025-08-22 13:58:23] [Rank 0] step:2041/10000 train_time:191391ms step_avg:93.77ms +[2025-08-22 13:58:25] [Rank 0] step:2061/10000 train_time:193259ms step_avg:93.77ms +[2025-08-22 13:58:25] [Rank 0] step:2061/10000 train_time:193259ms step_avg:93.77ms +[2025-08-22 13:58:27] [Rank 0] step:2081/10000 train_time:195126ms step_avg:93.77ms +[2025-08-22 13:58:27] [Rank 0] step:2081/10000 train_time:195126ms step_avg:93.77ms +[2025-08-22 13:58:29] [Rank 0] step:2101/10000 train_time:196995ms step_avg:93.76ms +[2025-08-22 13:58:29] [Rank 0] step:2101/10000 train_time:196995ms step_avg:93.76ms +[2025-08-22 13:58:31] [Rank 0] step:2121/10000 train_time:198864ms step_avg:93.76ms +[2025-08-22 13:58:31] [Rank 0] step:2121/10000 train_time:198864ms step_avg:93.76ms +[2025-08-22 13:58:33] 
[Rank 0] step:2141/10000 train_time:200733ms step_avg:93.76ms +[2025-08-22 13:58:33] [Rank 0] step:2141/10000 train_time:200733ms step_avg:93.76ms +[2025-08-22 13:58:34] [Rank 0] step:2161/10000 train_time:202605ms step_avg:93.75ms +[2025-08-22 13:58:34] [Rank 0] step:2161/10000 train_time:202605ms step_avg:93.75ms +[2025-08-22 13:58:36] [Rank 0] step:2181/10000 train_time:204477ms step_avg:93.75ms +[2025-08-22 13:58:36] [Rank 0] step:2181/10000 train_time:204477ms step_avg:93.75ms +[2025-08-22 13:58:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:58:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:58:52] [Rank 0] PRINT: step:2200/10000 val_loss:3.9883 svd_entropy: attn_qk:H=0.9144,top10E=0.07,eRank=435.2,q75/q25=11.00 attn_vo:H=0.9156,top10E=0.06,eRank=438.6,q75/q25=11.46 mlp_w1:H=0.9722,top10E=0.04,eRank=638.4,q75/q25=2.80 mlp_w2:H=0.9668,top10E=0.05,eRank=616.2,q75/q25=2.93 vo_prod:H=0.8420,top10E=0.13,eRank=271.0,q75/q25=67.11 train_time:206308ms step_avg:93.78ms +[2025-08-22 13:58:52] [Rank 0] PRINT: step:2200/10000 val_loss:3.9883 svd_entropy: attn_qk:H=0.9144,top10E=0.07,eRank=435.2,q75/q25=11.00 attn_vo:H=0.9156,top10E=0.06,eRank=438.6,q75/q25=11.46 mlp_w1:H=0.9722,top10E=0.04,eRank=638.4,q75/q25=2.80 mlp_w2:H=0.9668,top10E=0.05,eRank=616.2,q75/q25=2.93 vo_prod:H=0.8420,top10E=0.13,eRank=271.0,q75/q25=67.11 train_time:206308ms step_avg:93.78ms +[2025-08-22 13:58:52] [Rank 0] step:2201/10000 train_time:206358ms step_avg:93.76ms +[2025-08-22 13:58:52] [Rank 0] step:2201/10000 train_time:206358ms step_avg:93.76ms +[2025-08-22 13:58:54] [Rank 0] step:2221/10000 train_time:208235ms step_avg:93.76ms +[2025-08-22 13:58:54] [Rank 0] step:2221/10000 train_time:208235ms step_avg:93.76ms +[2025-08-22 13:58:56] [Rank 0] step:2241/10000 train_time:210264ms step_avg:93.83ms 
+[2025-08-22 13:58:56] [Rank 0] step:2241/10000 train_time:210264ms step_avg:93.83ms +[2025-08-22 13:58:58] [Rank 0] step:2261/10000 train_time:212242ms step_avg:93.87ms +[2025-08-22 13:58:58] [Rank 0] step:2261/10000 train_time:212242ms step_avg:93.87ms +[2025-08-22 13:59:00] [Rank 0] step:2281/10000 train_time:214151ms step_avg:93.88ms +[2025-08-22 13:59:00] [Rank 0] step:2281/10000 train_time:214151ms step_avg:93.88ms +[2025-08-22 13:59:02] [Rank 0] step:2301/10000 train_time:216061ms step_avg:93.90ms +[2025-08-22 13:59:02] [Rank 0] step:2301/10000 train_time:216061ms step_avg:93.90ms +[2025-08-22 13:59:04] [Rank 0] step:2321/10000 train_time:217971ms step_avg:93.91ms +[2025-08-22 13:59:04] [Rank 0] step:2321/10000 train_time:217971ms step_avg:93.91ms +[2025-08-22 13:59:05] [Rank 0] step:2341/10000 train_time:219882ms step_avg:93.93ms +[2025-08-22 13:59:05] [Rank 0] step:2341/10000 train_time:219882ms step_avg:93.93ms +[2025-08-22 13:59:07] [Rank 0] step:2361/10000 train_time:221793ms step_avg:93.94ms +[2025-08-22 13:59:07] [Rank 0] step:2361/10000 train_time:221793ms step_avg:93.94ms +[2025-08-22 13:59:09] [Rank 0] step:2381/10000 train_time:223703ms step_avg:93.95ms +[2025-08-22 13:59:09] [Rank 0] step:2381/10000 train_time:223703ms step_avg:93.95ms +[2025-08-22 13:59:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:59:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:59:25] [Rank 0] PRINT: step:2400/10000 val_loss:3.9238 svd_entropy: attn_qk:H=0.9139,top10E=0.07,eRank=433.8,q75/q25=11.05 attn_vo:H=0.9155,top10E=0.06,eRank=438.5,q75/q25=11.50 mlp_w1:H=0.9722,top10E=0.04,eRank=638.6,q75/q25=2.79 mlp_w2:H=0.9668,top10E=0.05,eRank=615.9,q75/q25=2.93 vo_prod:H=0.8421,top10E=0.13,eRank=271.2,q75/q25=67.77 train_time:225573ms step_avg:93.99ms +[2025-08-22 13:59:25] [Rank 0] PRINT: step:2400/10000 val_loss:3.9238 svd_entropy: attn_qk:H=0.9139,top10E=0.07,eRank=433.8,q75/q25=11.05 attn_vo:H=0.9155,top10E=0.06,eRank=438.5,q75/q25=11.50 mlp_w1:H=0.9722,top10E=0.04,eRank=638.6,q75/q25=2.79 mlp_w2:H=0.9668,top10E=0.05,eRank=615.9,q75/q25=2.93 vo_prod:H=0.8421,top10E=0.13,eRank=271.2,q75/q25=67.77 train_time:225573ms step_avg:93.99ms +[2025-08-22 13:59:25] [Rank 0] step:2401/10000 train_time:225624ms step_avg:93.97ms +[2025-08-22 13:59:25] [Rank 0] step:2401/10000 train_time:225624ms step_avg:93.97ms +[2025-08-22 13:59:27] [Rank 0] step:2421/10000 train_time:227532ms step_avg:93.98ms +[2025-08-22 13:59:27] [Rank 0] step:2421/10000 train_time:227532ms step_avg:93.98ms +[2025-08-22 13:59:29] [Rank 0] step:2441/10000 train_time:229439ms step_avg:93.99ms +[2025-08-22 13:59:29] [Rank 0] step:2441/10000 train_time:229439ms step_avg:93.99ms +[2025-08-22 13:59:31] [Rank 0] step:2461/10000 train_time:231346ms step_avg:94.01ms +[2025-08-22 13:59:31] [Rank 0] step:2461/10000 train_time:231346ms step_avg:94.01ms +[2025-08-22 13:59:33] [Rank 0] step:2481/10000 train_time:233254ms step_avg:94.02ms +[2025-08-22 13:59:33] [Rank 0] step:2481/10000 train_time:233254ms step_avg:94.02ms +[2025-08-22 13:59:34] [Rank 0] step:2501/10000 train_time:235163ms step_avg:94.03ms +[2025-08-22 13:59:34] [Rank 0] step:2501/10000 train_time:235163ms step_avg:94.03ms +[2025-08-22 13:59:36] [Rank 0] step:2521/10000 train_time:237072ms step_avg:94.04ms +[2025-08-22 13:59:36] [Rank 0] step:2521/10000 train_time:237072ms step_avg:94.04ms +[2025-08-22 13:59:38] 
[Rank 0] step:2541/10000 train_time:238980ms step_avg:94.05ms +[2025-08-22 13:59:38] [Rank 0] step:2541/10000 train_time:238980ms step_avg:94.05ms +[2025-08-22 13:59:40] [Rank 0] step:2561/10000 train_time:240891ms step_avg:94.06ms +[2025-08-22 13:59:40] [Rank 0] step:2561/10000 train_time:240891ms step_avg:94.06ms +[2025-08-22 13:59:42] [Rank 0] step:2581/10000 train_time:242803ms step_avg:94.07ms +[2025-08-22 13:59:42] [Rank 0] step:2581/10000 train_time:242803ms step_avg:94.07ms +[2025-08-22 13:59:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:59:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:59:58] [Rank 0] PRINT: step:2600/10000 val_loss:3.9013 svd_entropy: attn_qk:H=0.9136,top10E=0.07,eRank=432.9,q75/q25=11.11 attn_vo:H=0.9155,top10E=0.06,eRank=438.5,q75/q25=11.53 mlp_w1:H=0.9722,top10E=0.04,eRank=638.7,q75/q25=2.79 mlp_w2:H=0.9667,top10E=0.05,eRank=615.7,q75/q25=2.92 vo_prod:H=0.8422,top10E=0.13,eRank=271.6,q75/q25=67.92 train_time:244673ms step_avg:94.11ms +[2025-08-22 13:59:58] [Rank 0] PRINT: step:2600/10000 val_loss:3.9013 svd_entropy: attn_qk:H=0.9136,top10E=0.07,eRank=432.9,q75/q25=11.11 attn_vo:H=0.9155,top10E=0.06,eRank=438.5,q75/q25=11.53 mlp_w1:H=0.9722,top10E=0.04,eRank=638.7,q75/q25=2.79 mlp_w2:H=0.9667,top10E=0.05,eRank=615.7,q75/q25=2.92 vo_prod:H=0.8422,top10E=0.13,eRank=271.6,q75/q25=67.92 train_time:244673ms step_avg:94.11ms +[2025-08-22 13:59:58] [Rank 0] step:2601/10000 train_time:244724ms step_avg:94.09ms +[2025-08-22 13:59:58] [Rank 0] step:2601/10000 train_time:244724ms step_avg:94.09ms +[2025-08-22 14:00:00] [Rank 0] step:2621/10000 train_time:246680ms step_avg:94.12ms +[2025-08-22 14:00:00] [Rank 0] step:2621/10000 train_time:246680ms step_avg:94.12ms +[2025-08-22 14:00:02] [Rank 0] step:2641/10000 train_time:248709ms step_avg:94.17ms 
+[2025-08-22 14:00:02] [Rank 0] step:2641/10000 train_time:248709ms step_avg:94.17ms +[2025-08-22 14:00:04] [Rank 0] step:2661/10000 train_time:250618ms step_avg:94.18ms +[2025-08-22 14:00:04] [Rank 0] step:2661/10000 train_time:250618ms step_avg:94.18ms +[2025-08-22 14:00:06] [Rank 0] step:2681/10000 train_time:252527ms step_avg:94.19ms +[2025-08-22 14:00:06] [Rank 0] step:2681/10000 train_time:252527ms step_avg:94.19ms +[2025-08-22 14:00:07] [Rank 0] step:2701/10000 train_time:254437ms step_avg:94.20ms +[2025-08-22 14:00:07] [Rank 0] step:2701/10000 train_time:254437ms step_avg:94.20ms +[2025-08-22 14:00:09] [Rank 0] step:2721/10000 train_time:256348ms step_avg:94.21ms +[2025-08-22 14:00:09] [Rank 0] step:2721/10000 train_time:256348ms step_avg:94.21ms +[2025-08-22 14:00:11] [Rank 0] step:2741/10000 train_time:258260ms step_avg:94.22ms +[2025-08-22 14:00:11] [Rank 0] step:2741/10000 train_time:258260ms step_avg:94.22ms +[2025-08-22 14:00:13] [Rank 0] step:2761/10000 train_time:260173ms step_avg:94.23ms +[2025-08-22 14:00:13] [Rank 0] step:2761/10000 train_time:260173ms step_avg:94.23ms +[2025-08-22 14:00:15] [Rank 0] step:2781/10000 train_time:262086ms step_avg:94.24ms +[2025-08-22 14:00:15] [Rank 0] step:2781/10000 train_time:262086ms step_avg:94.24ms +[2025-08-22 14:00:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:00:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:00:31] [Rank 0] PRINT: step:2800/10000 val_loss:3.8819 svd_entropy: attn_qk:H=0.9133,top10E=0.07,eRank=432.1,q75/q25=11.14 attn_vo:H=0.9156,top10E=0.06,eRank=438.7,q75/q25=11.53 mlp_w1:H=0.9723,top10E=0.04,eRank=638.8,q75/q25=2.78 mlp_w2:H=0.9667,top10E=0.05,eRank=615.5,q75/q25=2.92 vo_prod:H=0.8425,top10E=0.13,eRank=272.2,q75/q25=67.25 train_time:263957ms step_avg:94.27ms +[2025-08-22 14:00:31] [Rank 0] PRINT: step:2800/10000 val_loss:3.8819 svd_entropy: attn_qk:H=0.9133,top10E=0.07,eRank=432.1,q75/q25=11.14 attn_vo:H=0.9156,top10E=0.06,eRank=438.7,q75/q25=11.53 mlp_w1:H=0.9723,top10E=0.04,eRank=638.8,q75/q25=2.78 mlp_w2:H=0.9667,top10E=0.05,eRank=615.5,q75/q25=2.92 vo_prod:H=0.8425,top10E=0.13,eRank=272.2,q75/q25=67.25 train_time:263957ms step_avg:94.27ms +[2025-08-22 14:00:31] [Rank 0] step:2801/10000 train_time:264009ms step_avg:94.26ms +[2025-08-22 14:00:31] [Rank 0] step:2801/10000 train_time:264009ms step_avg:94.26ms +[2025-08-22 14:00:33] [Rank 0] step:2821/10000 train_time:265942ms step_avg:94.27ms +[2025-08-22 14:00:33] [Rank 0] step:2821/10000 train_time:265942ms step_avg:94.27ms +[2025-08-22 14:00:35] [Rank 0] step:2841/10000 train_time:267851ms step_avg:94.28ms +[2025-08-22 14:00:35] [Rank 0] step:2841/10000 train_time:267851ms step_avg:94.28ms +[2025-08-22 14:00:36] [Rank 0] step:2861/10000 train_time:269766ms step_avg:94.29ms +[2025-08-22 14:00:36] [Rank 0] step:2861/10000 train_time:269766ms step_avg:94.29ms +[2025-08-22 14:00:38] [Rank 0] step:2881/10000 train_time:271678ms step_avg:94.30ms +[2025-08-22 14:00:38] [Rank 0] step:2881/10000 train_time:271678ms step_avg:94.30ms +[2025-08-22 14:00:40] [Rank 0] step:2901/10000 train_time:273590ms step_avg:94.31ms +[2025-08-22 14:00:40] [Rank 0] step:2901/10000 train_time:273590ms step_avg:94.31ms +[2025-08-22 14:00:42] [Rank 0] step:2921/10000 train_time:275504ms step_avg:94.32ms +[2025-08-22 14:00:42] [Rank 0] step:2921/10000 train_time:275504ms step_avg:94.32ms +[2025-08-22 14:00:44] 
[Rank 0] step:2941/10000 train_time:277420ms step_avg:94.33ms +[2025-08-22 14:00:44] [Rank 0] step:2941/10000 train_time:277420ms step_avg:94.33ms +[2025-08-22 14:00:46] [Rank 0] step:2961/10000 train_time:279334ms step_avg:94.34ms +[2025-08-22 14:00:46] [Rank 0] step:2961/10000 train_time:279334ms step_avg:94.34ms +[2025-08-22 14:00:48] [Rank 0] step:2981/10000 train_time:281259ms step_avg:94.35ms +[2025-08-22 14:00:48] [Rank 0] step:2981/10000 train_time:281259ms step_avg:94.35ms +[2025-08-22 14:00:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:00:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:01:03] [Rank 0] PRINT: step:3000/10000 val_loss:3.8500 svd_entropy: attn_qk:H=0.9130,top10E=0.07,eRank=431.3,q75/q25=11.18 attn_vo:H=0.9156,top10E=0.06,eRank=438.8,q75/q25=11.52 mlp_w1:H=0.9723,top10E=0.04,eRank=638.9,q75/q25=2.78 mlp_w2:H=0.9666,top10E=0.05,eRank=615.3,q75/q25=2.91 vo_prod:H=0.8428,top10E=0.13,eRank=272.6,q75/q25=67.28 train_time:283142ms step_avg:94.38ms +[2025-08-22 14:01:03] [Rank 0] PRINT: step:3000/10000 val_loss:3.8500 svd_entropy: attn_qk:H=0.9130,top10E=0.07,eRank=431.3,q75/q25=11.18 attn_vo:H=0.9156,top10E=0.06,eRank=438.8,q75/q25=11.52 mlp_w1:H=0.9723,top10E=0.04,eRank=638.9,q75/q25=2.78 mlp_w2:H=0.9666,top10E=0.05,eRank=615.3,q75/q25=2.91 vo_prod:H=0.8428,top10E=0.13,eRank=272.6,q75/q25=67.28 train_time:283142ms step_avg:94.38ms +[2025-08-22 14:01:04] [Rank 0] step:3001/10000 train_time:283194ms step_avg:94.37ms +[2025-08-22 14:01:04] [Rank 0] step:3001/10000 train_time:283194ms step_avg:94.37ms +[2025-08-22 14:01:06] [Rank 0] step:3021/10000 train_time:285116ms step_avg:94.38ms +[2025-08-22 14:01:06] [Rank 0] step:3021/10000 train_time:285116ms step_avg:94.38ms +[2025-08-22 14:01:07] [Rank 0] step:3041/10000 train_time:287031ms step_avg:94.39ms 
+[2025-08-22 14:01:07] [Rank 0] step:3041/10000 train_time:287031ms step_avg:94.39ms +[2025-08-22 14:01:09] [Rank 0] step:3061/10000 train_time:288946ms step_avg:94.40ms +[2025-08-22 14:01:09] [Rank 0] step:3061/10000 train_time:288946ms step_avg:94.40ms +[2025-08-22 14:01:11] [Rank 0] step:3081/10000 train_time:290863ms step_avg:94.41ms +[2025-08-22 14:01:11] [Rank 0] step:3081/10000 train_time:290863ms step_avg:94.41ms +[2025-08-22 14:01:13] [Rank 0] step:3101/10000 train_time:292779ms step_avg:94.41ms +[2025-08-22 14:01:13] [Rank 0] step:3101/10000 train_time:292779ms step_avg:94.41ms +[2025-08-22 14:01:15] [Rank 0] step:3121/10000 train_time:294695ms step_avg:94.42ms +[2025-08-22 14:01:15] [Rank 0] step:3121/10000 train_time:294695ms step_avg:94.42ms +[2025-08-22 14:01:17] [Rank 0] step:3141/10000 train_time:296612ms step_avg:94.43ms +[2025-08-22 14:01:17] [Rank 0] step:3141/10000 train_time:296612ms step_avg:94.43ms +[2025-08-22 14:01:19] [Rank 0] step:3161/10000 train_time:298530ms step_avg:94.44ms +[2025-08-22 14:01:19] [Rank 0] step:3161/10000 train_time:298530ms step_avg:94.44ms +[2025-08-22 14:01:21] [Rank 0] step:3181/10000 train_time:300449ms step_avg:94.45ms +[2025-08-22 14:01:21] [Rank 0] step:3181/10000 train_time:300449ms step_avg:94.45ms +[2025-08-22 14:01:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:01:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:01:36] [Rank 0] PRINT: step:3200/10000 val_loss:3.8244 svd_entropy: attn_qk:H=0.9127,top10E=0.07,eRank=430.5,q75/q25=11.19 attn_vo:H=0.9156,top10E=0.06,eRank=438.8,q75/q25=11.52 mlp_w1:H=0.9723,top10E=0.04,eRank=639.0,q75/q25=2.78 mlp_w2:H=0.9666,top10E=0.05,eRank=615.2,q75/q25=2.91 vo_prod:H=0.8429,top10E=0.13,eRank=272.9,q75/q25=67.57 train_time:302325ms step_avg:94.48ms +[2025-08-22 14:01:36] [Rank 0] PRINT: step:3200/10000 val_loss:3.8244 svd_entropy: attn_qk:H=0.9127,top10E=0.07,eRank=430.5,q75/q25=11.19 attn_vo:H=0.9156,top10E=0.06,eRank=438.8,q75/q25=11.52 mlp_w1:H=0.9723,top10E=0.04,eRank=639.0,q75/q25=2.78 mlp_w2:H=0.9666,top10E=0.05,eRank=615.2,q75/q25=2.91 vo_prod:H=0.8429,top10E=0.13,eRank=272.9,q75/q25=67.57 train_time:302325ms step_avg:94.48ms +[2025-08-22 14:01:36] [Rank 0] step:3201/10000 train_time:302377ms step_avg:94.46ms +[2025-08-22 14:01:36] [Rank 0] step:3201/10000 train_time:302377ms step_avg:94.46ms +[2025-08-22 14:01:38] [Rank 0] step:3221/10000 train_time:304298ms step_avg:94.47ms +[2025-08-22 14:01:38] [Rank 0] step:3221/10000 train_time:304298ms step_avg:94.47ms +[2025-08-22 14:01:40] [Rank 0] step:3241/10000 train_time:306211ms step_avg:94.48ms +[2025-08-22 14:01:40] [Rank 0] step:3241/10000 train_time:306211ms step_avg:94.48ms +[2025-08-22 14:01:42] [Rank 0] step:3261/10000 train_time:308124ms step_avg:94.49ms +[2025-08-22 14:01:42] [Rank 0] step:3261/10000 train_time:308124ms step_avg:94.49ms +[2025-08-22 14:01:44] [Rank 0] step:3281/10000 train_time:310040ms step_avg:94.50ms +[2025-08-22 14:01:44] [Rank 0] step:3281/10000 train_time:310040ms step_avg:94.50ms +[2025-08-22 14:01:46] [Rank 0] step:3301/10000 train_time:311955ms step_avg:94.50ms +[2025-08-22 14:01:46] [Rank 0] step:3301/10000 train_time:311955ms step_avg:94.50ms +[2025-08-22 14:01:48] [Rank 0] step:3321/10000 train_time:313873ms step_avg:94.51ms +[2025-08-22 14:01:48] [Rank 0] step:3321/10000 train_time:313873ms step_avg:94.51ms +[2025-08-22 14:01:50] 
[Rank 0] step:3341/10000 train_time:315790ms step_avg:94.52ms +[2025-08-22 14:01:50] [Rank 0] step:3341/10000 train_time:315790ms step_avg:94.52ms +[2025-08-22 14:01:52] [Rank 0] step:3361/10000 train_time:317709ms step_avg:94.53ms +[2025-08-22 14:01:52] [Rank 0] step:3361/10000 train_time:317709ms step_avg:94.53ms +[2025-08-22 14:01:54] [Rank 0] step:3381/10000 train_time:319627ms step_avg:94.54ms +[2025-08-22 14:01:54] [Rank 0] step:3381/10000 train_time:319627ms step_avg:94.54ms +[2025-08-22 14:01:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:01:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:02:09] [Rank 0] PRINT: step:3400/10000 val_loss:3.8038 svd_entropy: attn_qk:H=0.9125,top10E=0.07,eRank=429.9,q75/q25=11.24 attn_vo:H=0.9157,top10E=0.06,eRank=439.1,q75/q25=11.54 mlp_w1:H=0.9723,top10E=0.04,eRank=639.0,q75/q25=2.77 mlp_w2:H=0.9666,top10E=0.05,eRank=615.2,q75/q25=2.91 vo_prod:H=0.8432,top10E=0.13,eRank=273.4,q75/q25=67.31 train_time:321506ms step_avg:94.56ms +[2025-08-22 14:02:09] [Rank 0] PRINT: step:3400/10000 val_loss:3.8038 svd_entropy: attn_qk:H=0.9125,top10E=0.07,eRank=429.9,q75/q25=11.24 attn_vo:H=0.9157,top10E=0.06,eRank=439.1,q75/q25=11.54 mlp_w1:H=0.9723,top10E=0.04,eRank=639.0,q75/q25=2.77 mlp_w2:H=0.9666,top10E=0.05,eRank=615.2,q75/q25=2.91 vo_prod:H=0.8432,top10E=0.13,eRank=273.4,q75/q25=67.31 train_time:321506ms step_avg:94.56ms +[2025-08-22 14:02:09] [Rank 0] step:3401/10000 train_time:321558ms step_avg:94.55ms +[2025-08-22 14:02:09] [Rank 0] step:3401/10000 train_time:321558ms step_avg:94.55ms +[2025-08-22 14:02:11] [Rank 0] step:3421/10000 train_time:323494ms step_avg:94.56ms +[2025-08-22 14:02:11] [Rank 0] step:3421/10000 train_time:323494ms step_avg:94.56ms +[2025-08-22 14:02:13] [Rank 0] step:3441/10000 train_time:325411ms step_avg:94.57ms 
+[2025-08-22 14:02:13] [Rank 0] step:3441/10000 train_time:325411ms step_avg:94.57ms +[2025-08-22 14:02:15] [Rank 0] step:3461/10000 train_time:327328ms step_avg:94.58ms +[2025-08-22 14:02:15] [Rank 0] step:3461/10000 train_time:327328ms step_avg:94.58ms +[2025-08-22 14:02:17] [Rank 0] step:3481/10000 train_time:329247ms step_avg:94.58ms +[2025-08-22 14:02:17] [Rank 0] step:3481/10000 train_time:329247ms step_avg:94.58ms +[2025-08-22 14:02:19] [Rank 0] step:3501/10000 train_time:331169ms step_avg:94.59ms +[2025-08-22 14:02:19] [Rank 0] step:3501/10000 train_time:331169ms step_avg:94.59ms +[2025-08-22 14:02:21] [Rank 0] step:3521/10000 train_time:333093ms step_avg:94.60ms +[2025-08-22 14:02:21] [Rank 0] step:3521/10000 train_time:333093ms step_avg:94.60ms +[2025-08-22 14:02:23] [Rank 0] step:3541/10000 train_time:335015ms step_avg:94.61ms +[2025-08-22 14:02:23] [Rank 0] step:3541/10000 train_time:335015ms step_avg:94.61ms +[2025-08-22 14:02:25] [Rank 0] step:3561/10000 train_time:336937ms step_avg:94.62ms +[2025-08-22 14:02:25] [Rank 0] step:3561/10000 train_time:336937ms step_avg:94.62ms +[2025-08-22 14:02:27] [Rank 0] step:3581/10000 train_time:338861ms step_avg:94.63ms +[2025-08-22 14:02:27] [Rank 0] step:3581/10000 train_time:338861ms step_avg:94.63ms +[2025-08-22 14:02:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:02:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:02:42] [Rank 0] PRINT: step:3600/10000 val_loss:3.7963 svd_entropy: attn_qk:H=0.9123,top10E=0.07,eRank=429.3,q75/q25=11.27 attn_vo:H=0.9157,top10E=0.06,eRank=439.1,q75/q25=11.54 mlp_w1:H=0.9723,top10E=0.04,eRank=639.1,q75/q25=2.77 mlp_w2:H=0.9666,top10E=0.05,eRank=615.1,q75/q25=2.91 vo_prod:H=0.8433,top10E=0.13,eRank=273.8,q75/q25=67.58 train_time:340744ms step_avg:94.65ms +[2025-08-22 14:02:42] [Rank 0] PRINT: step:3600/10000 val_loss:3.7963 svd_entropy: attn_qk:H=0.9123,top10E=0.07,eRank=429.3,q75/q25=11.27 attn_vo:H=0.9157,top10E=0.06,eRank=439.1,q75/q25=11.54 mlp_w1:H=0.9723,top10E=0.04,eRank=639.1,q75/q25=2.77 mlp_w2:H=0.9666,top10E=0.05,eRank=615.1,q75/q25=2.91 vo_prod:H=0.8433,top10E=0.13,eRank=273.8,q75/q25=67.58 train_time:340744ms step_avg:94.65ms +[2025-08-22 14:02:42] [Rank 0] step:3601/10000 train_time:340795ms step_avg:94.64ms +[2025-08-22 14:02:42] [Rank 0] step:3601/10000 train_time:340795ms step_avg:94.64ms +[2025-08-22 14:02:44] [Rank 0] step:3621/10000 train_time:342736ms step_avg:94.65ms +[2025-08-22 14:02:44] [Rank 0] step:3621/10000 train_time:342736ms step_avg:94.65ms +[2025-08-22 14:02:46] [Rank 0] step:3641/10000 train_time:344656ms step_avg:94.66ms +[2025-08-22 14:02:46] [Rank 0] step:3641/10000 train_time:344656ms step_avg:94.66ms +[2025-08-22 14:02:48] [Rank 0] step:3661/10000 train_time:346577ms step_avg:94.67ms +[2025-08-22 14:02:48] [Rank 0] step:3661/10000 train_time:346577ms step_avg:94.67ms +[2025-08-22 14:02:50] [Rank 0] step:3681/10000 train_time:348498ms step_avg:94.67ms +[2025-08-22 14:02:50] [Rank 0] step:3681/10000 train_time:348498ms step_avg:94.67ms +[2025-08-22 14:02:52] [Rank 0] step:3701/10000 train_time:350420ms step_avg:94.68ms +[2025-08-22 14:02:52] [Rank 0] step:3701/10000 train_time:350420ms step_avg:94.68ms +[2025-08-22 14:02:54] [Rank 0] step:3721/10000 train_time:352372ms step_avg:94.70ms +[2025-08-22 14:02:54] [Rank 0] step:3721/10000 train_time:352372ms step_avg:94.70ms +[2025-08-22 14:02:56] 
[Rank 0] step:3741/10000 train_time:354332ms step_avg:94.72ms +[2025-08-22 14:02:56] [Rank 0] step:3741/10000 train_time:354332ms step_avg:94.72ms +[2025-08-22 14:02:58] [Rank 0] step:3761/10000 train_time:356291ms step_avg:94.73ms +[2025-08-22 14:02:58] [Rank 0] step:3761/10000 train_time:356291ms step_avg:94.73ms +[2025-08-22 14:03:00] [Rank 0] step:3781/10000 train_time:358253ms step_avg:94.75ms +[2025-08-22 14:03:00] [Rank 0] step:3781/10000 train_time:358253ms step_avg:94.75ms +[2025-08-22 14:03:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:03:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:03:15] [Rank 0] PRINT: step:3800/10000 val_loss:3.7708 svd_entropy: attn_qk:H=0.9120,top10E=0.07,eRank=428.6,q75/q25=11.28 attn_vo:H=0.9157,top10E=0.06,eRank=439.1,q75/q25=11.59 mlp_w1:H=0.9723,top10E=0.04,eRank=639.2,q75/q25=2.77 mlp_w2:H=0.9665,top10E=0.05,eRank=615.0,q75/q25=2.90 vo_prod:H=0.8433,top10E=0.13,eRank=273.9,q75/q25=67.16 train_time:360174ms step_avg:94.78ms +[2025-08-22 14:03:15] [Rank 0] PRINT: step:3800/10000 val_loss:3.7708 svd_entropy: attn_qk:H=0.9120,top10E=0.07,eRank=428.6,q75/q25=11.28 attn_vo:H=0.9157,top10E=0.06,eRank=439.1,q75/q25=11.59 mlp_w1:H=0.9723,top10E=0.04,eRank=639.2,q75/q25=2.77 mlp_w2:H=0.9665,top10E=0.05,eRank=615.0,q75/q25=2.90 vo_prod:H=0.8433,top10E=0.13,eRank=273.9,q75/q25=67.16 train_time:360174ms step_avg:94.78ms +[2025-08-22 14:03:15] [Rank 0] step:3801/10000 train_time:360226ms step_avg:94.77ms +[2025-08-22 14:03:15] [Rank 0] step:3801/10000 train_time:360226ms step_avg:94.77ms +[2025-08-22 14:03:17] [Rank 0] step:3821/10000 train_time:362189ms step_avg:94.79ms +[2025-08-22 14:03:17] [Rank 0] step:3821/10000 train_time:362189ms step_avg:94.79ms +[2025-08-22 14:03:19] [Rank 0] step:3841/10000 train_time:364146ms step_avg:94.80ms 
+[2025-08-22 14:03:19] [Rank 0] step:3841/10000 train_time:364146ms step_avg:94.80ms +[2025-08-22 14:03:21] [Rank 0] step:3861/10000 train_time:366100ms step_avg:94.82ms +[2025-08-22 14:03:21] [Rank 0] step:3861/10000 train_time:366100ms step_avg:94.82ms +[2025-08-22 14:03:23] [Rank 0] step:3881/10000 train_time:368054ms step_avg:94.83ms +[2025-08-22 14:03:23] [Rank 0] step:3881/10000 train_time:368054ms step_avg:94.83ms +[2025-08-22 14:03:25] [Rank 0] step:3901/10000 train_time:370051ms step_avg:94.86ms +[2025-08-22 14:03:25] [Rank 0] step:3901/10000 train_time:370051ms step_avg:94.86ms +[2025-08-22 14:03:27] [Rank 0] step:3921/10000 train_time:372005ms step_avg:94.88ms +[2025-08-22 14:03:27] [Rank 0] step:3921/10000 train_time:372005ms step_avg:94.88ms +[2025-08-22 14:03:29] [Rank 0] step:3941/10000 train_time:373957ms step_avg:94.89ms +[2025-08-22 14:03:29] [Rank 0] step:3941/10000 train_time:373957ms step_avg:94.89ms +[2025-08-22 14:03:31] [Rank 0] step:3961/10000 train_time:375910ms step_avg:94.90ms +[2025-08-22 14:03:31] [Rank 0] step:3961/10000 train_time:375910ms step_avg:94.90ms +[2025-08-22 14:03:33] [Rank 0] step:3981/10000 train_time:377864ms step_avg:94.92ms +[2025-08-22 14:03:33] [Rank 0] step:3981/10000 train_time:377864ms step_avg:94.92ms +[2025-08-22 14:03:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:03:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:03:49] [Rank 0] PRINT: step:4000/10000 val_loss:3.7514 svd_entropy: attn_qk:H=0.9117,top10E=0.07,eRank=427.8,q75/q25=11.30 attn_vo:H=0.9157,top10E=0.06,eRank=439.2,q75/q25=11.57 mlp_w1:H=0.9724,top10E=0.04,eRank=639.3,q75/q25=2.77 mlp_w2:H=0.9665,top10E=0.05,eRank=615.0,q75/q25=2.90 vo_prod:H=0.8435,top10E=0.13,eRank=274.3,q75/q25=67.36 train_time:379776ms step_avg:94.94ms +[2025-08-22 14:03:49] [Rank 0] PRINT: step:4000/10000 val_loss:3.7514 svd_entropy: attn_qk:H=0.9117,top10E=0.07,eRank=427.8,q75/q25=11.30 attn_vo:H=0.9157,top10E=0.06,eRank=439.2,q75/q25=11.57 mlp_w1:H=0.9724,top10E=0.04,eRank=639.3,q75/q25=2.77 mlp_w2:H=0.9665,top10E=0.05,eRank=615.0,q75/q25=2.90 vo_prod:H=0.8435,top10E=0.13,eRank=274.3,q75/q25=67.36 train_time:379776ms step_avg:94.94ms +[2025-08-22 14:03:49] [Rank 0] step:4001/10000 train_time:379829ms step_avg:94.93ms +[2025-08-22 14:03:49] [Rank 0] step:4001/10000 train_time:379829ms step_avg:94.93ms +[2025-08-22 14:03:51] [Rank 0] step:4021/10000 train_time:381791ms step_avg:94.95ms +[2025-08-22 14:03:51] [Rank 0] step:4021/10000 train_time:381791ms step_avg:94.95ms +[2025-08-22 14:03:53] [Rank 0] step:4041/10000 train_time:383743ms step_avg:94.96ms +[2025-08-22 14:03:53] [Rank 0] step:4041/10000 train_time:383743ms step_avg:94.96ms +[2025-08-22 14:03:55] [Rank 0] step:4061/10000 train_time:385693ms step_avg:94.98ms +[2025-08-22 14:03:55] [Rank 0] step:4061/10000 train_time:385693ms step_avg:94.98ms +[2025-08-22 14:03:57] [Rank 0] step:4081/10000 train_time:388349ms step_avg:95.16ms +[2025-08-22 14:03:57] [Rank 0] step:4081/10000 train_time:388349ms step_avg:95.16ms +[2025-08-22 14:03:59] [Rank 0] step:4101/10000 train_time:390300ms step_avg:95.17ms +[2025-08-22 14:03:59] [Rank 0] step:4101/10000 train_time:390300ms step_avg:95.17ms +[2025-08-22 14:04:01] [Rank 0] step:4121/10000 train_time:392250ms step_avg:95.18ms +[2025-08-22 14:04:01] [Rank 0] step:4121/10000 train_time:392250ms step_avg:95.18ms +[2025-08-22 14:04:03] 
[Rank 0] step:4141/10000 train_time:394206ms step_avg:95.20ms +[2025-08-22 14:04:03] [Rank 0] step:4141/10000 train_time:394206ms step_avg:95.20ms +[2025-08-22 14:04:05] [Rank 0] step:4161/10000 train_time:396160ms step_avg:95.21ms +[2025-08-22 14:04:05] [Rank 0] step:4161/10000 train_time:396160ms step_avg:95.21ms +[2025-08-22 14:04:07] [Rank 0] step:4181/10000 train_time:398116ms step_avg:95.22ms +[2025-08-22 14:04:07] [Rank 0] step:4181/10000 train_time:398116ms step_avg:95.22ms +[2025-08-22 14:04:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:04:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:04:22] [Rank 0] PRINT: step:4200/10000 val_loss:3.7392 svd_entropy: attn_qk:H=0.9115,top10E=0.07,eRank=427.2,q75/q25=11.32 attn_vo:H=0.9158,top10E=0.06,eRank=439.3,q75/q25=11.59 mlp_w1:H=0.9724,top10E=0.04,eRank=639.3,q75/q25=2.76 mlp_w2:H=0.9665,top10E=0.05,eRank=615.0,q75/q25=2.90 vo_prod:H=0.8436,top10E=0.13,eRank=274.5,q75/q25=67.10 train_time:400026ms step_avg:95.24ms +[2025-08-22 14:04:22] [Rank 0] PRINT: step:4200/10000 val_loss:3.7392 svd_entropy: attn_qk:H=0.9115,top10E=0.07,eRank=427.2,q75/q25=11.32 attn_vo:H=0.9158,top10E=0.06,eRank=439.3,q75/q25=11.59 mlp_w1:H=0.9724,top10E=0.04,eRank=639.3,q75/q25=2.76 mlp_w2:H=0.9665,top10E=0.05,eRank=615.0,q75/q25=2.90 vo_prod:H=0.8436,top10E=0.13,eRank=274.5,q75/q25=67.10 train_time:400026ms step_avg:95.24ms +[2025-08-22 14:04:23] [Rank 0] step:4201/10000 train_time:400078ms step_avg:95.23ms +[2025-08-22 14:04:23] [Rank 0] step:4201/10000 train_time:400078ms step_avg:95.23ms +[2025-08-22 14:04:24] [Rank 0] step:4221/10000 train_time:402031ms step_avg:95.25ms +[2025-08-22 14:04:24] [Rank 0] step:4221/10000 train_time:402031ms step_avg:95.25ms +[2025-08-22 14:04:26] [Rank 0] step:4241/10000 train_time:403985ms step_avg:95.26ms 
+[2025-08-22 14:04:26] [Rank 0] step:4241/10000 train_time:403985ms step_avg:95.26ms +[2025-08-22 14:04:28] [Rank 0] step:4261/10000 train_time:405937ms step_avg:95.27ms +[2025-08-22 14:04:28] [Rank 0] step:4261/10000 train_time:405937ms step_avg:95.27ms +[2025-08-22 14:04:30] [Rank 0] step:4281/10000 train_time:407891ms step_avg:95.28ms +[2025-08-22 14:04:30] [Rank 0] step:4281/10000 train_time:407891ms step_avg:95.28ms +[2025-08-22 14:04:32] [Rank 0] step:4301/10000 train_time:409843ms step_avg:95.29ms +[2025-08-22 14:04:32] [Rank 0] step:4301/10000 train_time:409843ms step_avg:95.29ms +[2025-08-22 14:04:34] [Rank 0] step:4321/10000 train_time:411798ms step_avg:95.30ms +[2025-08-22 14:04:34] [Rank 0] step:4321/10000 train_time:411798ms step_avg:95.30ms +[2025-08-22 14:04:36] [Rank 0] step:4341/10000 train_time:413751ms step_avg:95.31ms +[2025-08-22 14:04:36] [Rank 0] step:4341/10000 train_time:413751ms step_avg:95.31ms +[2025-08-22 14:04:38] [Rank 0] step:4361/10000 train_time:415707ms step_avg:95.32ms +[2025-08-22 14:04:38] [Rank 0] step:4361/10000 train_time:415707ms step_avg:95.32ms +[2025-08-22 14:04:40] [Rank 0] step:4381/10000 train_time:417662ms step_avg:95.33ms +[2025-08-22 14:04:40] [Rank 0] step:4381/10000 train_time:417662ms step_avg:95.33ms +[2025-08-22 14:04:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:04:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:04:55] [Rank 0] PRINT: step:4400/10000 val_loss:3.7253 svd_entropy: attn_qk:H=0.9113,top10E=0.07,eRank=426.6,q75/q25=11.31 attn_vo:H=0.9158,top10E=0.06,eRank=439.4,q75/q25=11.61 mlp_w1:H=0.9724,top10E=0.04,eRank=639.5,q75/q25=2.76 mlp_w2:H=0.9665,top10E=0.05,eRank=614.9,q75/q25=2.89 vo_prod:H=0.8438,top10E=0.13,eRank=274.8,q75/q25=66.90 train_time:419576ms step_avg:95.36ms +[2025-08-22 14:04:55] [Rank 0] PRINT: step:4400/10000 val_loss:3.7253 svd_entropy: attn_qk:H=0.9113,top10E=0.07,eRank=426.6,q75/q25=11.31 attn_vo:H=0.9158,top10E=0.06,eRank=439.4,q75/q25=11.61 mlp_w1:H=0.9724,top10E=0.04,eRank=639.5,q75/q25=2.76 mlp_w2:H=0.9665,top10E=0.05,eRank=614.9,q75/q25=2.89 vo_prod:H=0.8438,top10E=0.13,eRank=274.8,q75/q25=66.90 train_time:419576ms step_avg:95.36ms +[2025-08-22 14:04:56] [Rank 0] step:4401/10000 train_time:419629ms step_avg:95.35ms +[2025-08-22 14:04:56] [Rank 0] step:4401/10000 train_time:419629ms step_avg:95.35ms +[2025-08-22 14:04:57] [Rank 0] step:4421/10000 train_time:421593ms step_avg:95.36ms +[2025-08-22 14:04:57] [Rank 0] step:4421/10000 train_time:421593ms step_avg:95.36ms +[2025-08-22 14:04:59] [Rank 0] step:4441/10000 train_time:423544ms step_avg:95.37ms +[2025-08-22 14:04:59] [Rank 0] step:4441/10000 train_time:423544ms step_avg:95.37ms +[2025-08-22 14:05:01] [Rank 0] step:4461/10000 train_time:425504ms step_avg:95.38ms +[2025-08-22 14:05:01] [Rank 0] step:4461/10000 train_time:425504ms step_avg:95.38ms +[2025-08-22 14:05:03] [Rank 0] step:4481/10000 train_time:427471ms step_avg:95.40ms +[2025-08-22 14:05:03] [Rank 0] step:4481/10000 train_time:427471ms step_avg:95.40ms +[2025-08-22 14:05:05] [Rank 0] step:4501/10000 train_time:429432ms step_avg:95.41ms +[2025-08-22 14:05:05] [Rank 0] step:4501/10000 train_time:429432ms step_avg:95.41ms +[2025-08-22 14:05:07] [Rank 0] step:4521/10000 train_time:431396ms step_avg:95.42ms +[2025-08-22 14:05:07] [Rank 0] step:4521/10000 train_time:431396ms step_avg:95.42ms +[2025-08-22 14:05:09] 
[Rank 0] step:4541/10000 train_time:433361ms step_avg:95.43ms +[2025-08-22 14:05:09] [Rank 0] step:4541/10000 train_time:433361ms step_avg:95.43ms +[2025-08-22 14:05:11] [Rank 0] step:4561/10000 train_time:435325ms step_avg:95.44ms +[2025-08-22 14:05:11] [Rank 0] step:4561/10000 train_time:435325ms step_avg:95.44ms +[2025-08-22 14:05:13] [Rank 0] step:4581/10000 train_time:437293ms step_avg:95.46ms +[2025-08-22 14:05:13] [Rank 0] step:4581/10000 train_time:437293ms step_avg:95.46ms +[2025-08-22 14:05:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:05:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:05:29] [Rank 0] PRINT: step:4600/10000 val_loss:3.7093 svd_entropy: attn_qk:H=0.9111,top10E=0.07,eRank=426.0,q75/q25=11.38 attn_vo:H=0.9159,top10E=0.06,eRank=439.6,q75/q25=11.66 mlp_w1:H=0.9724,top10E=0.04,eRank=639.5,q75/q25=2.76 mlp_w2:H=0.9665,top10E=0.05,eRank=614.9,q75/q25=2.89 vo_prod:H=0.8439,top10E=0.13,eRank=275.1,q75/q25=67.02 train_time:439363ms step_avg:95.51ms +[2025-08-22 14:05:29] [Rank 0] PRINT: step:4600/10000 val_loss:3.7093 svd_entropy: attn_qk:H=0.9111,top10E=0.07,eRank=426.0,q75/q25=11.38 attn_vo:H=0.9159,top10E=0.06,eRank=439.6,q75/q25=11.66 mlp_w1:H=0.9724,top10E=0.04,eRank=639.5,q75/q25=2.76 mlp_w2:H=0.9665,top10E=0.05,eRank=614.9,q75/q25=2.89 vo_prod:H=0.8439,top10E=0.13,eRank=275.1,q75/q25=67.02 train_time:439363ms step_avg:95.51ms +[2025-08-22 14:05:29] [Rank 0] step:4601/10000 train_time:439416ms step_avg:95.50ms +[2025-08-22 14:05:29] [Rank 0] step:4601/10000 train_time:439416ms step_avg:95.50ms +[2025-08-22 14:05:31] [Rank 0] step:4621/10000 train_time:441378ms step_avg:95.52ms +[2025-08-22 14:05:31] [Rank 0] step:4621/10000 train_time:441378ms step_avg:95.52ms +[2025-08-22 14:05:33] [Rank 0] step:4641/10000 train_time:443337ms step_avg:95.53ms 
+[2025-08-22 14:05:33] [Rank 0] step:4641/10000 train_time:443337ms step_avg:95.53ms +[2025-08-22 14:05:35] [Rank 0] step:4661/10000 train_time:445296ms step_avg:95.54ms +[2025-08-22 14:05:35] [Rank 0] step:4661/10000 train_time:445296ms step_avg:95.54ms +[2025-08-22 14:05:37] [Rank 0] step:4681/10000 train_time:447256ms step_avg:95.55ms +[2025-08-22 14:05:37] [Rank 0] step:4681/10000 train_time:447256ms step_avg:95.55ms +[2025-08-22 14:05:39] [Rank 0] step:4701/10000 train_time:449216ms step_avg:95.56ms +[2025-08-22 14:05:39] [Rank 0] step:4701/10000 train_time:449216ms step_avg:95.56ms +[2025-08-22 14:05:41] [Rank 0] step:4721/10000 train_time:451174ms step_avg:95.57ms +[2025-08-22 14:05:41] [Rank 0] step:4721/10000 train_time:451174ms step_avg:95.57ms +[2025-08-22 14:05:43] [Rank 0] step:4741/10000 train_time:453133ms step_avg:95.58ms +[2025-08-22 14:05:43] [Rank 0] step:4741/10000 train_time:453133ms step_avg:95.58ms +[2025-08-22 14:05:45] [Rank 0] step:4761/10000 train_time:455094ms step_avg:95.59ms +[2025-08-22 14:05:45] [Rank 0] step:4761/10000 train_time:455094ms step_avg:95.59ms +[2025-08-22 14:05:47] [Rank 0] step:4781/10000 train_time:457054ms step_avg:95.60ms +[2025-08-22 14:05:47] [Rank 0] step:4781/10000 train_time:457054ms step_avg:95.60ms +[2025-08-22 14:05:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:05:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:06:02] [Rank 0] PRINT: step:4800/10000 val_loss:3.7002 svd_entropy: attn_qk:H=0.9108,top10E=0.07,eRank=425.5,q75/q25=11.39 attn_vo:H=0.9159,top10E=0.06,eRank=439.6,q75/q25=11.61 mlp_w1:H=0.9724,top10E=0.04,eRank=639.5,q75/q25=2.76 mlp_w2:H=0.9665,top10E=0.05,eRank=614.9,q75/q25=2.89 vo_prod:H=0.8441,top10E=0.13,eRank=275.3,q75/q25=66.66 train_time:458971ms step_avg:95.62ms +[2025-08-22 14:06:02] [Rank 0] PRINT: step:4800/10000 val_loss:3.7002 svd_entropy: attn_qk:H=0.9108,top10E=0.07,eRank=425.5,q75/q25=11.39 attn_vo:H=0.9159,top10E=0.06,eRank=439.6,q75/q25=11.61 mlp_w1:H=0.9724,top10E=0.04,eRank=639.5,q75/q25=2.76 mlp_w2:H=0.9665,top10E=0.05,eRank=614.9,q75/q25=2.89 vo_prod:H=0.8441,top10E=0.13,eRank=275.3,q75/q25=66.66 train_time:458971ms step_avg:95.62ms +[2025-08-22 14:06:02] [Rank 0] step:4801/10000 train_time:459024ms step_avg:95.61ms +[2025-08-22 14:06:02] [Rank 0] step:4801/10000 train_time:459024ms step_avg:95.61ms +[2025-08-22 14:06:04] [Rank 0] step:4821/10000 train_time:461005ms step_avg:95.62ms +[2025-08-22 14:06:04] [Rank 0] step:4821/10000 train_time:461005ms step_avg:95.62ms +[2025-08-22 14:06:06] [Rank 0] step:4841/10000 train_time:462960ms step_avg:95.63ms +[2025-08-22 14:06:06] [Rank 0] step:4841/10000 train_time:462960ms step_avg:95.63ms +[2025-08-22 14:06:08] [Rank 0] step:4861/10000 train_time:464918ms step_avg:95.64ms +[2025-08-22 14:06:08] [Rank 0] step:4861/10000 train_time:464918ms step_avg:95.64ms +[2025-08-22 14:06:10] [Rank 0] step:4881/10000 train_time:466874ms step_avg:95.65ms +[2025-08-22 14:06:10] [Rank 0] step:4881/10000 train_time:466874ms step_avg:95.65ms +[2025-08-22 14:06:12] [Rank 0] step:4901/10000 train_time:468829ms step_avg:95.66ms +[2025-08-22 14:06:12] [Rank 0] step:4901/10000 train_time:468829ms step_avg:95.66ms +[2025-08-22 14:06:14] [Rank 0] step:4921/10000 train_time:470789ms step_avg:95.67ms +[2025-08-22 14:06:14] [Rank 0] step:4921/10000 train_time:470789ms step_avg:95.67ms +[2025-08-22 14:06:16] 
[Rank 0] step:4941/10000 train_time:472750ms step_avg:95.68ms +[2025-08-22 14:06:16] [Rank 0] step:4941/10000 train_time:472750ms step_avg:95.68ms +[2025-08-22 14:06:18] [Rank 0] step:4961/10000 train_time:474813ms step_avg:95.71ms +[2025-08-22 14:06:18] [Rank 0] step:4961/10000 train_time:474813ms step_avg:95.71ms +[2025-08-22 14:06:20] [Rank 0] step:4981/10000 train_time:476895ms step_avg:95.74ms +[2025-08-22 14:06:20] [Rank 0] step:4981/10000 train_time:476895ms step_avg:95.74ms +[2025-08-22 14:06:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:06:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:06:36] [Rank 0] PRINT: step:5000/10000 val_loss:3.6906 svd_entropy: attn_qk:H=0.9107,top10E=0.08,eRank=425.0,q75/q25=11.39 attn_vo:H=0.9159,top10E=0.06,eRank=439.8,q75/q25=11.64 mlp_w1:H=0.9724,top10E=0.04,eRank=639.6,q75/q25=2.76 mlp_w2:H=0.9665,top10E=0.05,eRank=614.9,q75/q25=2.89 vo_prod:H=0.8441,top10E=0.13,eRank=275.5,q75/q25=66.84 train_time:478811ms step_avg:95.76ms +[2025-08-22 14:06:36] [Rank 0] PRINT: step:5000/10000 val_loss:3.6906 svd_entropy: attn_qk:H=0.9107,top10E=0.08,eRank=425.0,q75/q25=11.39 attn_vo:H=0.9159,top10E=0.06,eRank=439.8,q75/q25=11.64 mlp_w1:H=0.9724,top10E=0.04,eRank=639.6,q75/q25=2.76 mlp_w2:H=0.9665,top10E=0.05,eRank=614.9,q75/q25=2.89 vo_prod:H=0.8441,top10E=0.13,eRank=275.5,q75/q25=66.84 train_time:478811ms step_avg:95.76ms +[2025-08-22 14:06:36] [Rank 0] step:5001/10000 train_time:478865ms step_avg:95.75ms +[2025-08-22 14:06:36] [Rank 0] step:5001/10000 train_time:478865ms step_avg:95.75ms +[2025-08-22 14:06:38] [Rank 0] step:5021/10000 train_time:480836ms step_avg:95.76ms +[2025-08-22 14:06:38] [Rank 0] step:5021/10000 train_time:480836ms step_avg:95.76ms +[2025-08-22 14:06:40] [Rank 0] step:5041/10000 train_time:482794ms step_avg:95.77ms 
+[2025-08-22 14:06:40] [Rank 0] step:5041/10000 train_time:482794ms step_avg:95.77ms +[2025-08-22 14:06:42] [Rank 0] step:5061/10000 train_time:484749ms step_avg:95.78ms +[2025-08-22 14:06:42] [Rank 0] step:5061/10000 train_time:484749ms step_avg:95.78ms +[2025-08-22 14:06:44] [Rank 0] step:5081/10000 train_time:486710ms step_avg:95.79ms +[2025-08-22 14:06:44] [Rank 0] step:5081/10000 train_time:486710ms step_avg:95.79ms +[2025-08-22 14:06:46] [Rank 0] step:5101/10000 train_time:488667ms step_avg:95.80ms +[2025-08-22 14:06:46] [Rank 0] step:5101/10000 train_time:488667ms step_avg:95.80ms +[2025-08-22 14:06:48] [Rank 0] step:5121/10000 train_time:490627ms step_avg:95.81ms +[2025-08-22 14:06:48] [Rank 0] step:5121/10000 train_time:490627ms step_avg:95.81ms +[2025-08-22 14:06:50] [Rank 0] step:5141/10000 train_time:492594ms step_avg:95.82ms +[2025-08-22 14:06:50] [Rank 0] step:5141/10000 train_time:492594ms step_avg:95.82ms +[2025-08-22 14:06:52] [Rank 0] step:5161/10000 train_time:494555ms step_avg:95.83ms +[2025-08-22 14:06:52] [Rank 0] step:5161/10000 train_time:494555ms step_avg:95.83ms +[2025-08-22 14:06:54] [Rank 0] step:5181/10000 train_time:496521ms step_avg:95.83ms +[2025-08-22 14:06:54] [Rank 0] step:5181/10000 train_time:496521ms step_avg:95.83ms +[2025-08-22 14:06:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:06:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:07:09] [Rank 0] PRINT: step:5200/10000 val_loss:3.6786 svd_entropy: attn_qk:H=0.9105,top10E=0.08,eRank=424.5,q75/q25=11.45 attn_vo:H=0.9160,top10E=0.06,eRank=439.9,q75/q25=11.62 mlp_w1:H=0.9725,top10E=0.04,eRank=639.6,q75/q25=2.75 mlp_w2:H=0.9665,top10E=0.05,eRank=614.9,q75/q25=2.89 vo_prod:H=0.8443,top10E=0.13,eRank=275.9,q75/q25=66.16 train_time:498466ms step_avg:95.86ms +[2025-08-22 14:07:09] [Rank 0] PRINT: step:5200/10000 val_loss:3.6786 svd_entropy: attn_qk:H=0.9105,top10E=0.08,eRank=424.5,q75/q25=11.45 attn_vo:H=0.9160,top10E=0.06,eRank=439.9,q75/q25=11.62 mlp_w1:H=0.9725,top10E=0.04,eRank=639.6,q75/q25=2.75 mlp_w2:H=0.9665,top10E=0.05,eRank=614.9,q75/q25=2.89 vo_prod:H=0.8443,top10E=0.13,eRank=275.9,q75/q25=66.16 train_time:498466ms step_avg:95.86ms +[2025-08-22 14:07:09] [Rank 0] step:5201/10000 train_time:498519ms step_avg:95.85ms +[2025-08-22 14:07:09] [Rank 0] step:5201/10000 train_time:498519ms step_avg:95.85ms +[2025-08-22 14:07:11] [Rank 0] step:5221/10000 train_time:500511ms step_avg:95.86ms +[2025-08-22 14:07:11] [Rank 0] step:5221/10000 train_time:500511ms step_avg:95.86ms +[2025-08-22 14:07:13] [Rank 0] step:5241/10000 train_time:502501ms step_avg:95.88ms +[2025-08-22 14:07:13] [Rank 0] step:5241/10000 train_time:502501ms step_avg:95.88ms +[2025-08-22 14:07:15] [Rank 0] step:5261/10000 train_time:504493ms step_avg:95.89ms +[2025-08-22 14:07:15] [Rank 0] step:5261/10000 train_time:504493ms step_avg:95.89ms +[2025-08-22 14:07:17] [Rank 0] step:5281/10000 train_time:506487ms step_avg:95.91ms +[2025-08-22 14:07:17] [Rank 0] step:5281/10000 train_time:506487ms step_avg:95.91ms +[2025-08-22 14:07:19] [Rank 0] step:5301/10000 train_time:508494ms step_avg:95.92ms +[2025-08-22 14:07:19] [Rank 0] step:5301/10000 train_time:508494ms step_avg:95.92ms +[2025-08-22 14:07:21] [Rank 0] step:5321/10000 train_time:510571ms step_avg:95.95ms +[2025-08-22 14:07:21] [Rank 0] step:5321/10000 train_time:510571ms step_avg:95.95ms +[2025-08-22 14:07:24] 
[Rank 0] step:5341/10000 train_time:512654ms step_avg:95.98ms +[2025-08-22 14:07:24] [Rank 0] step:5341/10000 train_time:512654ms step_avg:95.98ms +[2025-08-22 14:07:26] [Rank 0] step:5361/10000 train_time:514651ms step_avg:96.00ms +[2025-08-22 14:07:26] [Rank 0] step:5361/10000 train_time:514651ms step_avg:96.00ms +[2025-08-22 14:07:28] [Rank 0] step:5381/10000 train_time:516651ms step_avg:96.01ms +[2025-08-22 14:07:28] [Rank 0] step:5381/10000 train_time:516651ms step_avg:96.01ms +[2025-08-22 14:07:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:07:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:07:43] [Rank 0] PRINT: step:5400/10000 val_loss:3.6684 svd_entropy: attn_qk:H=0.9103,top10E=0.08,eRank=423.9,q75/q25=11.46 attn_vo:H=0.9160,top10E=0.06,eRank=439.9,q75/q25=11.57 mlp_w1:H=0.9725,top10E=0.04,eRank=639.7,q75/q25=2.75 mlp_w2:H=0.9665,top10E=0.05,eRank=614.8,q75/q25=2.88 vo_prod:H=0.8444,top10E=0.13,eRank=276.1,q75/q25=66.15 train_time:518602ms step_avg:96.04ms +[2025-08-22 14:07:43] [Rank 0] PRINT: step:5400/10000 val_loss:3.6684 svd_entropy: attn_qk:H=0.9103,top10E=0.08,eRank=423.9,q75/q25=11.46 attn_vo:H=0.9160,top10E=0.06,eRank=439.9,q75/q25=11.57 mlp_w1:H=0.9725,top10E=0.04,eRank=639.7,q75/q25=2.75 mlp_w2:H=0.9665,top10E=0.05,eRank=614.8,q75/q25=2.88 vo_prod:H=0.8444,top10E=0.13,eRank=276.1,q75/q25=66.15 train_time:518602ms step_avg:96.04ms +[2025-08-22 14:07:43] [Rank 0] step:5401/10000 train_time:518655ms step_avg:96.03ms +[2025-08-22 14:07:43] [Rank 0] step:5401/10000 train_time:518655ms step_avg:96.03ms +[2025-08-22 14:07:45] [Rank 0] step:5421/10000 train_time:520660ms step_avg:96.04ms +[2025-08-22 14:07:45] [Rank 0] step:5421/10000 train_time:520660ms step_avg:96.04ms +[2025-08-22 14:07:47] [Rank 0] step:5441/10000 train_time:522651ms step_avg:96.06ms 
+[2025-08-22 14:07:47] [Rank 0] step:5441/10000 train_time:522651ms step_avg:96.06ms +[2025-08-22 14:07:49] [Rank 0] step:5461/10000 train_time:524646ms step_avg:96.07ms +[2025-08-22 14:07:49] [Rank 0] step:5461/10000 train_time:524646ms step_avg:96.07ms +[2025-08-22 14:07:51] [Rank 0] step:5481/10000 train_time:526635ms step_avg:96.08ms +[2025-08-22 14:07:51] [Rank 0] step:5481/10000 train_time:526635ms step_avg:96.08ms +[2025-08-22 14:07:53] [Rank 0] step:5501/10000 train_time:528635ms step_avg:96.10ms +[2025-08-22 14:07:53] [Rank 0] step:5501/10000 train_time:528635ms step_avg:96.10ms +[2025-08-22 14:07:55] [Rank 0] step:5521/10000 train_time:530632ms step_avg:96.11ms +[2025-08-22 14:07:55] [Rank 0] step:5521/10000 train_time:530632ms step_avg:96.11ms +[2025-08-22 14:07:57] [Rank 0] step:5541/10000 train_time:532625ms step_avg:96.12ms +[2025-08-22 14:07:57] [Rank 0] step:5541/10000 train_time:532625ms step_avg:96.12ms +[2025-08-22 14:07:59] [Rank 0] step:5561/10000 train_time:534619ms step_avg:96.14ms +[2025-08-22 14:07:59] [Rank 0] step:5561/10000 train_time:534619ms step_avg:96.14ms +[2025-08-22 14:08:01] [Rank 0] step:5581/10000 train_time:536612ms step_avg:96.15ms +[2025-08-22 14:08:01] [Rank 0] step:5581/10000 train_time:536612ms step_avg:96.15ms +[2025-08-22 14:08:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:08:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:08:17] [Rank 0] PRINT: step:5600/10000 val_loss:3.6596 svd_entropy: attn_qk:H=0.9101,top10E=0.08,eRank=423.4,q75/q25=11.48 attn_vo:H=0.9160,top10E=0.06,eRank=439.9,q75/q25=11.57 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.75 mlp_w2:H=0.9665,top10E=0.05,eRank=614.8,q75/q25=2.88 vo_prod:H=0.8445,top10E=0.13,eRank=276.2,q75/q25=66.01 train_time:538565ms step_avg:96.17ms +[2025-08-22 14:08:17] [Rank 0] PRINT: step:5600/10000 val_loss:3.6596 svd_entropy: attn_qk:H=0.9101,top10E=0.08,eRank=423.4,q75/q25=11.48 attn_vo:H=0.9160,top10E=0.06,eRank=439.9,q75/q25=11.57 mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.75 mlp_w2:H=0.9665,top10E=0.05,eRank=614.8,q75/q25=2.88 vo_prod:H=0.8445,top10E=0.13,eRank=276.2,q75/q25=66.01 train_time:538565ms step_avg:96.17ms +[2025-08-22 14:08:17] [Rank 0] step:5601/10000 train_time:538618ms step_avg:96.16ms +[2025-08-22 14:08:17] [Rank 0] step:5601/10000 train_time:538618ms step_avg:96.16ms +[2025-08-22 14:08:19] [Rank 0] step:5621/10000 train_time:540618ms step_avg:96.18ms +[2025-08-22 14:08:19] [Rank 0] step:5621/10000 train_time:540618ms step_avg:96.18ms +[2025-08-22 14:08:21] [Rank 0] step:5641/10000 train_time:542607ms step_avg:96.19ms +[2025-08-22 14:08:21] [Rank 0] step:5641/10000 train_time:542607ms step_avg:96.19ms +[2025-08-22 14:08:23] [Rank 0] step:5661/10000 train_time:544594ms step_avg:96.20ms +[2025-08-22 14:08:23] [Rank 0] step:5661/10000 train_time:544594ms step_avg:96.20ms +[2025-08-22 14:08:25] [Rank 0] step:5681/10000 train_time:546662ms step_avg:96.23ms +[2025-08-22 14:08:25] [Rank 0] step:5681/10000 train_time:546662ms step_avg:96.23ms +[2025-08-22 14:08:27] [Rank 0] step:5701/10000 train_time:548787ms step_avg:96.26ms +[2025-08-22 14:08:27] [Rank 0] step:5701/10000 train_time:548787ms step_avg:96.26ms +[2025-08-22 14:08:29] [Rank 0] step:5721/10000 train_time:550782ms step_avg:96.27ms +[2025-08-22 14:08:29] [Rank 0] step:5721/10000 train_time:550782ms step_avg:96.27ms +[2025-08-22 14:08:31] 
[Rank 0] step:5741/10000 train_time:552771ms step_avg:96.28ms +[2025-08-22 14:08:31] [Rank 0] step:5741/10000 train_time:552771ms step_avg:96.28ms +[2025-08-22 14:08:33] [Rank 0] step:5761/10000 train_time:554763ms step_avg:96.30ms +[2025-08-22 14:08:33] [Rank 0] step:5761/10000 train_time:554763ms step_avg:96.30ms +[2025-08-22 14:08:35] [Rank 0] step:5781/10000 train_time:556758ms step_avg:96.31ms +[2025-08-22 14:08:35] [Rank 0] step:5781/10000 train_time:556758ms step_avg:96.31ms +[2025-08-22 14:08:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:08:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:08:51] [Rank 0] PRINT: step:5800/10000 val_loss:3.6571 svd_entropy: attn_qk:H=0.9099,top10E=0.08,eRank=422.9,q75/q25=11.49 attn_vo:H=0.9160,top10E=0.06,eRank=440.0,q75/q25=11.61 mlp_w1:H=0.9725,top10E=0.04,eRank=639.9,q75/q25=2.75 mlp_w2:H=0.9664,top10E=0.05,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8446,top10E=0.13,eRank=276.4,q75/q25=65.70 train_time:558709ms step_avg:96.33ms +[2025-08-22 14:08:51] [Rank 0] PRINT: step:5800/10000 val_loss:3.6571 svd_entropy: attn_qk:H=0.9099,top10E=0.08,eRank=422.9,q75/q25=11.49 attn_vo:H=0.9160,top10E=0.06,eRank=440.0,q75/q25=11.61 mlp_w1:H=0.9725,top10E=0.04,eRank=639.9,q75/q25=2.75 mlp_w2:H=0.9664,top10E=0.05,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8446,top10E=0.13,eRank=276.4,q75/q25=65.70 train_time:558709ms step_avg:96.33ms +[2025-08-22 14:08:51] [Rank 0] step:5801/10000 train_time:558762ms step_avg:96.32ms +[2025-08-22 14:08:51] [Rank 0] step:5801/10000 train_time:558762ms step_avg:96.32ms +[2025-08-22 14:08:53] [Rank 0] step:5821/10000 train_time:560769ms step_avg:96.34ms +[2025-08-22 14:08:53] [Rank 0] step:5821/10000 train_time:560769ms step_avg:96.34ms +[2025-08-22 14:08:55] [Rank 0] step:5841/10000 train_time:562758ms step_avg:96.35ms 
+[2025-08-22 14:08:55] [Rank 0] step:5841/10000 train_time:562758ms step_avg:96.35ms +[2025-08-22 14:08:57] [Rank 0] step:5861/10000 train_time:564752ms step_avg:96.36ms +[2025-08-22 14:08:57] [Rank 0] step:5861/10000 train_time:564752ms step_avg:96.36ms +[2025-08-22 14:08:59] [Rank 0] step:5881/10000 train_time:566743ms step_avg:96.37ms +[2025-08-22 14:08:59] [Rank 0] step:5881/10000 train_time:566743ms step_avg:96.37ms +[2025-08-22 14:09:01] [Rank 0] step:5901/10000 train_time:568736ms step_avg:96.38ms +[2025-08-22 14:09:01] [Rank 0] step:5901/10000 train_time:568736ms step_avg:96.38ms +[2025-08-22 14:09:03] [Rank 0] step:5921/10000 train_time:570730ms step_avg:96.39ms +[2025-08-22 14:09:03] [Rank 0] step:5921/10000 train_time:570730ms step_avg:96.39ms +[2025-08-22 14:09:05] [Rank 0] step:5941/10000 train_time:572730ms step_avg:96.40ms +[2025-08-22 14:09:05] [Rank 0] step:5941/10000 train_time:572730ms step_avg:96.40ms +[2025-08-22 14:09:07] [Rank 0] step:5961/10000 train_time:574724ms step_avg:96.41ms +[2025-08-22 14:09:07] [Rank 0] step:5961/10000 train_time:574724ms step_avg:96.41ms +[2025-08-22 14:09:09] [Rank 0] step:5981/10000 train_time:576719ms step_avg:96.43ms +[2025-08-22 14:09:09] [Rank 0] step:5981/10000 train_time:576719ms step_avg:96.43ms +[2025-08-22 14:09:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:09:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:09:25] [Rank 0] PRINT: step:6000/10000 val_loss:3.6379 svd_entropy: attn_qk:H=0.9097,top10E=0.08,eRank=422.5,q75/q25=11.55 attn_vo:H=0.9160,top10E=0.06,eRank=440.0,q75/q25=11.62 mlp_w1:H=0.9725,top10E=0.04,eRank=639.9,q75/q25=2.75 mlp_w2:H=0.9664,top10E=0.05,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8447,top10E=0.13,eRank=276.6,q75/q25=65.84 train_time:578672ms step_avg:96.45ms +[2025-08-22 14:09:25] [Rank 0] PRINT: step:6000/10000 val_loss:3.6379 svd_entropy: attn_qk:H=0.9097,top10E=0.08,eRank=422.5,q75/q25=11.55 attn_vo:H=0.9160,top10E=0.06,eRank=440.0,q75/q25=11.62 mlp_w1:H=0.9725,top10E=0.04,eRank=639.9,q75/q25=2.75 mlp_w2:H=0.9664,top10E=0.05,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8447,top10E=0.13,eRank=276.6,q75/q25=65.84 train_time:578672ms step_avg:96.45ms +[2025-08-22 14:09:25] [Rank 0] step:6001/10000 train_time:578726ms step_avg:96.44ms +[2025-08-22 14:09:25] [Rank 0] step:6001/10000 train_time:578726ms step_avg:96.44ms +[2025-08-22 14:09:27] [Rank 0] step:6021/10000 train_time:580850ms step_avg:96.47ms +[2025-08-22 14:09:27] [Rank 0] step:6021/10000 train_time:580850ms step_avg:96.47ms +[2025-08-22 14:09:29] [Rank 0] step:6041/10000 train_time:582916ms step_avg:96.49ms +[2025-08-22 14:09:29] [Rank 0] step:6041/10000 train_time:582916ms step_avg:96.49ms +[2025-08-22 14:09:31] [Rank 0] step:6061/10000 train_time:584924ms step_avg:96.51ms +[2025-08-22 14:09:31] [Rank 0] step:6061/10000 train_time:584924ms step_avg:96.51ms +[2025-08-22 14:09:33] [Rank 0] step:6081/10000 train_time:586921ms step_avg:96.52ms +[2025-08-22 14:09:33] [Rank 0] step:6081/10000 train_time:586921ms step_avg:96.52ms +[2025-08-22 14:09:35] [Rank 0] step:6101/10000 train_time:588925ms step_avg:96.53ms +[2025-08-22 14:09:35] [Rank 0] step:6101/10000 train_time:588925ms step_avg:96.53ms +[2025-08-22 14:09:37] [Rank 0] step:6121/10000 train_time:591208ms step_avg:96.59ms +[2025-08-22 14:09:37] [Rank 0] step:6121/10000 train_time:591208ms step_avg:96.59ms +[2025-08-22 14:09:39] 
[Rank 0] step:6141/10000 train_time:593218ms step_avg:96.60ms +[2025-08-22 14:09:39] [Rank 0] step:6141/10000 train_time:593218ms step_avg:96.60ms +[2025-08-22 14:09:41] [Rank 0] step:6161/10000 train_time:595223ms step_avg:96.61ms +[2025-08-22 14:09:41] [Rank 0] step:6161/10000 train_time:595223ms step_avg:96.61ms +[2025-08-22 14:09:43] [Rank 0] step:6181/10000 train_time:597224ms step_avg:96.62ms +[2025-08-22 14:09:43] [Rank 0] step:6181/10000 train_time:597224ms step_avg:96.62ms +[2025-08-22 14:09:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:09:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:09:59] [Rank 0] PRINT: step:6200/10000 val_loss:3.6237 svd_entropy: attn_qk:H=0.9095,top10E=0.08,eRank=422.0,q75/q25=11.57 attn_vo:H=0.9160,top10E=0.06,eRank=440.1,q75/q25=11.62 mlp_w1:H=0.9725,top10E=0.04,eRank=639.9,q75/q25=2.75 mlp_w2:H=0.9664,top10E=0.05,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8447,top10E=0.13,eRank=276.8,q75/q25=65.82 train_time:599180ms step_avg:96.64ms +[2025-08-22 14:09:59] [Rank 0] PRINT: step:6200/10000 val_loss:3.6237 svd_entropy: attn_qk:H=0.9095,top10E=0.08,eRank=422.0,q75/q25=11.57 attn_vo:H=0.9160,top10E=0.06,eRank=440.1,q75/q25=11.62 mlp_w1:H=0.9725,top10E=0.04,eRank=639.9,q75/q25=2.75 mlp_w2:H=0.9664,top10E=0.05,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8447,top10E=0.13,eRank=276.8,q75/q25=65.82 train_time:599180ms step_avg:96.64ms +[2025-08-22 14:09:59] [Rank 0] step:6201/10000 train_time:599234ms step_avg:96.64ms +[2025-08-22 14:09:59] [Rank 0] step:6201/10000 train_time:599234ms step_avg:96.64ms +[2025-08-22 14:10:01] [Rank 0] step:6221/10000 train_time:601247ms step_avg:96.65ms +[2025-08-22 14:10:01] [Rank 0] step:6221/10000 train_time:601247ms step_avg:96.65ms +[2025-08-22 14:10:03] [Rank 0] step:6241/10000 train_time:603243ms step_avg:96.66ms 
+[2025-08-22 14:10:03] [Rank 0] step:6241/10000 train_time:603243ms step_avg:96.66ms +[2025-08-22 14:10:05] [Rank 0] step:6261/10000 train_time:605239ms step_avg:96.67ms +[2025-08-22 14:10:05] [Rank 0] step:6261/10000 train_time:605239ms step_avg:96.67ms +[2025-08-22 14:10:07] [Rank 0] step:6281/10000 train_time:607239ms step_avg:96.68ms +[2025-08-22 14:10:07] [Rank 0] step:6281/10000 train_time:607239ms step_avg:96.68ms +[2025-08-22 14:10:09] [Rank 0] step:6301/10000 train_time:609234ms step_avg:96.69ms +[2025-08-22 14:10:09] [Rank 0] step:6301/10000 train_time:609234ms step_avg:96.69ms +[2025-08-22 14:10:11] [Rank 0] step:6321/10000 train_time:611234ms step_avg:96.70ms +[2025-08-22 14:10:11] [Rank 0] step:6321/10000 train_time:611234ms step_avg:96.70ms +[2025-08-22 14:10:13] [Rank 0] step:6341/10000 train_time:613232ms step_avg:96.71ms +[2025-08-22 14:10:13] [Rank 0] step:6341/10000 train_time:613232ms step_avg:96.71ms +[2025-08-22 14:10:15] [Rank 0] step:6361/10000 train_time:615237ms step_avg:96.72ms +[2025-08-22 14:10:15] [Rank 0] step:6361/10000 train_time:615237ms step_avg:96.72ms +[2025-08-22 14:10:17] [Rank 0] step:6381/10000 train_time:617240ms step_avg:96.73ms +[2025-08-22 14:10:17] [Rank 0] step:6381/10000 train_time:617240ms step_avg:96.73ms +[2025-08-22 14:10:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:10:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:10:32] [Rank 0] PRINT: step:6400/10000 val_loss:3.6134 svd_entropy: attn_qk:H=0.9094,top10E=0.08,eRank=421.6,q75/q25=11.61 attn_vo:H=0.9161,top10E=0.06,eRank=440.2,q75/q25=11.64 mlp_w1:H=0.9725,top10E=0.04,eRank=640.0,q75/q25=2.75 mlp_w2:H=0.9664,top10E=0.05,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8448,top10E=0.13,eRank=276.9,q75/q25=65.70 train_time:619192ms step_avg:96.75ms +[2025-08-22 14:10:32] [Rank 0] PRINT: step:6400/10000 val_loss:3.6134 svd_entropy: attn_qk:H=0.9094,top10E=0.08,eRank=421.6,q75/q25=11.61 attn_vo:H=0.9161,top10E=0.06,eRank=440.2,q75/q25=11.64 mlp_w1:H=0.9725,top10E=0.04,eRank=640.0,q75/q25=2.75 mlp_w2:H=0.9664,top10E=0.05,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8448,top10E=0.13,eRank=276.9,q75/q25=65.70 train_time:619192ms step_avg:96.75ms +[2025-08-22 14:10:33] [Rank 0] step:6401/10000 train_time:619246ms step_avg:96.74ms +[2025-08-22 14:10:33] [Rank 0] step:6401/10000 train_time:619246ms step_avg:96.74ms +[2025-08-22 14:10:35] [Rank 0] step:6421/10000 train_time:621257ms step_avg:96.75ms +[2025-08-22 14:10:35] [Rank 0] step:6421/10000 train_time:621257ms step_avg:96.75ms +[2025-08-22 14:10:37] [Rank 0] step:6441/10000 train_time:623251ms step_avg:96.76ms +[2025-08-22 14:10:37] [Rank 0] step:6441/10000 train_time:623251ms step_avg:96.76ms +[2025-08-22 14:10:39] [Rank 0] step:6461/10000 train_time:625249ms step_avg:96.77ms +[2025-08-22 14:10:39] [Rank 0] step:6461/10000 train_time:625249ms step_avg:96.77ms +[2025-08-22 14:10:41] [Rank 0] step:6481/10000 train_time:627251ms step_avg:96.78ms +[2025-08-22 14:10:41] [Rank 0] step:6481/10000 train_time:627251ms step_avg:96.78ms +[2025-08-22 14:10:43] [Rank 0] step:6501/10000 train_time:629247ms step_avg:96.79ms +[2025-08-22 14:10:43] [Rank 0] step:6501/10000 train_time:629247ms step_avg:96.79ms +[2025-08-22 14:10:45] [Rank 0] step:6521/10000 train_time:631239ms step_avg:96.80ms +[2025-08-22 14:10:45] [Rank 0] step:6521/10000 train_time:631239ms step_avg:96.80ms +[2025-08-22 14:10:47] 
[Rank 0] step:6541/10000 train_time:633238ms step_avg:96.81ms +[2025-08-22 14:10:47] [Rank 0] step:6541/10000 train_time:633238ms step_avg:96.81ms +[2025-08-22 14:10:49] [Rank 0] step:6561/10000 train_time:635239ms step_avg:96.82ms +[2025-08-22 14:10:49] [Rank 0] step:6561/10000 train_time:635239ms step_avg:96.82ms +[2025-08-22 14:10:51] [Rank 0] step:6581/10000 train_time:637232ms step_avg:96.83ms +[2025-08-22 14:10:51] [Rank 0] step:6581/10000 train_time:637232ms step_avg:96.83ms +[2025-08-22 14:10:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:10:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:11:06] [Rank 0] PRINT: step:6600/10000 val_loss:3.5985 svd_entropy: attn_qk:H=0.9092,top10E=0.08,eRank=421.2,q75/q25=11.62 attn_vo:H=0.9161,top10E=0.06,eRank=440.3,q75/q25=11.68 mlp_w1:H=0.9725,top10E=0.04,eRank=640.0,q75/q25=2.74 mlp_w2:H=0.9664,top10E=0.05,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8449,top10E=0.13,eRank=277.2,q75/q25=65.39 train_time:639187ms step_avg:96.85ms +[2025-08-22 14:11:06] [Rank 0] PRINT: step:6600/10000 val_loss:3.5985 svd_entropy: attn_qk:H=0.9092,top10E=0.08,eRank=421.2,q75/q25=11.62 attn_vo:H=0.9161,top10E=0.06,eRank=440.3,q75/q25=11.68 mlp_w1:H=0.9725,top10E=0.04,eRank=640.0,q75/q25=2.74 mlp_w2:H=0.9664,top10E=0.05,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8449,top10E=0.13,eRank=277.2,q75/q25=65.39 train_time:639187ms step_avg:96.85ms +[2025-08-22 14:11:06] [Rank 0] step:6601/10000 train_time:639240ms step_avg:96.84ms +[2025-08-22 14:11:06] [Rank 0] step:6601/10000 train_time:639240ms step_avg:96.84ms +[2025-08-22 14:11:08] [Rank 0] step:6621/10000 train_time:641256ms step_avg:96.85ms +[2025-08-22 14:11:08] [Rank 0] step:6621/10000 train_time:641256ms step_avg:96.85ms +[2025-08-22 14:11:10] [Rank 0] step:6641/10000 train_time:643258ms step_avg:96.86ms 
+[2025-08-22 14:11:10] [Rank 0] step:6641/10000 train_time:643258ms step_avg:96.86ms +[2025-08-22 14:11:12] [Rank 0] step:6661/10000 train_time:645252ms step_avg:96.87ms +[2025-08-22 14:11:12] [Rank 0] step:6661/10000 train_time:645252ms step_avg:96.87ms +[2025-08-22 14:11:14] [Rank 0] step:6681/10000 train_time:647272ms step_avg:96.88ms +[2025-08-22 14:11:14] [Rank 0] step:6681/10000 train_time:647272ms step_avg:96.88ms +[2025-08-22 14:11:16] [Rank 0] step:6701/10000 train_time:649308ms step_avg:96.90ms +[2025-08-22 14:11:16] [Rank 0] step:6701/10000 train_time:649308ms step_avg:96.90ms +[2025-08-22 14:11:18] [Rank 0] step:6721/10000 train_time:651333ms step_avg:96.91ms +[2025-08-22 14:11:18] [Rank 0] step:6721/10000 train_time:651333ms step_avg:96.91ms +[2025-08-22 14:11:20] [Rank 0] step:6741/10000 train_time:653356ms step_avg:96.92ms +[2025-08-22 14:11:20] [Rank 0] step:6741/10000 train_time:653356ms step_avg:96.92ms +[2025-08-22 14:11:22] [Rank 0] step:6761/10000 train_time:655383ms step_avg:96.94ms +[2025-08-22 14:11:22] [Rank 0] step:6761/10000 train_time:655383ms step_avg:96.94ms +[2025-08-22 14:11:24] [Rank 0] step:6781/10000 train_time:657411ms step_avg:96.95ms +[2025-08-22 14:11:24] [Rank 0] step:6781/10000 train_time:657411ms step_avg:96.95ms +[2025-08-22 14:11:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:11:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:11:40] [Rank 0] PRINT: step:6800/10000 val_loss:3.5827 svd_entropy: attn_qk:H=0.9091,top10E=0.08,eRank=420.7,q75/q25=11.61 attn_vo:H=0.9161,top10E=0.06,eRank=440.4,q75/q25=11.66 mlp_w1:H=0.9725,top10E=0.04,eRank=640.0,q75/q25=2.74 mlp_w2:H=0.9664,top10E=0.05,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8451,top10E=0.13,eRank=277.4,q75/q25=65.23 train_time:659403ms step_avg:96.97ms +[2025-08-22 14:11:40] [Rank 0] PRINT: step:6800/10000 val_loss:3.5827 svd_entropy: attn_qk:H=0.9091,top10E=0.08,eRank=420.7,q75/q25=11.61 attn_vo:H=0.9161,top10E=0.06,eRank=440.4,q75/q25=11.66 mlp_w1:H=0.9725,top10E=0.04,eRank=640.0,q75/q25=2.74 mlp_w2:H=0.9664,top10E=0.05,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8451,top10E=0.13,eRank=277.4,q75/q25=65.23 train_time:659403ms step_avg:96.97ms +[2025-08-22 14:11:40] [Rank 0] step:6801/10000 train_time:659457ms step_avg:96.96ms +[2025-08-22 14:11:40] [Rank 0] step:6801/10000 train_time:659457ms step_avg:96.96ms +[2025-08-22 14:11:42] [Rank 0] step:6821/10000 train_time:661490ms step_avg:96.98ms +[2025-08-22 14:11:42] [Rank 0] step:6821/10000 train_time:661490ms step_avg:96.98ms +[2025-08-22 14:11:44] [Rank 0] step:6841/10000 train_time:663517ms step_avg:96.99ms +[2025-08-22 14:11:44] [Rank 0] step:6841/10000 train_time:663517ms step_avg:96.99ms +[2025-08-22 14:11:46] [Rank 0] step:6861/10000 train_time:665541ms step_avg:97.00ms +[2025-08-22 14:11:46] [Rank 0] step:6861/10000 train_time:665541ms step_avg:97.00ms +[2025-08-22 14:11:48] [Rank 0] step:6881/10000 train_time:667576ms step_avg:97.02ms +[2025-08-22 14:11:48] [Rank 0] step:6881/10000 train_time:667576ms step_avg:97.02ms +[2025-08-22 14:11:50] [Rank 0] step:6901/10000 train_time:669600ms step_avg:97.03ms +[2025-08-22 14:11:50] [Rank 0] step:6901/10000 train_time:669600ms step_avg:97.03ms +[2025-08-22 14:11:52] [Rank 0] step:6921/10000 train_time:671626ms step_avg:97.04ms +[2025-08-22 14:11:52] [Rank 0] step:6921/10000 train_time:671626ms step_avg:97.04ms +[2025-08-22 14:11:54] 
[Rank 0] step:6941/10000 train_time:673661ms step_avg:97.06ms +[2025-08-22 14:11:54] [Rank 0] step:6941/10000 train_time:673661ms step_avg:97.06ms +[2025-08-22 14:11:56] [Rank 0] step:6961/10000 train_time:675703ms step_avg:97.07ms +[2025-08-22 14:11:56] [Rank 0] step:6961/10000 train_time:675703ms step_avg:97.07ms +[2025-08-22 14:11:59] [Rank 0] step:6981/10000 train_time:677737ms step_avg:97.08ms +[2025-08-22 14:11:59] [Rank 0] step:6981/10000 train_time:677737ms step_avg:97.08ms +[2025-08-22 14:12:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:12:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:12:14] [Rank 0] PRINT: step:7000/10000 val_loss:3.5686 svd_entropy: attn_qk:H=0.9089,top10E=0.08,eRank=420.2,q75/q25=11.62 attn_vo:H=0.9162,top10E=0.06,eRank=440.5,q75/q25=11.67 mlp_w1:H=0.9725,top10E=0.04,eRank=640.0,q75/q25=2.74 mlp_w2:H=0.9664,top10E=0.05,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8451,top10E=0.13,eRank=277.6,q75/q25=64.92 train_time:679726ms step_avg:97.10ms +[2025-08-22 14:12:14] [Rank 0] PRINT: step:7000/10000 val_loss:3.5686 svd_entropy: attn_qk:H=0.9089,top10E=0.08,eRank=420.2,q75/q25=11.62 attn_vo:H=0.9162,top10E=0.06,eRank=440.5,q75/q25=11.67 mlp_w1:H=0.9725,top10E=0.04,eRank=640.0,q75/q25=2.74 mlp_w2:H=0.9664,top10E=0.05,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8451,top10E=0.13,eRank=277.6,q75/q25=64.92 train_time:679726ms step_avg:97.10ms +[2025-08-22 14:12:14] [Rank 0] step:7001/10000 train_time:679781ms step_avg:97.10ms +[2025-08-22 14:12:14] [Rank 0] step:7001/10000 train_time:679781ms step_avg:97.10ms +[2025-08-22 14:12:16] [Rank 0] step:7021/10000 train_time:681810ms step_avg:97.11ms +[2025-08-22 14:12:16] [Rank 0] step:7021/10000 train_time:681810ms step_avg:97.11ms +[2025-08-22 14:12:18] [Rank 0] step:7041/10000 train_time:683832ms step_avg:97.12ms 
+[2025-08-22 14:12:18] [Rank 0] step:7041/10000 train_time:683832ms step_avg:97.12ms +[2025-08-22 14:12:20] [Rank 0] step:7061/10000 train_time:685857ms step_avg:97.13ms +[2025-08-22 14:12:20] [Rank 0] step:7061/10000 train_time:685857ms step_avg:97.13ms +[2025-08-22 14:12:22] [Rank 0] step:7081/10000 train_time:687882ms step_avg:97.14ms +[2025-08-22 14:12:22] [Rank 0] step:7081/10000 train_time:687882ms step_avg:97.14ms +[2025-08-22 14:12:24] [Rank 0] step:7101/10000 train_time:689914ms step_avg:97.16ms +[2025-08-22 14:12:24] [Rank 0] step:7101/10000 train_time:689914ms step_avg:97.16ms +[2025-08-22 14:12:26] [Rank 0] step:7121/10000 train_time:691936ms step_avg:97.17ms +[2025-08-22 14:12:26] [Rank 0] step:7121/10000 train_time:691936ms step_avg:97.17ms +[2025-08-22 14:12:28] [Rank 0] step:7141/10000 train_time:693964ms step_avg:97.18ms +[2025-08-22 14:12:28] [Rank 0] step:7141/10000 train_time:693964ms step_avg:97.18ms +[2025-08-22 14:12:30] [Rank 0] step:7161/10000 train_time:695994ms step_avg:97.19ms +[2025-08-22 14:12:30] [Rank 0] step:7161/10000 train_time:695994ms step_avg:97.19ms +[2025-08-22 14:12:32] [Rank 0] step:7181/10000 train_time:698025ms step_avg:97.20ms +[2025-08-22 14:12:32] [Rank 0] step:7181/10000 train_time:698025ms step_avg:97.20ms +[2025-08-22 14:12:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:12:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:12:48] [Rank 0] PRINT: step:7200/10000 val_loss:3.5571 svd_entropy: attn_qk:H=0.9087,top10E=0.08,eRank=419.8,q75/q25=11.67 attn_vo:H=0.9162,top10E=0.06,eRank=440.6,q75/q25=11.64 mlp_w1:H=0.9725,top10E=0.04,eRank=640.0,q75/q25=2.74 mlp_w2:H=0.9665,top10E=0.05,eRank=614.9,q75/q25=2.86 vo_prod:H=0.8453,top10E=0.13,eRank=277.8,q75/q25=64.97 train_time:700015ms step_avg:97.22ms +[2025-08-22 14:12:48] [Rank 0] PRINT: step:7200/10000 val_loss:3.5571 svd_entropy: attn_qk:H=0.9087,top10E=0.08,eRank=419.8,q75/q25=11.67 attn_vo:H=0.9162,top10E=0.06,eRank=440.6,q75/q25=11.64 mlp_w1:H=0.9725,top10E=0.04,eRank=640.0,q75/q25=2.74 mlp_w2:H=0.9665,top10E=0.05,eRank=614.9,q75/q25=2.86 vo_prod:H=0.8453,top10E=0.13,eRank=277.8,q75/q25=64.97 train_time:700015ms step_avg:97.22ms +[2025-08-22 14:12:48] [Rank 0] step:7201/10000 train_time:700069ms step_avg:97.22ms +[2025-08-22 14:12:48] [Rank 0] step:7201/10000 train_time:700069ms step_avg:97.22ms +[2025-08-22 14:12:50] [Rank 0] step:7221/10000 train_time:702112ms step_avg:97.23ms +[2025-08-22 14:12:50] [Rank 0] step:7221/10000 train_time:702112ms step_avg:97.23ms +[2025-08-22 14:12:52] [Rank 0] step:7241/10000 train_time:704138ms step_avg:97.24ms +[2025-08-22 14:12:52] [Rank 0] step:7241/10000 train_time:704138ms step_avg:97.24ms +[2025-08-22 14:12:54] [Rank 0] step:7261/10000 train_time:706161ms step_avg:97.25ms +[2025-08-22 14:12:54] [Rank 0] step:7261/10000 train_time:706161ms step_avg:97.25ms +[2025-08-22 14:12:56] [Rank 0] step:7281/10000 train_time:708190ms step_avg:97.27ms +[2025-08-22 14:12:56] [Rank 0] step:7281/10000 train_time:708190ms step_avg:97.27ms +[2025-08-22 14:12:58] [Rank 0] step:7301/10000 train_time:710213ms step_avg:97.28ms +[2025-08-22 14:12:58] [Rank 0] step:7301/10000 train_time:710213ms step_avg:97.28ms +[2025-08-22 14:13:00] [Rank 0] step:7321/10000 train_time:712249ms step_avg:97.29ms +[2025-08-22 14:13:00] [Rank 0] step:7321/10000 train_time:712249ms step_avg:97.29ms +[2025-08-22 14:13:02] 
[Rank 0] step:7341/10000 train_time:714277ms step_avg:97.30ms +[2025-08-22 14:13:02] [Rank 0] step:7341/10000 train_time:714277ms step_avg:97.30ms +[2025-08-22 14:13:04] [Rank 0] step:7361/10000 train_time:716312ms step_avg:97.31ms +[2025-08-22 14:13:04] [Rank 0] step:7361/10000 train_time:716312ms step_avg:97.31ms +[2025-08-22 14:13:06] [Rank 0] step:7381/10000 train_time:718344ms step_avg:97.32ms +[2025-08-22 14:13:06] [Rank 0] step:7381/10000 train_time:718344ms step_avg:97.32ms +[2025-08-22 14:13:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:13:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:13:22] [Rank 0] PRINT: step:7400/10000 val_loss:3.5426 svd_entropy: attn_qk:H=0.9085,top10E=0.08,eRank=419.4,q75/q25=11.68 attn_vo:H=0.9162,top10E=0.06,eRank=440.7,q75/q25=11.63 mlp_w1:H=0.9725,top10E=0.04,eRank=640.1,q75/q25=2.74 mlp_w2:H=0.9665,top10E=0.05,eRank=614.9,q75/q25=2.86 vo_prod:H=0.8454,top10E=0.13,eRank=278.1,q75/q25=64.54 train_time:720313ms step_avg:97.34ms +[2025-08-22 14:13:22] [Rank 0] PRINT: step:7400/10000 val_loss:3.5426 svd_entropy: attn_qk:H=0.9085,top10E=0.08,eRank=419.4,q75/q25=11.68 attn_vo:H=0.9162,top10E=0.06,eRank=440.7,q75/q25=11.63 mlp_w1:H=0.9725,top10E=0.04,eRank=640.1,q75/q25=2.74 mlp_w2:H=0.9665,top10E=0.05,eRank=614.9,q75/q25=2.86 vo_prod:H=0.8454,top10E=0.13,eRank=278.1,q75/q25=64.54 train_time:720313ms step_avg:97.34ms +[2025-08-22 14:13:22] [Rank 0] step:7401/10000 train_time:720367ms step_avg:97.33ms +[2025-08-22 14:13:22] [Rank 0] step:7401/10000 train_time:720367ms step_avg:97.33ms +[2025-08-22 14:13:24] [Rank 0] step:7421/10000 train_time:722421ms step_avg:97.35ms +[2025-08-22 14:13:24] [Rank 0] step:7421/10000 train_time:722421ms step_avg:97.35ms +[2025-08-22 14:13:26] [Rank 0] step:7441/10000 train_time:724438ms step_avg:97.36ms 
+[2025-08-22 14:13:26] [Rank 0] step:7441/10000 train_time:724438ms step_avg:97.36ms +[2025-08-22 14:13:28] [Rank 0] step:7461/10000 train_time:726462ms step_avg:97.37ms +[2025-08-22 14:13:28] [Rank 0] step:7461/10000 train_time:726462ms step_avg:97.37ms +[2025-08-22 14:13:30] [Rank 0] step:7481/10000 train_time:728496ms step_avg:97.38ms +[2025-08-22 14:13:30] [Rank 0] step:7481/10000 train_time:728496ms step_avg:97.38ms +[2025-08-22 14:13:32] [Rank 0] step:7501/10000 train_time:730523ms step_avg:97.39ms +[2025-08-22 14:13:32] [Rank 0] step:7501/10000 train_time:730523ms step_avg:97.39ms +[2025-08-22 14:13:34] [Rank 0] step:7521/10000 train_time:732560ms step_avg:97.40ms +[2025-08-22 14:13:34] [Rank 0] step:7521/10000 train_time:732560ms step_avg:97.40ms +[2025-08-22 14:13:36] [Rank 0] step:7541/10000 train_time:734600ms step_avg:97.41ms +[2025-08-22 14:13:36] [Rank 0] step:7541/10000 train_time:734600ms step_avg:97.41ms +[2025-08-22 14:13:38] [Rank 0] step:7561/10000 train_time:736620ms step_avg:97.42ms +[2025-08-22 14:13:38] [Rank 0] step:7561/10000 train_time:736620ms step_avg:97.42ms +[2025-08-22 14:13:40] [Rank 0] step:7581/10000 train_time:738736ms step_avg:97.45ms +[2025-08-22 14:13:40] [Rank 0] step:7581/10000 train_time:738736ms step_avg:97.45ms +[2025-08-22 14:13:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:13:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:13:56] [Rank 0] PRINT: step:7600/10000 val_loss:3.5321 svd_entropy: attn_qk:H=0.9084,top10E=0.08,eRank=419.0,q75/q25=11.71 attn_vo:H=0.9162,top10E=0.06,eRank=440.7,q75/q25=11.67 mlp_w1:H=0.9726,top10E=0.04,eRank=640.1,q75/q25=2.74 mlp_w2:H=0.9665,top10E=0.05,eRank=615.0,q75/q25=2.86 vo_prod:H=0.8455,top10E=0.13,eRank=278.3,q75/q25=64.58 train_time:740797ms step_avg:97.47ms +[2025-08-22 14:13:56] [Rank 0] PRINT: step:7600/10000 val_loss:3.5321 svd_entropy: attn_qk:H=0.9084,top10E=0.08,eRank=419.0,q75/q25=11.71 attn_vo:H=0.9162,top10E=0.06,eRank=440.7,q75/q25=11.67 mlp_w1:H=0.9726,top10E=0.04,eRank=640.1,q75/q25=2.74 mlp_w2:H=0.9665,top10E=0.05,eRank=615.0,q75/q25=2.86 vo_prod:H=0.8455,top10E=0.13,eRank=278.3,q75/q25=64.58 train_time:740797ms step_avg:97.47ms +[2025-08-22 14:13:56] [Rank 0] step:7601/10000 train_time:740850ms step_avg:97.47ms +[2025-08-22 14:13:56] [Rank 0] step:7601/10000 train_time:740850ms step_avg:97.47ms +[2025-08-22 14:13:58] [Rank 0] step:7621/10000 train_time:742876ms step_avg:97.48ms +[2025-08-22 14:13:58] [Rank 0] step:7621/10000 train_time:742876ms step_avg:97.48ms +[2025-08-22 14:14:00] [Rank 0] step:7641/10000 train_time:744897ms step_avg:97.49ms +[2025-08-22 14:14:00] [Rank 0] step:7641/10000 train_time:744897ms step_avg:97.49ms +[2025-08-22 14:14:02] [Rank 0] step:7661/10000 train_time:746929ms step_avg:97.50ms +[2025-08-22 14:14:02] [Rank 0] step:7661/10000 train_time:746929ms step_avg:97.50ms +[2025-08-22 14:14:04] [Rank 0] step:7681/10000 train_time:748954ms step_avg:97.51ms +[2025-08-22 14:14:04] [Rank 0] step:7681/10000 train_time:748954ms step_avg:97.51ms +[2025-08-22 14:14:06] [Rank 0] step:7701/10000 train_time:750981ms step_avg:97.52ms +[2025-08-22 14:14:06] [Rank 0] step:7701/10000 train_time:750981ms step_avg:97.52ms +[2025-08-22 14:14:09] [Rank 0] step:7721/10000 train_time:753025ms step_avg:97.53ms +[2025-08-22 14:14:09] [Rank 0] step:7721/10000 train_time:753025ms step_avg:97.53ms +[2025-08-22 14:14:11] 
[Rank 0] step:7741/10000 train_time:755053ms step_avg:97.54ms +[2025-08-22 14:14:11] [Rank 0] step:7741/10000 train_time:755053ms step_avg:97.54ms +[2025-08-22 14:14:13] [Rank 0] step:7761/10000 train_time:757096ms step_avg:97.55ms +[2025-08-22 14:14:13] [Rank 0] step:7761/10000 train_time:757096ms step_avg:97.55ms +[2025-08-22 14:14:15] [Rank 0] step:7781/10000 train_time:759130ms step_avg:97.56ms +[2025-08-22 14:14:15] [Rank 0] step:7781/10000 train_time:759130ms step_avg:97.56ms +[2025-08-22 14:14:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:14:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:14:30] [Rank 0] PRINT: step:7800/10000 val_loss:3.5189 svd_entropy: attn_qk:H=0.9083,top10E=0.08,eRank=418.6,q75/q25=11.73 attn_vo:H=0.9163,top10E=0.06,eRank=440.8,q75/q25=11.64 mlp_w1:H=0.9726,top10E=0.04,eRank=640.1,q75/q25=2.74 mlp_w2:H=0.9665,top10E=0.05,eRank=615.1,q75/q25=2.86 vo_prod:H=0.8456,top10E=0.13,eRank=278.5,q75/q25=64.72 train_time:761129ms step_avg:97.58ms +[2025-08-22 14:14:30] [Rank 0] PRINT: step:7800/10000 val_loss:3.5189 svd_entropy: attn_qk:H=0.9083,top10E=0.08,eRank=418.6,q75/q25=11.73 attn_vo:H=0.9163,top10E=0.06,eRank=440.8,q75/q25=11.64 mlp_w1:H=0.9726,top10E=0.04,eRank=640.1,q75/q25=2.74 mlp_w2:H=0.9665,top10E=0.05,eRank=615.1,q75/q25=2.86 vo_prod:H=0.8456,top10E=0.13,eRank=278.5,q75/q25=64.72 train_time:761129ms step_avg:97.58ms +[2025-08-22 14:14:30] [Rank 0] step:7801/10000 train_time:761183ms step_avg:97.58ms +[2025-08-22 14:14:30] [Rank 0] step:7801/10000 train_time:761183ms step_avg:97.58ms +[2025-08-22 14:14:32] [Rank 0] step:7821/10000 train_time:763220ms step_avg:97.59ms +[2025-08-22 14:14:32] [Rank 0] step:7821/10000 train_time:763220ms step_avg:97.59ms +[2025-08-22 14:14:34] [Rank 0] step:7841/10000 train_time:765239ms step_avg:97.59ms 
+[2025-08-22 14:14:34] [Rank 0] step:7841/10000 train_time:765239ms step_avg:97.59ms +[2025-08-22 14:14:36] [Rank 0] step:7861/10000 train_time:767268ms step_avg:97.60ms +[2025-08-22 14:14:36] [Rank 0] step:7861/10000 train_time:767268ms step_avg:97.60ms +[2025-08-22 14:14:38] [Rank 0] step:7881/10000 train_time:769310ms step_avg:97.62ms +[2025-08-22 14:14:38] [Rank 0] step:7881/10000 train_time:769310ms step_avg:97.62ms +[2025-08-22 14:14:40] [Rank 0] step:7901/10000 train_time:771335ms step_avg:97.63ms +[2025-08-22 14:14:40] [Rank 0] step:7901/10000 train_time:771335ms step_avg:97.63ms +[2025-08-22 14:14:42] [Rank 0] step:7921/10000 train_time:773375ms step_avg:97.64ms +[2025-08-22 14:14:42] [Rank 0] step:7921/10000 train_time:773375ms step_avg:97.64ms +[2025-08-22 14:14:44] [Rank 0] step:7941/10000 train_time:775529ms step_avg:97.66ms +[2025-08-22 14:14:44] [Rank 0] step:7941/10000 train_time:775529ms step_avg:97.66ms +[2025-08-22 14:14:47] [Rank 0] step:7961/10000 train_time:777713ms step_avg:97.69ms +[2025-08-22 14:14:47] [Rank 0] step:7961/10000 train_time:777713ms step_avg:97.69ms +[2025-08-22 14:14:49] [Rank 0] step:7981/10000 train_time:779737ms step_avg:97.70ms +[2025-08-22 14:14:49] [Rank 0] step:7981/10000 train_time:779737ms step_avg:97.70ms +[2025-08-22 14:14:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:14:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:15:04] [Rank 0] PRINT: step:8000/10000 val_loss:3.5044 svd_entropy: attn_qk:H=0.9081,top10E=0.08,eRank=418.3,q75/q25=11.78 attn_vo:H=0.9163,top10E=0.06,eRank=440.9,q75/q25=11.66 mlp_w1:H=0.9726,top10E=0.04,eRank=640.2,q75/q25=2.74 mlp_w2:H=0.9665,top10E=0.05,eRank=615.2,q75/q25=2.86 vo_prod:H=0.8457,top10E=0.13,eRank=278.7,q75/q25=64.86 train_time:781729ms step_avg:97.72ms +[2025-08-22 14:15:04] [Rank 0] PRINT: step:8000/10000 val_loss:3.5044 svd_entropy: attn_qk:H=0.9081,top10E=0.08,eRank=418.3,q75/q25=11.78 attn_vo:H=0.9163,top10E=0.06,eRank=440.9,q75/q25=11.66 mlp_w1:H=0.9726,top10E=0.04,eRank=640.2,q75/q25=2.74 mlp_w2:H=0.9665,top10E=0.05,eRank=615.2,q75/q25=2.86 vo_prod:H=0.8457,top10E=0.13,eRank=278.7,q75/q25=64.86 train_time:781729ms step_avg:97.72ms +[2025-08-22 14:15:05] [Rank 0] step:8001/10000 train_time:781783ms step_avg:97.71ms +[2025-08-22 14:15:05] [Rank 0] step:8001/10000 train_time:781783ms step_avg:97.71ms +[2025-08-22 14:15:07] [Rank 0] step:8021/10000 train_time:783810ms step_avg:97.72ms +[2025-08-22 14:15:07] [Rank 0] step:8021/10000 train_time:783810ms step_avg:97.72ms +[2025-08-22 14:15:09] [Rank 0] step:8041/10000 train_time:785848ms step_avg:97.73ms +[2025-08-22 14:15:09] [Rank 0] step:8041/10000 train_time:785848ms step_avg:97.73ms +[2025-08-22 14:15:11] [Rank 0] step:8061/10000 train_time:787877ms step_avg:97.74ms +[2025-08-22 14:15:11] [Rank 0] step:8061/10000 train_time:787877ms step_avg:97.74ms +[2025-08-22 14:15:13] [Rank 0] step:8081/10000 train_time:789896ms step_avg:97.75ms +[2025-08-22 14:15:13] [Rank 0] step:8081/10000 train_time:789896ms step_avg:97.75ms +[2025-08-22 14:15:15] [Rank 0] step:8101/10000 train_time:791929ms step_avg:97.76ms +[2025-08-22 14:15:15] [Rank 0] step:8101/10000 train_time:791929ms step_avg:97.76ms +[2025-08-22 14:15:17] [Rank 0] step:8121/10000 train_time:793960ms step_avg:97.77ms +[2025-08-22 14:15:17] [Rank 0] step:8121/10000 train_time:793960ms step_avg:97.77ms +[2025-08-22 14:15:19] 
[Rank 0] step:8141/10000 train_time:796709ms step_avg:97.86ms +[2025-08-22 14:15:19] [Rank 0] step:8141/10000 train_time:796709ms step_avg:97.86ms +[2025-08-22 14:15:21] [Rank 0] step:8161/10000 train_time:798754ms step_avg:97.87ms +[2025-08-22 14:15:21] [Rank 0] step:8161/10000 train_time:798754ms step_avg:97.87ms +[2025-08-22 14:15:24] [Rank 0] step:8181/10000 train_time:800815ms step_avg:97.89ms +[2025-08-22 14:15:24] [Rank 0] step:8181/10000 train_time:800815ms step_avg:97.89ms +[2025-08-22 14:15:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:15:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:15:39] [Rank 0] PRINT: step:8200/10000 val_loss:3.4927 svd_entropy: attn_qk:H=0.9080,top10E=0.08,eRank=417.9,q75/q25=11.80 attn_vo:H=0.9163,top10E=0.06,eRank=440.9,q75/q25=11.65 mlp_w1:H=0.9726,top10E=0.04,eRank=640.2,q75/q25=2.73 mlp_w2:H=0.9666,top10E=0.05,eRank=615.3,q75/q25=2.86 vo_prod:H=0.8458,top10E=0.13,eRank=278.9,q75/q25=63.78 train_time:802851ms step_avg:97.91ms +[2025-08-22 14:15:39] [Rank 0] PRINT: step:8200/10000 val_loss:3.4927 svd_entropy: attn_qk:H=0.9080,top10E=0.08,eRank=417.9,q75/q25=11.80 attn_vo:H=0.9163,top10E=0.06,eRank=440.9,q75/q25=11.65 mlp_w1:H=0.9726,top10E=0.04,eRank=640.2,q75/q25=2.73 mlp_w2:H=0.9666,top10E=0.05,eRank=615.3,q75/q25=2.86 vo_prod:H=0.8458,top10E=0.13,eRank=278.9,q75/q25=63.78 train_time:802851ms step_avg:97.91ms +[2025-08-22 14:15:39] [Rank 0] step:8201/10000 train_time:802907ms step_avg:97.90ms +[2025-08-22 14:15:39] [Rank 0] step:8201/10000 train_time:802907ms step_avg:97.90ms +[2025-08-22 14:15:41] [Rank 0] step:8221/10000 train_time:804985ms step_avg:97.92ms +[2025-08-22 14:15:41] [Rank 0] step:8221/10000 train_time:804985ms step_avg:97.92ms +[2025-08-22 14:15:44] [Rank 0] step:8241/10000 train_time:807048ms step_avg:97.93ms 
+[2025-08-22 14:15:44] [Rank 0] step:8241/10000 train_time:807048ms step_avg:97.93ms +[2025-08-22 14:15:46] [Rank 0] step:8261/10000 train_time:809113ms step_avg:97.94ms +[2025-08-22 14:15:46] [Rank 0] step:8261/10000 train_time:809113ms step_avg:97.94ms +[2025-08-22 14:15:48] [Rank 0] step:8281/10000 train_time:811240ms step_avg:97.96ms +[2025-08-22 14:15:48] [Rank 0] step:8281/10000 train_time:811240ms step_avg:97.96ms +[2025-08-22 14:15:50] [Rank 0] step:8301/10000 train_time:813387ms step_avg:97.99ms +[2025-08-22 14:15:50] [Rank 0] step:8301/10000 train_time:813387ms step_avg:97.99ms +[2025-08-22 14:15:52] [Rank 0] step:8321/10000 train_time:815438ms step_avg:98.00ms +[2025-08-22 14:15:52] [Rank 0] step:8321/10000 train_time:815438ms step_avg:98.00ms +[2025-08-22 14:15:54] [Rank 0] step:8341/10000 train_time:817503ms step_avg:98.01ms +[2025-08-22 14:15:54] [Rank 0] step:8341/10000 train_time:817503ms step_avg:98.01ms +[2025-08-22 14:15:56] [Rank 0] step:8361/10000 train_time:819562ms step_avg:98.02ms +[2025-08-22 14:15:56] [Rank 0] step:8361/10000 train_time:819562ms step_avg:98.02ms +[2025-08-22 14:15:58] [Rank 0] step:8381/10000 train_time:821620ms step_avg:98.03ms +[2025-08-22 14:15:58] [Rank 0] step:8381/10000 train_time:821620ms step_avg:98.03ms +[2025-08-22 14:16:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:16:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:16:14] [Rank 0] PRINT: step:8400/10000 val_loss:3.4794 svd_entropy: attn_qk:H=0.9079,top10E=0.08,eRank=417.6,q75/q25=11.82 attn_vo:H=0.9163,top10E=0.06,eRank=441.0,q75/q25=11.69 mlp_w1:H=0.9726,top10E=0.04,eRank=640.2,q75/q25=2.73 mlp_w2:H=0.9666,top10E=0.05,eRank=615.4,q75/q25=2.86 vo_prod:H=0.8459,top10E=0.13,eRank=279.1,q75/q25=63.80 train_time:823632ms step_avg:98.05ms +[2025-08-22 14:16:14] [Rank 0] PRINT: step:8400/10000 val_loss:3.4794 svd_entropy: attn_qk:H=0.9079,top10E=0.08,eRank=417.6,q75/q25=11.82 attn_vo:H=0.9163,top10E=0.06,eRank=441.0,q75/q25=11.69 mlp_w1:H=0.9726,top10E=0.04,eRank=640.2,q75/q25=2.73 mlp_w2:H=0.9666,top10E=0.05,eRank=615.4,q75/q25=2.86 vo_prod:H=0.8459,top10E=0.13,eRank=279.1,q75/q25=63.80 train_time:823632ms step_avg:98.05ms +[2025-08-22 14:16:14] [Rank 0] step:8401/10000 train_time:823688ms step_avg:98.05ms +[2025-08-22 14:16:14] [Rank 0] step:8401/10000 train_time:823688ms step_avg:98.05ms +[2025-08-22 14:16:16] [Rank 0] step:8421/10000 train_time:825743ms step_avg:98.06ms +[2025-08-22 14:16:16] [Rank 0] step:8421/10000 train_time:825743ms step_avg:98.06ms +[2025-08-22 14:16:18] [Rank 0] step:8441/10000 train_time:827799ms step_avg:98.07ms +[2025-08-22 14:16:18] [Rank 0] step:8441/10000 train_time:827799ms step_avg:98.07ms +[2025-08-22 14:16:20] [Rank 0] step:8461/10000 train_time:829853ms step_avg:98.08ms +[2025-08-22 14:16:20] [Rank 0] step:8461/10000 train_time:829853ms step_avg:98.08ms +[2025-08-22 14:16:22] [Rank 0] step:8481/10000 train_time:831914ms step_avg:98.09ms +[2025-08-22 14:16:22] [Rank 0] step:8481/10000 train_time:831914ms step_avg:98.09ms +[2025-08-22 14:16:24] [Rank 0] step:8501/10000 train_time:833996ms step_avg:98.11ms +[2025-08-22 14:16:24] [Rank 0] step:8501/10000 train_time:833996ms step_avg:98.11ms +[2025-08-22 14:16:26] [Rank 0] step:8521/10000 train_time:836059ms step_avg:98.12ms +[2025-08-22 14:16:26] [Rank 0] step:8521/10000 train_time:836059ms step_avg:98.12ms +[2025-08-22 14:16:28] 
[Rank 0] step:8541/10000 train_time:838133ms step_avg:98.13ms +[2025-08-22 14:16:28] [Rank 0] step:8541/10000 train_time:838133ms step_avg:98.13ms +[2025-08-22 14:16:30] [Rank 0] step:8561/10000 train_time:840201ms step_avg:98.14ms +[2025-08-22 14:16:30] [Rank 0] step:8561/10000 train_time:840201ms step_avg:98.14ms +[2025-08-22 14:16:33] [Rank 0] step:8581/10000 train_time:842267ms step_avg:98.15ms +[2025-08-22 14:16:33] [Rank 0] step:8581/10000 train_time:842267ms step_avg:98.15ms +[2025-08-22 14:16:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:16:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:16:48] [Rank 0] PRINT: step:8600/10000 val_loss:3.4701 svd_entropy: attn_qk:H=0.9077,top10E=0.08,eRank=417.3,q75/q25=11.82 attn_vo:H=0.9164,top10E=0.06,eRank=441.1,q75/q25=11.69 mlp_w1:H=0.9726,top10E=0.04,eRank=640.3,q75/q25=2.73 mlp_w2:H=0.9666,top10E=0.05,eRank=615.5,q75/q25=2.86 vo_prod:H=0.8460,top10E=0.13,eRank=279.2,q75/q25=63.58 train_time:844280ms step_avg:98.17ms +[2025-08-22 14:16:48] [Rank 0] PRINT: step:8600/10000 val_loss:3.4701 svd_entropy: attn_qk:H=0.9077,top10E=0.08,eRank=417.3,q75/q25=11.82 attn_vo:H=0.9164,top10E=0.06,eRank=441.1,q75/q25=11.69 mlp_w1:H=0.9726,top10E=0.04,eRank=640.3,q75/q25=2.73 mlp_w2:H=0.9666,top10E=0.05,eRank=615.5,q75/q25=2.86 vo_prod:H=0.8460,top10E=0.13,eRank=279.2,q75/q25=63.58 train_time:844280ms step_avg:98.17ms +[2025-08-22 14:16:48] [Rank 0] step:8601/10000 train_time:844337ms step_avg:98.17ms +[2025-08-22 14:16:48] [Rank 0] step:8601/10000 train_time:844337ms step_avg:98.17ms +[2025-08-22 14:16:51] [Rank 0] step:8621/10000 train_time:846487ms step_avg:98.19ms +[2025-08-22 14:16:51] [Rank 0] step:8621/10000 train_time:846487ms step_avg:98.19ms +[2025-08-22 14:16:53] [Rank 0] step:8641/10000 train_time:848672ms step_avg:98.21ms 
+[2025-08-22 14:16:53] [Rank 0] step:8641/10000 train_time:848672ms step_avg:98.21ms +[2025-08-22 14:16:55] [Rank 0] step:8661/10000 train_time:850733ms step_avg:98.23ms +[2025-08-22 14:16:55] [Rank 0] step:8661/10000 train_time:850733ms step_avg:98.23ms +[2025-08-22 14:16:57] [Rank 0] step:8681/10000 train_time:852796ms step_avg:98.24ms +[2025-08-22 14:16:57] [Rank 0] step:8681/10000 train_time:852796ms step_avg:98.24ms +[2025-08-22 14:16:59] [Rank 0] step:8701/10000 train_time:854852ms step_avg:98.25ms +[2025-08-22 14:16:59] [Rank 0] step:8701/10000 train_time:854852ms step_avg:98.25ms +[2025-08-22 14:17:01] [Rank 0] step:8721/10000 train_time:856917ms step_avg:98.26ms +[2025-08-22 14:17:01] [Rank 0] step:8721/10000 train_time:856917ms step_avg:98.26ms +[2025-08-22 14:17:03] [Rank 0] step:8741/10000 train_time:858971ms step_avg:98.27ms +[2025-08-22 14:17:03] [Rank 0] step:8741/10000 train_time:858971ms step_avg:98.27ms +[2025-08-22 14:17:05] [Rank 0] step:8761/10000 train_time:861033ms step_avg:98.28ms +[2025-08-22 14:17:05] [Rank 0] step:8761/10000 train_time:861033ms step_avg:98.28ms +[2025-08-22 14:17:07] [Rank 0] step:8781/10000 train_time:863100ms step_avg:98.29ms +[2025-08-22 14:17:07] [Rank 0] step:8781/10000 train_time:863100ms step_avg:98.29ms +[2025-08-22 14:17:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:17:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:17:23] [Rank 0] PRINT: step:8800/10000 val_loss:3.4586 svd_entropy: attn_qk:H=0.9076,top10E=0.08,eRank=417.0,q75/q25=11.85 attn_vo:H=0.9164,top10E=0.06,eRank=441.1,q75/q25=11.69 mlp_w1:H=0.9726,top10E=0.04,eRank=640.3,q75/q25=2.73 mlp_w2:H=0.9666,top10E=0.05,eRank=615.6,q75/q25=2.85 vo_prod:H=0.8461,top10E=0.13,eRank=279.4,q75/q25=63.69 train_time:865120ms step_avg:98.31ms +[2025-08-22 14:17:23] [Rank 0] PRINT: step:8800/10000 val_loss:3.4586 svd_entropy: attn_qk:H=0.9076,top10E=0.08,eRank=417.0,q75/q25=11.85 attn_vo:H=0.9164,top10E=0.06,eRank=441.1,q75/q25=11.69 mlp_w1:H=0.9726,top10E=0.04,eRank=640.3,q75/q25=2.73 mlp_w2:H=0.9666,top10E=0.05,eRank=615.6,q75/q25=2.85 vo_prod:H=0.8461,top10E=0.13,eRank=279.4,q75/q25=63.69 train_time:865120ms step_avg:98.31ms +[2025-08-22 14:17:23] [Rank 0] step:8801/10000 train_time:865174ms step_avg:98.30ms +[2025-08-22 14:17:23] [Rank 0] step:8801/10000 train_time:865174ms step_avg:98.30ms +[2025-08-22 14:17:25] [Rank 0] step:8821/10000 train_time:867237ms step_avg:98.32ms +[2025-08-22 14:17:25] [Rank 0] step:8821/10000 train_time:867237ms step_avg:98.32ms +[2025-08-22 14:17:27] [Rank 0] step:8841/10000 train_time:869321ms step_avg:98.33ms +[2025-08-22 14:17:27] [Rank 0] step:8841/10000 train_time:869321ms step_avg:98.33ms +[2025-08-22 14:17:29] [Rank 0] step:8861/10000 train_time:871379ms step_avg:98.34ms +[2025-08-22 14:17:29] [Rank 0] step:8861/10000 train_time:871379ms step_avg:98.34ms +[2025-08-22 14:17:31] [Rank 0] step:8881/10000 train_time:873439ms step_avg:98.35ms +[2025-08-22 14:17:31] [Rank 0] step:8881/10000 train_time:873439ms step_avg:98.35ms +[2025-08-22 14:17:33] [Rank 0] step:8901/10000 train_time:875502ms step_avg:98.36ms +[2025-08-22 14:17:33] [Rank 0] step:8901/10000 train_time:875502ms step_avg:98.36ms +[2025-08-22 14:17:35] [Rank 0] step:8921/10000 train_time:877582ms step_avg:98.37ms +[2025-08-22 14:17:35] [Rank 0] step:8921/10000 train_time:877582ms step_avg:98.37ms +[2025-08-22 14:17:37] 
[Rank 0] step:8941/10000 train_time:879647ms step_avg:98.38ms +[2025-08-22 14:17:37] [Rank 0] step:8941/10000 train_time:879647ms step_avg:98.38ms +[2025-08-22 14:17:40] [Rank 0] step:8961/10000 train_time:881712ms step_avg:98.39ms +[2025-08-22 14:17:40] [Rank 0] step:8961/10000 train_time:881712ms step_avg:98.39ms +[2025-08-22 14:17:42] [Rank 0] step:8981/10000 train_time:883777ms step_avg:98.41ms +[2025-08-22 14:17:42] [Rank 0] step:8981/10000 train_time:883777ms step_avg:98.41ms +[2025-08-22 14:17:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:17:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:17:57] [Rank 0] PRINT: step:9000/10000 val_loss:3.4484 svd_entropy: attn_qk:H=0.9075,top10E=0.08,eRank=416.8,q75/q25=11.87 attn_vo:H=0.9164,top10E=0.06,eRank=441.2,q75/q25=11.68 mlp_w1:H=0.9726,top10E=0.04,eRank=640.3,q75/q25=2.73 mlp_w2:H=0.9667,top10E=0.05,eRank=615.7,q75/q25=2.86 vo_prod:H=0.8462,top10E=0.13,eRank=279.6,q75/q25=63.82 train_time:885796ms step_avg:98.42ms +[2025-08-22 14:17:57] [Rank 0] PRINT: step:9000/10000 val_loss:3.4484 svd_entropy: attn_qk:H=0.9075,top10E=0.08,eRank=416.8,q75/q25=11.87 attn_vo:H=0.9164,top10E=0.06,eRank=441.2,q75/q25=11.68 mlp_w1:H=0.9726,top10E=0.04,eRank=640.3,q75/q25=2.73 mlp_w2:H=0.9667,top10E=0.05,eRank=615.7,q75/q25=2.86 vo_prod:H=0.8462,top10E=0.13,eRank=279.6,q75/q25=63.82 train_time:885796ms step_avg:98.42ms +[2025-08-22 14:17:58] [Rank 0] step:9001/10000 train_time:885852ms step_avg:98.42ms +[2025-08-22 14:17:58] [Rank 0] step:9001/10000 train_time:885852ms step_avg:98.42ms +[2025-08-22 14:18:00] [Rank 0] step:9021/10000 train_time:887929ms step_avg:98.43ms +[2025-08-22 14:18:00] [Rank 0] step:9021/10000 train_time:887929ms step_avg:98.43ms +[2025-08-22 14:18:02] [Rank 0] step:9041/10000 train_time:889986ms step_avg:98.44ms 
+[2025-08-22 14:18:02] [Rank 0] step:9041/10000 train_time:889986ms step_avg:98.44ms +[2025-08-22 14:18:04] [Rank 0] step:9061/10000 train_time:892050ms step_avg:98.45ms +[2025-08-22 14:18:04] [Rank 0] step:9061/10000 train_time:892050ms step_avg:98.45ms +[2025-08-22 14:18:06] [Rank 0] step:9081/10000 train_time:894112ms step_avg:98.46ms +[2025-08-22 14:18:06] [Rank 0] step:9081/10000 train_time:894112ms step_avg:98.46ms +[2025-08-22 14:18:08] [Rank 0] step:9101/10000 train_time:896185ms step_avg:98.47ms +[2025-08-22 14:18:08] [Rank 0] step:9101/10000 train_time:896185ms step_avg:98.47ms +[2025-08-22 14:18:10] [Rank 0] step:9121/10000 train_time:898249ms step_avg:98.48ms +[2025-08-22 14:18:10] [Rank 0] step:9121/10000 train_time:898249ms step_avg:98.48ms +[2025-08-22 14:18:12] [Rank 0] step:9141/10000 train_time:900298ms step_avg:98.49ms +[2025-08-22 14:18:12] [Rank 0] step:9141/10000 train_time:900298ms step_avg:98.49ms +[2025-08-22 14:18:14] [Rank 0] step:9161/10000 train_time:902355ms step_avg:98.50ms +[2025-08-22 14:18:14] [Rank 0] step:9161/10000 train_time:902355ms step_avg:98.50ms +[2025-08-22 14:18:16] [Rank 0] step:9181/10000 train_time:904451ms step_avg:98.51ms +[2025-08-22 14:18:16] [Rank 0] step:9181/10000 train_time:904451ms step_avg:98.51ms +[2025-08-22 14:18:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:18:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:18:32] [Rank 0] PRINT: step:9200/10000 val_loss:3.4392 svd_entropy: attn_qk:H=0.9074,top10E=0.08,eRank=416.5,q75/q25=11.88 attn_vo:H=0.9164,top10E=0.06,eRank=441.2,q75/q25=11.69 mlp_w1:H=0.9726,top10E=0.04,eRank=640.3,q75/q25=2.73 mlp_w2:H=0.9667,top10E=0.05,eRank=615.7,q75/q25=2.85 vo_prod:H=0.8462,top10E=0.13,eRank=279.7,q75/q25=63.63 train_time:906464ms step_avg:98.53ms +[2025-08-22 14:18:32] [Rank 0] PRINT: step:9200/10000 val_loss:3.4392 svd_entropy: attn_qk:H=0.9074,top10E=0.08,eRank=416.5,q75/q25=11.88 attn_vo:H=0.9164,top10E=0.06,eRank=441.2,q75/q25=11.69 mlp_w1:H=0.9726,top10E=0.04,eRank=640.3,q75/q25=2.73 mlp_w2:H=0.9667,top10E=0.05,eRank=615.7,q75/q25=2.85 vo_prod:H=0.8462,top10E=0.13,eRank=279.7,q75/q25=63.63 train_time:906464ms step_avg:98.53ms +[2025-08-22 14:18:32] [Rank 0] step:9201/10000 train_time:906517ms step_avg:98.52ms +[2025-08-22 14:18:32] [Rank 0] step:9201/10000 train_time:906517ms step_avg:98.52ms +[2025-08-22 14:18:34] [Rank 0] step:9221/10000 train_time:908610ms step_avg:98.54ms +[2025-08-22 14:18:34] [Rank 0] step:9221/10000 train_time:908610ms step_avg:98.54ms +[2025-08-22 14:18:36] [Rank 0] step:9241/10000 train_time:910678ms step_avg:98.55ms +[2025-08-22 14:18:36] [Rank 0] step:9241/10000 train_time:910678ms step_avg:98.55ms +[2025-08-22 14:18:38] [Rank 0] step:9261/10000 train_time:912743ms step_avg:98.56ms +[2025-08-22 14:18:38] [Rank 0] step:9261/10000 train_time:912743ms step_avg:98.56ms +[2025-08-22 14:18:41] [Rank 0] step:9281/10000 train_time:914791ms step_avg:98.57ms +[2025-08-22 14:18:41] [Rank 0] step:9281/10000 train_time:914791ms step_avg:98.57ms +[2025-08-22 14:18:43] [Rank 0] step:9301/10000 train_time:916848ms step_avg:98.58ms +[2025-08-22 14:18:43] [Rank 0] step:9301/10000 train_time:916848ms step_avg:98.58ms +[2025-08-22 14:18:45] [Rank 0] step:9321/10000 train_time:918911ms step_avg:98.59ms +[2025-08-22 14:18:45] [Rank 0] step:9321/10000 train_time:918911ms step_avg:98.59ms +[2025-08-22 14:18:47] 
[Rank 0] step:9341/10000 train_time:920970ms step_avg:98.59ms +[2025-08-22 14:18:47] [Rank 0] step:9341/10000 train_time:920970ms step_avg:98.59ms +[2025-08-22 14:18:49] [Rank 0] step:9361/10000 train_time:923034ms step_avg:98.60ms +[2025-08-22 14:18:49] [Rank 0] step:9361/10000 train_time:923034ms step_avg:98.60ms +[2025-08-22 14:18:51] [Rank 0] step:9381/10000 train_time:925112ms step_avg:98.62ms +[2025-08-22 14:18:51] [Rank 0] step:9381/10000 train_time:925112ms step_avg:98.62ms +[2025-08-22 14:18:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:18:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:19:07] [Rank 0] PRINT: step:9400/10000 val_loss:3.4308 svd_entropy: attn_qk:H=0.9074,top10E=0.08,eRank=416.3,q75/q25=11.88 attn_vo:H=0.9164,top10E=0.06,eRank=441.3,q75/q25=11.68 mlp_w1:H=0.9726,top10E=0.04,eRank=640.4,q75/q25=2.73 mlp_w2:H=0.9667,top10E=0.05,eRank=615.8,q75/q25=2.85 vo_prod:H=0.8463,top10E=0.13,eRank=279.8,q75/q25=63.43 train_time:927137ms step_avg:98.63ms +[2025-08-22 14:19:07] [Rank 0] PRINT: step:9400/10000 val_loss:3.4308 svd_entropy: attn_qk:H=0.9074,top10E=0.08,eRank=416.3,q75/q25=11.88 attn_vo:H=0.9164,top10E=0.06,eRank=441.3,q75/q25=11.68 mlp_w1:H=0.9726,top10E=0.04,eRank=640.4,q75/q25=2.73 mlp_w2:H=0.9667,top10E=0.05,eRank=615.8,q75/q25=2.85 vo_prod:H=0.8463,top10E=0.13,eRank=279.8,q75/q25=63.43 train_time:927137ms step_avg:98.63ms +[2025-08-22 14:19:07] [Rank 0] step:9401/10000 train_time:927193ms step_avg:98.63ms +[2025-08-22 14:19:07] [Rank 0] step:9401/10000 train_time:927193ms step_avg:98.63ms +[2025-08-22 14:19:09] [Rank 0] step:9421/10000 train_time:929255ms step_avg:98.64ms +[2025-08-22 14:19:09] [Rank 0] step:9421/10000 train_time:929255ms step_avg:98.64ms +[2025-08-22 14:19:11] [Rank 0] step:9441/10000 train_time:931317ms step_avg:98.65ms 
+[2025-08-22 14:19:11] [Rank 0] step:9441/10000 train_time:931317ms step_avg:98.65ms +[2025-08-22 14:19:13] [Rank 0] step:9461/10000 train_time:933380ms step_avg:98.66ms +[2025-08-22 14:19:13] [Rank 0] step:9461/10000 train_time:933380ms step_avg:98.66ms +[2025-08-22 14:19:15] [Rank 0] step:9481/10000 train_time:935449ms step_avg:98.67ms +[2025-08-22 14:19:15] [Rank 0] step:9481/10000 train_time:935449ms step_avg:98.67ms +[2025-08-22 14:19:17] [Rank 0] step:9501/10000 train_time:937520ms step_avg:98.68ms +[2025-08-22 14:19:17] [Rank 0] step:9501/10000 train_time:937520ms step_avg:98.68ms +[2025-08-22 14:19:19] [Rank 0] step:9521/10000 train_time:939575ms step_avg:98.68ms +[2025-08-22 14:19:19] [Rank 0] step:9521/10000 train_time:939575ms step_avg:98.68ms +[2025-08-22 14:19:21] [Rank 0] step:9541/10000 train_time:941640ms step_avg:98.69ms +[2025-08-22 14:19:21] [Rank 0] step:9541/10000 train_time:941640ms step_avg:98.69ms +[2025-08-22 14:19:23] [Rank 0] step:9561/10000 train_time:943698ms step_avg:98.70ms +[2025-08-22 14:19:23] [Rank 0] step:9561/10000 train_time:943698ms step_avg:98.70ms +[2025-08-22 14:19:25] [Rank 0] step:9581/10000 train_time:945760ms step_avg:98.71ms +[2025-08-22 14:19:25] [Rank 0] step:9581/10000 train_time:945760ms step_avg:98.71ms +[2025-08-22 14:19:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:19:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:19:41] [Rank 0] PRINT: step:9600/10000 val_loss:3.4223 svd_entropy: attn_qk:H=0.9073,top10E=0.08,eRank=416.1,q75/q25=11.90 attn_vo:H=0.9164,top10E=0.06,eRank=441.3,q75/q25=11.68 mlp_w1:H=0.9726,top10E=0.04,eRank=640.4,q75/q25=2.73 mlp_w2:H=0.9667,top10E=0.05,eRank=615.9,q75/q25=2.85 vo_prod:H=0.8464,top10E=0.13,eRank=279.9,q75/q25=63.10 train_time:947793ms step_avg:98.73ms +[2025-08-22 14:19:41] [Rank 0] PRINT: step:9600/10000 val_loss:3.4223 svd_entropy: attn_qk:H=0.9073,top10E=0.08,eRank=416.1,q75/q25=11.90 attn_vo:H=0.9164,top10E=0.06,eRank=441.3,q75/q25=11.68 mlp_w1:H=0.9726,top10E=0.04,eRank=640.4,q75/q25=2.73 mlp_w2:H=0.9667,top10E=0.05,eRank=615.9,q75/q25=2.85 vo_prod:H=0.8464,top10E=0.13,eRank=279.9,q75/q25=63.10 train_time:947793ms step_avg:98.73ms +[2025-08-22 14:19:42] [Rank 0] step:9601/10000 train_time:947848ms step_avg:98.72ms +[2025-08-22 14:19:42] [Rank 0] step:9601/10000 train_time:947848ms step_avg:98.72ms +[2025-08-22 14:19:44] [Rank 0] step:9621/10000 train_time:949940ms step_avg:98.74ms +[2025-08-22 14:19:44] [Rank 0] step:9621/10000 train_time:949940ms step_avg:98.74ms +[2025-08-22 14:19:46] [Rank 0] step:9641/10000 train_time:952007ms step_avg:98.75ms +[2025-08-22 14:19:46] [Rank 0] step:9641/10000 train_time:952007ms step_avg:98.75ms +[2025-08-22 14:19:48] [Rank 0] step:9661/10000 train_time:954102ms step_avg:98.76ms +[2025-08-22 14:19:48] [Rank 0] step:9661/10000 train_time:954102ms step_avg:98.76ms +[2025-08-22 14:19:50] [Rank 0] step:9681/10000 train_time:956190ms step_avg:98.77ms +[2025-08-22 14:19:50] [Rank 0] step:9681/10000 train_time:956190ms step_avg:98.77ms +[2025-08-22 14:19:52] [Rank 0] step:9701/10000 train_time:958292ms step_avg:98.78ms +[2025-08-22 14:19:52] [Rank 0] step:9701/10000 train_time:958292ms step_avg:98.78ms +[2025-08-22 14:19:54] [Rank 0] step:9721/10000 train_time:960386ms step_avg:98.79ms +[2025-08-22 14:19:54] [Rank 0] step:9721/10000 train_time:960386ms step_avg:98.79ms +[2025-08-22 14:19:56] 
[Rank 0] step:9741/10000 train_time:962491ms step_avg:98.81ms +[2025-08-22 14:19:56] [Rank 0] step:9741/10000 train_time:962491ms step_avg:98.81ms +[2025-08-22 14:19:58] [Rank 0] step:9761/10000 train_time:964587ms step_avg:98.82ms +[2025-08-22 14:19:58] [Rank 0] step:9761/10000 train_time:964587ms step_avg:98.82ms +[2025-08-22 14:20:00] [Rank 0] step:9781/10000 train_time:966740ms step_avg:98.84ms +[2025-08-22 14:20:00] [Rank 0] step:9781/10000 train_time:966740ms step_avg:98.84ms +[2025-08-22 14:20:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:20:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:20:16] [Rank 0] PRINT: step:9800/10000 val_loss:3.4141 svd_entropy: attn_qk:H=0.9072,top10E=0.08,eRank=416.0,q75/q25=11.89 attn_vo:H=0.9165,top10E=0.06,eRank=441.3,q75/q25=11.68 mlp_w1:H=0.9726,top10E=0.04,eRank=640.4,q75/q25=2.73 mlp_w2:H=0.9667,top10E=0.05,eRank=615.9,q75/q25=2.85 vo_prod:H=0.8464,top10E=0.13,eRank=280.0,q75/q25=63.21 train_time:968874ms step_avg:98.86ms +[2025-08-22 14:20:16] [Rank 0] PRINT: step:9800/10000 val_loss:3.4141 svd_entropy: attn_qk:H=0.9072,top10E=0.08,eRank=416.0,q75/q25=11.89 attn_vo:H=0.9165,top10E=0.06,eRank=441.3,q75/q25=11.68 mlp_w1:H=0.9726,top10E=0.04,eRank=640.4,q75/q25=2.73 mlp_w2:H=0.9667,top10E=0.05,eRank=615.9,q75/q25=2.85 vo_prod:H=0.8464,top10E=0.13,eRank=280.0,q75/q25=63.21 train_time:968874ms step_avg:98.86ms +[2025-08-22 14:20:17] [Rank 0] step:9801/10000 train_time:968928ms step_avg:98.86ms +[2025-08-22 14:20:17] [Rank 0] step:9801/10000 train_time:968928ms step_avg:98.86ms +[2025-08-22 14:20:19] [Rank 0] step:9821/10000 train_time:971027ms step_avg:98.87ms +[2025-08-22 14:20:19] [Rank 0] step:9821/10000 train_time:971027ms step_avg:98.87ms +[2025-08-22 14:20:21] [Rank 0] step:9841/10000 train_time:973119ms step_avg:98.88ms 
+[2025-08-22 14:20:21] [Rank 0] step:9841/10000 train_time:973119ms step_avg:98.88ms +[2025-08-22 14:20:23] [Rank 0] step:9861/10000 train_time:975197ms step_avg:98.89ms +[2025-08-22 14:20:23] [Rank 0] step:9861/10000 train_time:975197ms step_avg:98.89ms +[2025-08-22 14:20:25] [Rank 0] step:9881/10000 train_time:977271ms step_avg:98.90ms +[2025-08-22 14:20:25] [Rank 0] step:9881/10000 train_time:977271ms step_avg:98.90ms +[2025-08-22 14:20:27] [Rank 0] step:9901/10000 train_time:979369ms step_avg:98.92ms +[2025-08-22 14:20:27] [Rank 0] step:9901/10000 train_time:979369ms step_avg:98.92ms +[2025-08-22 14:20:29] [Rank 0] step:9921/10000 train_time:981448ms step_avg:98.93ms +[2025-08-22 14:20:29] [Rank 0] step:9921/10000 train_time:981448ms step_avg:98.93ms +[2025-08-22 14:20:31] [Rank 0] step:9941/10000 train_time:983548ms step_avg:98.94ms +[2025-08-22 14:20:31] [Rank 0] step:9941/10000 train_time:983548ms step_avg:98.94ms +[2025-08-22 14:20:33] [Rank 0] step:9961/10000 train_time:985628ms step_avg:98.95ms +[2025-08-22 14:20:33] [Rank 0] step:9961/10000 train_time:985628ms step_avg:98.95ms +[2025-08-22 14:20:35] [Rank 0] step:9981/10000 train_time:987720ms step_avg:98.96ms +[2025-08-22 14:20:35] [Rank 0] step:9981/10000 train_time:987720ms step_avg:98.96ms +[2025-08-22 14:20:37] [Rank 0] step:10000/10000 train_time:989708ms step_avg:98.97ms +[2025-08-22 14:20:37] [Rank 0] step:10000/10000 train_time:989708ms step_avg:98.97ms +[2025-08-22 14:20:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:20:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:20:51] [Rank 0] PRINT: step:10000/10000 val_loss:3.4077 svd_entropy: attn_qk:H=0.9072,top10E=0.08,eRank=415.9,q75/q25=11.89 attn_vo:H=0.9165,top10E=0.06,eRank=441.4,q75/q25=11.68 mlp_w1:H=0.9726,top10E=0.04,eRank=640.4,q75/q25=2.73 mlp_w2:H=0.9667,top10E=0.05,eRank=615.9,q75/q25=2.85 vo_prod:H=0.8464,top10E=0.13,eRank=280.0,q75/q25=63.02 train_time:989774ms step_avg:98.98ms +[2025-08-22 14:20:51] [Rank 0] PRINT: step:10000/10000 val_loss:3.4077 svd_entropy: attn_qk:H=0.9072,top10E=0.08,eRank=415.9,q75/q25=11.89 attn_vo:H=0.9165,top10E=0.06,eRank=441.4,q75/q25=11.68 mlp_w1:H=0.9726,top10E=0.04,eRank=640.4,q75/q25=2.73 mlp_w2:H=0.9667,top10E=0.05,eRank=615.9,q75/q25=2.85 vo_prod:H=0.8464,top10E=0.13,eRank=280.0,q75/q25=63.02 train_time:989774ms step_avg:98.98ms +[2025-08-22 14:20:51] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 14:20:51 2025 --- +[2025-08-22 14:20:51] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 14:20:51 2025 --- +[2025-08-22 14:20:51] [Rank 0] PRINT: Peak memory allocated: 11035 MiB reserved: 16636 MiB +[2025-08-22 14:20:51] [Rank 0] PRINT: Peak memory allocated: 11035 MiB reserved: 16636 MiB diff --git a/logs_svd_gated/mode_0_param_gated_seed_43/config.json b/logs_svd_gated/mode_0_param_gated_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..f2f29068e5ec27edf2a12c7be4aa74f40c0e28fa --- /dev/null +++ b/logs_svd_gated/mode_0_param_gated_seed_43/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 0, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "c3a30477-baf7-4c71-b022-68d4607ccaeb", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_0_param_gated_seed_43/training_log_c3a30477-baf7-4c71-b022-68d4607ccaeb.txt b/logs_svd_gated/mode_0_param_gated_seed_43/training_log_c3a30477-baf7-4c71-b022-68d4607ccaeb.txt new file mode 100644 index 0000000000000000000000000000000000000000..e3e5dbdca8c648aecda15e99459fa2d4d3759cbd --- /dev/null +++ b/logs_svd_gated/mode_0_param_gated_seed_43/training_log_c3a30477-baf7-4c71-b022-68d4607ccaeb.txt @@ -0,0 +1,2926 @@ +[2025-08-22 19:05:16] [Rank 0] PRINT: --- Script Start: Fri Aug 22 19:05:16 2025 --- +[2025-08-22 19:05:16] [Rank 0] PRINT: --- Script Start: Fri Aug 22 19:05:16 2025 --- +[2025-08-22 19:05:16] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=0, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 19:05:16] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=0, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 19:05:16] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 19:05:16] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 19:05:16] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 19:05:16] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 19:05:16] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_0_param_gated_seed_43 +[2025-08-22 19:05:16] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_0_param_gated_seed_43 +[2025-08-22 19:05:16] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 19:05:16] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 19:05:16] [Rank 0] PRINT: Constructing model... +[2025-08-22 19:05:16] [Rank 0] PRINT: Constructing model... +[2025-08-22 19:05:18] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 19:05:18] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 19:05:18] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 19:05:18] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 19:05:18] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 19:05:18] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 19:05:18] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 0 +[2025-08-22 19:05:18] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 0 +[2025-08-22 19:05:18] [Rank 0] PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices. +[2025-08-22 19:05:18] [Rank 0] PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices. +[2025-08-22 19:05:18] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 19:05:18] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 19:05:18] [Rank 0] PRINT: Muon optimizer is active with 80 parameters. +[2025-08-22 19:05:18] [Rank 0] PRINT: Muon optimizer is active with 80 parameters. +[2025-08-22 19:05:18] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 19:05:18] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 19:05:18] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 19:05:18] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 19:05:18] [Rank 0] PRINT: Starting warmup... +[2025-08-22 19:05:18] [Rank 0] PRINT: Starting warmup... +[2025-08-22 19:06:03] [Rank 0] PRINT: Warmup complete. +[2025-08-22 19:06:03] [Rank 0] PRINT: Warmup complete. +[2025-08-22 19:06:03] [Rank 0] PRINT: Starting training... +[2025-08-22 19:06:03] [Rank 0] PRINT: Starting training... 
+[2025-08-22 19:06:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:06:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:06:20] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 19:06:20] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 19:06:22] [Rank 0] step:21/10000 train_time:1940ms step_avg:92.39ms +[2025-08-22 19:06:22] [Rank 0] step:21/10000 train_time:1940ms step_avg:92.39ms +[2025-08-22 19:06:24] [Rank 0] step:41/10000 train_time:3774ms step_avg:92.05ms +[2025-08-22 19:06:24] [Rank 0] step:41/10000 train_time:3774ms step_avg:92.05ms +[2025-08-22 19:06:26] [Rank 0] step:61/10000 train_time:5606ms step_avg:91.91ms +[2025-08-22 19:06:26] [Rank 0] step:61/10000 train_time:5606ms step_avg:91.91ms +[2025-08-22 19:06:28] [Rank 0] step:81/10000 train_time:7440ms step_avg:91.86ms +[2025-08-22 19:06:28] [Rank 0] step:81/10000 train_time:7440ms step_avg:91.86ms +[2025-08-22 19:06:30] [Rank 0] step:101/10000 train_time:9278ms step_avg:91.86ms +[2025-08-22 19:06:30] [Rank 0] step:101/10000 train_time:9278ms step_avg:91.86ms +[2025-08-22 19:06:32] [Rank 0] step:121/10000 train_time:11114ms step_avg:91.85ms +[2025-08-22 19:06:32] [Rank 0] step:121/10000 
train_time:11114ms step_avg:91.85ms +[2025-08-22 19:06:33] [Rank 0] step:141/10000 train_time:12951ms step_avg:91.85ms +[2025-08-22 19:06:33] [Rank 0] step:141/10000 train_time:12951ms step_avg:91.85ms +[2025-08-22 19:06:35] [Rank 0] step:161/10000 train_time:14792ms step_avg:91.87ms +[2025-08-22 19:06:35] [Rank 0] step:161/10000 train_time:14792ms step_avg:91.87ms +[2025-08-22 19:06:37] [Rank 0] step:181/10000 train_time:16629ms step_avg:91.88ms +[2025-08-22 19:06:37] [Rank 0] step:181/10000 train_time:16629ms step_avg:91.88ms +[2025-08-22 19:06:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:06:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:06:53] [Rank 0] PRINT: step:200/10000 val_loss:5.3145 svd_entropy: attn_qk:H=0.9232,top10E=0.06,eRank=461.1,q75/q25=10.16 attn_vo:H=0.9143,top10E=0.07,eRank=435.1,q75/q25=11.79 mlp_w1:H=0.9718,top10E=0.04,eRank=637.0,q75/q25=2.92 mlp_w2:H=0.9698,top10E=0.04,eRank=628.4,q75/q25=3.04 vo_prod:H=0.8365,top10E=0.14,eRank=260.5,q75/q25=80.43 train_time:18429ms step_avg:92.14ms +[2025-08-22 19:06:53] [Rank 0] PRINT: step:200/10000 val_loss:5.3145 svd_entropy: attn_qk:H=0.9232,top10E=0.06,eRank=461.1,q75/q25=10.16 attn_vo:H=0.9143,top10E=0.07,eRank=435.1,q75/q25=11.79 mlp_w1:H=0.9718,top10E=0.04,eRank=637.0,q75/q25=2.92 mlp_w2:H=0.9698,top10E=0.04,eRank=628.4,q75/q25=3.04 vo_prod:H=0.8365,top10E=0.14,eRank=260.5,q75/q25=80.43 train_time:18429ms step_avg:92.14ms +[2025-08-22 19:06:53] [Rank 0] step:201/10000 train_time:18480ms step_avg:91.94ms +[2025-08-22 19:06:53] [Rank 0] step:201/10000 train_time:18480ms step_avg:91.94ms +[2025-08-22 19:06:55] [Rank 0] step:221/10000 train_time:20322ms step_avg:91.95ms +[2025-08-22 19:06:55] [Rank 0] step:221/10000 train_time:20322ms step_avg:91.95ms +[2025-08-22 19:06:57] [Rank 0] step:241/10000 
train_time:22158ms step_avg:91.94ms +[2025-08-22 19:06:57] [Rank 0] step:241/10000 train_time:22158ms step_avg:91.94ms +[2025-08-22 19:06:58] [Rank 0] step:261/10000 train_time:23996ms step_avg:91.94ms +[2025-08-22 19:06:58] [Rank 0] step:261/10000 train_time:23996ms step_avg:91.94ms +[2025-08-22 19:07:00] [Rank 0] step:281/10000 train_time:25833ms step_avg:91.93ms +[2025-08-22 19:07:00] [Rank 0] step:281/10000 train_time:25833ms step_avg:91.93ms +[2025-08-22 19:07:02] [Rank 0] step:301/10000 train_time:27670ms step_avg:91.93ms +[2025-08-22 19:07:02] [Rank 0] step:301/10000 train_time:27670ms step_avg:91.93ms +[2025-08-22 19:07:04] [Rank 0] step:321/10000 train_time:29509ms step_avg:91.93ms +[2025-08-22 19:07:04] [Rank 0] step:321/10000 train_time:29509ms step_avg:91.93ms +[2025-08-22 19:07:06] [Rank 0] step:341/10000 train_time:31347ms step_avg:91.93ms +[2025-08-22 19:07:06] [Rank 0] step:341/10000 train_time:31347ms step_avg:91.93ms +[2025-08-22 19:07:08] [Rank 0] step:361/10000 train_time:33186ms step_avg:91.93ms +[2025-08-22 19:07:08] [Rank 0] step:361/10000 train_time:33186ms step_avg:91.93ms +[2025-08-22 19:07:09] [Rank 0] step:381/10000 train_time:35027ms step_avg:91.93ms +[2025-08-22 19:07:09] [Rank 0] step:381/10000 train_time:35027ms step_avg:91.93ms +[2025-08-22 19:07:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:07:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:07:25] [Rank 0] PRINT: step:400/10000 val_loss:4.9393 svd_entropy: attn_qk:H=0.9237,top10E=0.06,eRank=462.7,q75/q25=9.91 attn_vo:H=0.9204,top10E=0.06,eRank=452.7,q75/q25=10.43 mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.88 mlp_w2:H=0.9698,top10E=0.04,eRank=628.6,q75/q25=2.99 vo_prod:H=0.8480,top10E=0.12,eRank=280.6,q75/q25=59.08 train_time:36827ms step_avg:92.07ms +[2025-08-22 19:07:25] [Rank 0] PRINT: step:400/10000 val_loss:4.9393 svd_entropy: attn_qk:H=0.9237,top10E=0.06,eRank=462.7,q75/q25=9.91 attn_vo:H=0.9204,top10E=0.06,eRank=452.7,q75/q25=10.43 mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.88 mlp_w2:H=0.9698,top10E=0.04,eRank=628.6,q75/q25=2.99 vo_prod:H=0.8480,top10E=0.12,eRank=280.6,q75/q25=59.08 train_time:36827ms step_avg:92.07ms +[2025-08-22 19:07:25] [Rank 0] step:401/10000 train_time:36877ms step_avg:91.96ms +[2025-08-22 19:07:25] [Rank 0] step:401/10000 train_time:36877ms step_avg:91.96ms +[2025-08-22 19:07:27] [Rank 0] step:421/10000 train_time:38736ms step_avg:92.01ms +[2025-08-22 19:07:27] [Rank 0] step:421/10000 train_time:38736ms step_avg:92.01ms +[2025-08-22 19:07:29] [Rank 0] step:441/10000 train_time:40570ms step_avg:91.99ms +[2025-08-22 19:07:29] [Rank 0] step:441/10000 train_time:40570ms step_avg:91.99ms +[2025-08-22 19:07:31] [Rank 0] step:461/10000 train_time:42405ms step_avg:91.98ms +[2025-08-22 19:07:31] [Rank 0] step:461/10000 train_time:42405ms step_avg:91.98ms +[2025-08-22 19:07:32] [Rank 0] step:481/10000 train_time:44241ms step_avg:91.98ms +[2025-08-22 19:07:32] [Rank 0] step:481/10000 train_time:44241ms step_avg:91.98ms +[2025-08-22 19:07:34] [Rank 0] step:501/10000 train_time:46079ms step_avg:91.97ms +[2025-08-22 19:07:34] [Rank 0] step:501/10000 train_time:46079ms step_avg:91.97ms +[2025-08-22 19:07:36] [Rank 0] step:521/10000 train_time:47919ms step_avg:91.97ms +[2025-08-22 19:07:36] [Rank 0] step:521/10000 train_time:47919ms step_avg:91.97ms +[2025-08-22 19:07:38] [Rank 0] step:541/10000 
train_time:49756ms step_avg:91.97ms +[2025-08-22 19:07:38] [Rank 0] step:541/10000 train_time:49756ms step_avg:91.97ms +[2025-08-22 19:07:40] [Rank 0] step:561/10000 train_time:51595ms step_avg:91.97ms +[2025-08-22 19:07:40] [Rank 0] step:561/10000 train_time:51595ms step_avg:91.97ms +[2025-08-22 19:07:42] [Rank 0] step:581/10000 train_time:53435ms step_avg:91.97ms +[2025-08-22 19:07:42] [Rank 0] step:581/10000 train_time:53435ms step_avg:91.97ms +[2025-08-22 19:07:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:07:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:07:57] [Rank 0] PRINT: step:600/10000 val_loss:4.7360 svd_entropy: attn_qk:H=0.9211,top10E=0.06,eRank=454.6,q75/q25=10.26 attn_vo:H=0.9187,top10E=0.06,eRank=447.6,q75/q25=10.77 mlp_w1:H=0.9718,top10E=0.04,eRank=636.9,q75/q25=2.87 mlp_w2:H=0.9689,top10E=0.04,eRank=624.6,q75/q25=2.99 vo_prod:H=0.8454,top10E=0.13,eRank=275.9,q75/q25=61.94 train_time:55235ms step_avg:92.06ms +[2025-08-22 19:07:57] [Rank 0] PRINT: step:600/10000 val_loss:4.7360 svd_entropy: attn_qk:H=0.9211,top10E=0.06,eRank=454.6,q75/q25=10.26 attn_vo:H=0.9187,top10E=0.06,eRank=447.6,q75/q25=10.77 mlp_w1:H=0.9718,top10E=0.04,eRank=636.9,q75/q25=2.87 mlp_w2:H=0.9689,top10E=0.04,eRank=624.6,q75/q25=2.99 vo_prod:H=0.8454,top10E=0.13,eRank=275.9,q75/q25=61.94 train_time:55235ms step_avg:92.06ms +[2025-08-22 19:07:57] [Rank 0] step:601/10000 train_time:55285ms step_avg:91.99ms +[2025-08-22 19:07:57] [Rank 0] step:601/10000 train_time:55285ms step_avg:91.99ms +[2025-08-22 19:07:59] [Rank 0] step:621/10000 train_time:57138ms step_avg:92.01ms +[2025-08-22 19:07:59] [Rank 0] step:621/10000 train_time:57138ms step_avg:92.01ms +[2025-08-22 19:08:01] [Rank 0] step:641/10000 train_time:58976ms step_avg:92.01ms +[2025-08-22 19:08:01] [Rank 0] step:641/10000 
train_time:58976ms step_avg:92.01ms +[2025-08-22 19:08:03] [Rank 0] step:661/10000 train_time:60816ms step_avg:92.01ms +[2025-08-22 19:08:03] [Rank 0] step:661/10000 train_time:60816ms step_avg:92.01ms +[2025-08-22 19:08:05] [Rank 0] step:681/10000 train_time:62656ms step_avg:92.01ms +[2025-08-22 19:08:05] [Rank 0] step:681/10000 train_time:62656ms step_avg:92.01ms +[2025-08-22 19:08:06] [Rank 0] step:701/10000 train_time:64496ms step_avg:92.01ms +[2025-08-22 19:08:06] [Rank 0] step:701/10000 train_time:64496ms step_avg:92.01ms +[2025-08-22 19:08:08] [Rank 0] step:721/10000 train_time:66338ms step_avg:92.01ms +[2025-08-22 19:08:08] [Rank 0] step:721/10000 train_time:66338ms step_avg:92.01ms +[2025-08-22 19:08:10] [Rank 0] step:741/10000 train_time:68182ms step_avg:92.01ms +[2025-08-22 19:08:10] [Rank 0] step:741/10000 train_time:68182ms step_avg:92.01ms +[2025-08-22 19:08:12] [Rank 0] step:761/10000 train_time:70038ms step_avg:92.03ms +[2025-08-22 19:08:12] [Rank 0] step:761/10000 train_time:70038ms step_avg:92.03ms +[2025-08-22 19:08:14] [Rank 0] step:781/10000 train_time:71895ms step_avg:92.05ms +[2025-08-22 19:08:14] [Rank 0] step:781/10000 train_time:71895ms step_avg:92.05ms +[2025-08-22 19:08:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:08:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:08:29] [Rank 0] PRINT: step:800/10000 val_loss:4.5587 svd_entropy: attn_qk:H=0.9193,top10E=0.06,eRank=449.3,q75/q25=10.51 attn_vo:H=0.9176,top10E=0.06,eRank=444.6,q75/q25=10.95 mlp_w1:H=0.9717,top10E=0.04,eRank=636.4,q75/q25=2.87 mlp_w2:H=0.9682,top10E=0.05,eRank=621.8,q75/q25=2.99 vo_prod:H=0.8441,top10E=0.13,eRank=273.8,q75/q25=64.00 train_time:73713ms step_avg:92.14ms +[2025-08-22 19:08:29] [Rank 0] PRINT: step:800/10000 val_loss:4.5587 svd_entropy: attn_qk:H=0.9193,top10E=0.06,eRank=449.3,q75/q25=10.51 attn_vo:H=0.9176,top10E=0.06,eRank=444.6,q75/q25=10.95 mlp_w1:H=0.9717,top10E=0.04,eRank=636.4,q75/q25=2.87 mlp_w2:H=0.9682,top10E=0.05,eRank=621.8,q75/q25=2.99 vo_prod:H=0.8441,top10E=0.13,eRank=273.8,q75/q25=64.00 train_time:73713ms step_avg:92.14ms +[2025-08-22 19:08:29] [Rank 0] step:801/10000 train_time:73763ms step_avg:92.09ms +[2025-08-22 19:08:29] [Rank 0] step:801/10000 train_time:73763ms step_avg:92.09ms +[2025-08-22 19:08:31] [Rank 0] step:821/10000 train_time:75633ms step_avg:92.12ms +[2025-08-22 19:08:31] [Rank 0] step:821/10000 train_time:75633ms step_avg:92.12ms +[2025-08-22 19:08:33] [Rank 0] step:841/10000 train_time:77486ms step_avg:92.14ms +[2025-08-22 19:08:33] [Rank 0] step:841/10000 train_time:77486ms step_avg:92.14ms +[2025-08-22 19:08:35] [Rank 0] step:861/10000 train_time:79336ms step_avg:92.14ms +[2025-08-22 19:08:35] [Rank 0] step:861/10000 train_time:79336ms step_avg:92.14ms +[2025-08-22 19:08:37] [Rank 0] step:881/10000 train_time:81185ms step_avg:92.15ms +[2025-08-22 19:08:37] [Rank 0] step:881/10000 train_time:81185ms step_avg:92.15ms +[2025-08-22 19:08:39] [Rank 0] step:901/10000 train_time:83034ms step_avg:92.16ms +[2025-08-22 19:08:39] [Rank 0] step:901/10000 train_time:83034ms step_avg:92.16ms +[2025-08-22 19:08:41] [Rank 0] step:921/10000 train_time:84885ms step_avg:92.17ms +[2025-08-22 19:08:41] [Rank 0] step:921/10000 train_time:84885ms step_avg:92.17ms +[2025-08-22 19:08:42] [Rank 0] step:941/10000 
train_time:86736ms step_avg:92.17ms +[2025-08-22 19:08:42] [Rank 0] step:941/10000 train_time:86736ms step_avg:92.17ms +[2025-08-22 19:08:44] [Rank 0] step:961/10000 train_time:88587ms step_avg:92.18ms +[2025-08-22 19:08:44] [Rank 0] step:961/10000 train_time:88587ms step_avg:92.18ms +[2025-08-22 19:08:46] [Rank 0] step:981/10000 train_time:90439ms step_avg:92.19ms +[2025-08-22 19:08:46] [Rank 0] step:981/10000 train_time:90439ms step_avg:92.19ms +[2025-08-22 19:08:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:08:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:09:02] [Rank 0] PRINT: step:1000/10000 val_loss:4.3974 svd_entropy: attn_qk:H=0.9181,top10E=0.06,eRank=445.8,q75/q25=10.65 attn_vo:H=0.9168,top10E=0.06,eRank=442.0,q75/q25=11.13 mlp_w1:H=0.9717,top10E=0.04,eRank=636.4,q75/q25=2.86 mlp_w2:H=0.9678,top10E=0.05,eRank=620.4,q75/q25=2.99 vo_prod:H=0.8428,top10E=0.13,eRank=271.5,q75/q25=65.50 train_time:92248ms step_avg:92.25ms +[2025-08-22 19:09:02] [Rank 0] PRINT: step:1000/10000 val_loss:4.3974 svd_entropy: attn_qk:H=0.9181,top10E=0.06,eRank=445.8,q75/q25=10.65 attn_vo:H=0.9168,top10E=0.06,eRank=442.0,q75/q25=11.13 mlp_w1:H=0.9717,top10E=0.04,eRank=636.4,q75/q25=2.86 mlp_w2:H=0.9678,top10E=0.05,eRank=620.4,q75/q25=2.99 vo_prod:H=0.8428,top10E=0.13,eRank=271.5,q75/q25=65.50 train_time:92248ms step_avg:92.25ms +[2025-08-22 19:09:02] [Rank 0] step:1001/10000 train_time:92299ms step_avg:92.21ms +[2025-08-22 19:09:02] [Rank 0] step:1001/10000 train_time:92299ms step_avg:92.21ms +[2025-08-22 19:09:04] [Rank 0] step:1021/10000 train_time:94166ms step_avg:92.23ms +[2025-08-22 19:09:04] [Rank 0] step:1021/10000 train_time:94166ms step_avg:92.23ms +[2025-08-22 19:09:05] [Rank 0] step:1041/10000 train_time:96012ms step_avg:92.23ms +[2025-08-22 19:09:05] [Rank 0] 
step:1041/10000 train_time:96012ms step_avg:92.23ms +[2025-08-22 19:09:07] [Rank 0] step:1061/10000 train_time:97861ms step_avg:92.23ms +[2025-08-22 19:09:07] [Rank 0] step:1061/10000 train_time:97861ms step_avg:92.23ms +[2025-08-22 19:09:09] [Rank 0] step:1081/10000 train_time:99708ms step_avg:92.24ms +[2025-08-22 19:09:09] [Rank 0] step:1081/10000 train_time:99708ms step_avg:92.24ms +[2025-08-22 19:09:11] [Rank 0] step:1101/10000 train_time:101557ms step_avg:92.24ms +[2025-08-22 19:09:11] [Rank 0] step:1101/10000 train_time:101557ms step_avg:92.24ms +[2025-08-22 19:09:13] [Rank 0] step:1121/10000 train_time:103409ms step_avg:92.25ms +[2025-08-22 19:09:13] [Rank 0] step:1121/10000 train_time:103409ms step_avg:92.25ms +[2025-08-22 19:09:15] [Rank 0] step:1141/10000 train_time:105259ms step_avg:92.25ms +[2025-08-22 19:09:15] [Rank 0] step:1141/10000 train_time:105259ms step_avg:92.25ms +[2025-08-22 19:09:17] [Rank 0] step:1161/10000 train_time:107111ms step_avg:92.26ms +[2025-08-22 19:09:17] [Rank 0] step:1161/10000 train_time:107111ms step_avg:92.26ms +[2025-08-22 19:09:18] [Rank 0] step:1181/10000 train_time:108964ms step_avg:92.26ms +[2025-08-22 19:09:18] [Rank 0] step:1181/10000 train_time:108964ms step_avg:92.26ms +[2025-08-22 19:09:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:09:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:09:34] [Rank 0] PRINT: step:1200/10000 val_loss:4.2900 svd_entropy: attn_qk:H=0.9173,top10E=0.06,eRank=443.4,q75/q25=10.78 attn_vo:H=0.9162,top10E=0.06,eRank=440.4,q75/q25=11.21 mlp_w1:H=0.9716,top10E=0.04,eRank=636.1,q75/q25=2.86 mlp_w2:H=0.9675,top10E=0.05,eRank=619.0,q75/q25=2.98 vo_prod:H=0.8422,top10E=0.13,eRank=270.6,q75/q25=65.42 train_time:110778ms step_avg:92.31ms +[2025-08-22 19:09:34] [Rank 0] PRINT: step:1200/10000 val_loss:4.2900 svd_entropy: attn_qk:H=0.9173,top10E=0.06,eRank=443.4,q75/q25=10.78 attn_vo:H=0.9162,top10E=0.06,eRank=440.4,q75/q25=11.21 mlp_w1:H=0.9716,top10E=0.04,eRank=636.1,q75/q25=2.86 mlp_w2:H=0.9675,top10E=0.05,eRank=619.0,q75/q25=2.98 vo_prod:H=0.8422,top10E=0.13,eRank=270.6,q75/q25=65.42 train_time:110778ms step_avg:92.31ms +[2025-08-22 19:09:34] [Rank 0] step:1201/10000 train_time:110828ms step_avg:92.28ms +[2025-08-22 19:09:34] [Rank 0] step:1201/10000 train_time:110828ms step_avg:92.28ms +[2025-08-22 19:09:36] [Rank 0] step:1221/10000 train_time:112703ms step_avg:92.30ms +[2025-08-22 19:09:36] [Rank 0] step:1221/10000 train_time:112703ms step_avg:92.30ms +[2025-08-22 19:09:38] [Rank 0] step:1241/10000 train_time:114553ms step_avg:92.31ms +[2025-08-22 19:09:38] [Rank 0] step:1241/10000 train_time:114553ms step_avg:92.31ms +[2025-08-22 19:09:40] [Rank 0] step:1261/10000 train_time:116406ms step_avg:92.31ms +[2025-08-22 19:09:40] [Rank 0] step:1261/10000 train_time:116406ms step_avg:92.31ms +[2025-08-22 19:09:41] [Rank 0] step:1281/10000 train_time:118262ms step_avg:92.32ms +[2025-08-22 19:09:41] [Rank 0] step:1281/10000 train_time:118262ms step_avg:92.32ms +[2025-08-22 19:09:43] [Rank 0] step:1301/10000 train_time:120119ms step_avg:92.33ms +[2025-08-22 19:09:43] [Rank 0] step:1301/10000 train_time:120119ms step_avg:92.33ms +[2025-08-22 19:09:45] [Rank 0] step:1321/10000 train_time:121976ms step_avg:92.34ms +[2025-08-22 19:09:45] [Rank 0] step:1321/10000 train_time:121976ms step_avg:92.34ms +[2025-08-22 19:09:47] 
[Rank 0] step:1341/10000 train_time:123836ms step_avg:92.35ms +[2025-08-22 19:09:47] [Rank 0] step:1341/10000 train_time:123836ms step_avg:92.35ms +[2025-08-22 19:09:49] [Rank 0] step:1361/10000 train_time:125695ms step_avg:92.35ms +[2025-08-22 19:09:49] [Rank 0] step:1361/10000 train_time:125695ms step_avg:92.35ms +[2025-08-22 19:09:51] [Rank 0] step:1381/10000 train_time:127553ms step_avg:92.36ms +[2025-08-22 19:09:51] [Rank 0] step:1381/10000 train_time:127553ms step_avg:92.36ms +[2025-08-22 19:09:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:09:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:10:06] [Rank 0] PRINT: step:1400/10000 val_loss:4.2290 svd_entropy: attn_qk:H=0.9166,top10E=0.07,eRank=441.4,q75/q25=10.83 attn_vo:H=0.9159,top10E=0.06,eRank=439.4,q75/q25=11.26 mlp_w1:H=0.9716,top10E=0.04,eRank=635.9,q75/q25=2.85 mlp_w2:H=0.9673,top10E=0.05,eRank=618.0,q75/q25=2.97 vo_prod:H=0.8420,top10E=0.13,eRank=270.4,q75/q25=66.19 train_time:129503ms step_avg:92.50ms +[2025-08-22 19:10:06] [Rank 0] PRINT: step:1400/10000 val_loss:4.2290 svd_entropy: attn_qk:H=0.9166,top10E=0.07,eRank=441.4,q75/q25=10.83 attn_vo:H=0.9159,top10E=0.06,eRank=439.4,q75/q25=11.26 mlp_w1:H=0.9716,top10E=0.04,eRank=635.9,q75/q25=2.85 mlp_w2:H=0.9673,top10E=0.05,eRank=618.0,q75/q25=2.97 vo_prod:H=0.8420,top10E=0.13,eRank=270.4,q75/q25=66.19 train_time:129503ms step_avg:92.50ms +[2025-08-22 19:10:06] [Rank 0] step:1401/10000 train_time:129553ms step_avg:92.47ms +[2025-08-22 19:10:06] [Rank 0] step:1401/10000 train_time:129553ms step_avg:92.47ms +[2025-08-22 19:10:08] [Rank 0] step:1421/10000 train_time:131427ms step_avg:92.49ms +[2025-08-22 19:10:08] [Rank 0] step:1421/10000 train_time:131427ms step_avg:92.49ms +[2025-08-22 19:10:10] [Rank 0] step:1441/10000 train_time:133274ms step_avg:92.49ms 
+[2025-08-22 19:10:10] [Rank 0] step:1441/10000 train_time:133274ms step_avg:92.49ms +[2025-08-22 19:10:12] [Rank 0] step:1461/10000 train_time:135122ms step_avg:92.49ms +[2025-08-22 19:10:12] [Rank 0] step:1461/10000 train_time:135122ms step_avg:92.49ms +[2025-08-22 19:10:14] [Rank 0] step:1481/10000 train_time:136972ms step_avg:92.49ms +[2025-08-22 19:10:14] [Rank 0] step:1481/10000 train_time:136972ms step_avg:92.49ms +[2025-08-22 19:10:16] [Rank 0] step:1501/10000 train_time:138833ms step_avg:92.49ms +[2025-08-22 19:10:16] [Rank 0] step:1501/10000 train_time:138833ms step_avg:92.49ms +[2025-08-22 19:10:18] [Rank 0] step:1521/10000 train_time:140693ms step_avg:92.50ms +[2025-08-22 19:10:18] [Rank 0] step:1521/10000 train_time:140693ms step_avg:92.50ms +[2025-08-22 19:10:19] [Rank 0] step:1541/10000 train_time:142555ms step_avg:92.51ms +[2025-08-22 19:10:19] [Rank 0] step:1541/10000 train_time:142555ms step_avg:92.51ms +[2025-08-22 19:10:21] [Rank 0] step:1561/10000 train_time:144418ms step_avg:92.52ms +[2025-08-22 19:10:21] [Rank 0] step:1561/10000 train_time:144418ms step_avg:92.52ms +[2025-08-22 19:10:23] [Rank 0] step:1581/10000 train_time:146279ms step_avg:92.52ms +[2025-08-22 19:10:23] [Rank 0] step:1581/10000 train_time:146279ms step_avg:92.52ms +[2025-08-22 19:10:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:10:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:10:39] [Rank 0] PRINT: step:1600/10000 val_loss:4.1379 svd_entropy: attn_qk:H=0.9160,top10E=0.07,eRank=439.7,q75/q25=10.87 attn_vo:H=0.9157,top10E=0.06,eRank=438.8,q75/q25=11.36 mlp_w1:H=0.9716,top10E=0.04,eRank=636.0,q75/q25=2.84 mlp_w2:H=0.9671,top10E=0.05,eRank=617.2,q75/q25=2.97 vo_prod:H=0.8419,top10E=0.13,eRank=270.3,q75/q25=66.37 train_time:148102ms step_avg:92.56ms +[2025-08-22 19:10:39] [Rank 0] PRINT: step:1600/10000 val_loss:4.1379 svd_entropy: attn_qk:H=0.9160,top10E=0.07,eRank=439.7,q75/q25=10.87 attn_vo:H=0.9157,top10E=0.06,eRank=438.8,q75/q25=11.36 mlp_w1:H=0.9716,top10E=0.04,eRank=636.0,q75/q25=2.84 mlp_w2:H=0.9671,top10E=0.05,eRank=617.2,q75/q25=2.97 vo_prod:H=0.8419,top10E=0.13,eRank=270.3,q75/q25=66.37 train_time:148102ms step_avg:92.56ms +[2025-08-22 19:10:39] [Rank 0] step:1601/10000 train_time:148153ms step_avg:92.54ms +[2025-08-22 19:10:39] [Rank 0] step:1601/10000 train_time:148153ms step_avg:92.54ms +[2025-08-22 19:10:41] [Rank 0] step:1621/10000 train_time:150031ms step_avg:92.55ms +[2025-08-22 19:10:41] [Rank 0] step:1621/10000 train_time:150031ms step_avg:92.55ms +[2025-08-22 19:10:43] [Rank 0] step:1641/10000 train_time:151889ms step_avg:92.56ms +[2025-08-22 19:10:43] [Rank 0] step:1641/10000 train_time:151889ms step_avg:92.56ms +[2025-08-22 19:10:44] [Rank 0] step:1661/10000 train_time:153749ms step_avg:92.56ms +[2025-08-22 19:10:44] [Rank 0] step:1661/10000 train_time:153749ms step_avg:92.56ms +[2025-08-22 19:10:46] [Rank 0] step:1681/10000 train_time:155608ms step_avg:92.57ms +[2025-08-22 19:10:46] [Rank 0] step:1681/10000 train_time:155608ms step_avg:92.57ms +[2025-08-22 19:10:48] [Rank 0] step:1701/10000 train_time:157468ms step_avg:92.57ms +[2025-08-22 19:10:48] [Rank 0] step:1701/10000 train_time:157468ms step_avg:92.57ms +[2025-08-22 19:10:50] [Rank 0] step:1721/10000 train_time:159328ms step_avg:92.58ms +[2025-08-22 19:10:50] [Rank 0] step:1721/10000 train_time:159328ms step_avg:92.58ms +[2025-08-22 19:10:52] 
[Rank 0] step:1741/10000 train_time:161190ms step_avg:92.58ms +[2025-08-22 19:10:52] [Rank 0] step:1741/10000 train_time:161190ms step_avg:92.58ms +[2025-08-22 19:10:54] [Rank 0] step:1761/10000 train_time:163052ms step_avg:92.59ms +[2025-08-22 19:10:54] [Rank 0] step:1761/10000 train_time:163052ms step_avg:92.59ms +[2025-08-22 19:10:56] [Rank 0] step:1781/10000 train_time:165019ms step_avg:92.66ms +[2025-08-22 19:10:56] [Rank 0] step:1781/10000 train_time:165019ms step_avg:92.66ms +[2025-08-22 19:10:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:10:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:11:11] [Rank 0] PRINT: step:1800/10000 val_loss:4.0796 svd_entropy: attn_qk:H=0.9154,top10E=0.07,eRank=438.0,q75/q25=10.92 attn_vo:H=0.9155,top10E=0.06,eRank=438.5,q75/q25=11.36 mlp_w1:H=0.9716,top10E=0.04,eRank=636.0,q75/q25=2.84 mlp_w2:H=0.9669,top10E=0.05,eRank=616.6,q75/q25=2.96 vo_prod:H=0.8419,top10E=0.13,eRank=270.4,q75/q25=66.87 train_time:166966ms step_avg:92.76ms +[2025-08-22 19:11:11] [Rank 0] PRINT: step:1800/10000 val_loss:4.0796 svd_entropy: attn_qk:H=0.9154,top10E=0.07,eRank=438.0,q75/q25=10.92 attn_vo:H=0.9155,top10E=0.06,eRank=438.5,q75/q25=11.36 mlp_w1:H=0.9716,top10E=0.04,eRank=636.0,q75/q25=2.84 mlp_w2:H=0.9669,top10E=0.05,eRank=616.6,q75/q25=2.96 vo_prod:H=0.8419,top10E=0.13,eRank=270.4,q75/q25=66.87 train_time:166966ms step_avg:92.76ms +[2025-08-22 19:11:11] [Rank 0] step:1801/10000 train_time:167017ms step_avg:92.74ms +[2025-08-22 19:11:11] [Rank 0] step:1801/10000 train_time:167017ms step_avg:92.74ms +[2025-08-22 19:11:13] [Rank 0] step:1821/10000 train_time:168878ms step_avg:92.74ms +[2025-08-22 19:11:13] [Rank 0] step:1821/10000 train_time:168878ms step_avg:92.74ms +[2025-08-22 19:11:15] [Rank 0] step:1841/10000 train_time:170735ms step_avg:92.74ms 
+[2025-08-22 19:11:15] [Rank 0] step:1841/10000 train_time:170735ms step_avg:92.74ms +[2025-08-22 19:11:17] [Rank 0] step:1861/10000 train_time:172594ms step_avg:92.74ms +[2025-08-22 19:11:17] [Rank 0] step:1861/10000 train_time:172594ms step_avg:92.74ms +[2025-08-22 19:11:19] [Rank 0] step:1881/10000 train_time:174456ms step_avg:92.75ms +[2025-08-22 19:11:19] [Rank 0] step:1881/10000 train_time:174456ms step_avg:92.75ms +[2025-08-22 19:11:21] [Rank 0] step:1901/10000 train_time:176319ms step_avg:92.75ms +[2025-08-22 19:11:21] [Rank 0] step:1901/10000 train_time:176319ms step_avg:92.75ms +[2025-08-22 19:11:23] [Rank 0] step:1921/10000 train_time:178182ms step_avg:92.75ms +[2025-08-22 19:11:23] [Rank 0] step:1921/10000 train_time:178182ms step_avg:92.75ms +[2025-08-22 19:11:25] [Rank 0] step:1941/10000 train_time:180046ms step_avg:92.76ms +[2025-08-22 19:11:25] [Rank 0] step:1941/10000 train_time:180046ms step_avg:92.76ms +[2025-08-22 19:11:26] [Rank 0] step:1961/10000 train_time:181910ms step_avg:92.76ms +[2025-08-22 19:11:26] [Rank 0] step:1961/10000 train_time:181910ms step_avg:92.76ms +[2025-08-22 19:11:28] [Rank 0] step:1981/10000 train_time:183776ms step_avg:92.77ms +[2025-08-22 19:11:28] [Rank 0] step:1981/10000 train_time:183776ms step_avg:92.77ms +[2025-08-22 19:11:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:11:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:11:44] [Rank 0] PRINT: step:2000/10000 val_loss:4.0484 svd_entropy: attn_qk:H=0.9150,top10E=0.07,eRank=436.7,q75/q25=10.97 attn_vo:H=0.9155,top10E=0.06,eRank=438.3,q75/q25=11.39 mlp_w1:H=0.9716,top10E=0.04,eRank=636.1,q75/q25=2.84 mlp_w2:H=0.9668,top10E=0.05,eRank=616.3,q75/q25=2.96 vo_prod:H=0.8421,top10E=0.13,eRank=270.9,q75/q25=67.23 train_time:185600ms step_avg:92.80ms +[2025-08-22 19:11:44] [Rank 0] PRINT: step:2000/10000 val_loss:4.0484 svd_entropy: attn_qk:H=0.9150,top10E=0.07,eRank=436.7,q75/q25=10.97 attn_vo:H=0.9155,top10E=0.06,eRank=438.3,q75/q25=11.39 mlp_w1:H=0.9716,top10E=0.04,eRank=636.1,q75/q25=2.84 mlp_w2:H=0.9668,top10E=0.05,eRank=616.3,q75/q25=2.96 vo_prod:H=0.8421,top10E=0.13,eRank=270.9,q75/q25=67.23 train_time:185600ms step_avg:92.80ms +[2025-08-22 19:11:44] [Rank 0] step:2001/10000 train_time:185650ms step_avg:92.78ms +[2025-08-22 19:11:44] [Rank 0] step:2001/10000 train_time:185650ms step_avg:92.78ms +[2025-08-22 19:11:46] [Rank 0] step:2021/10000 train_time:187532ms step_avg:92.79ms +[2025-08-22 19:11:46] [Rank 0] step:2021/10000 train_time:187532ms step_avg:92.79ms +[2025-08-22 19:11:48] [Rank 0] step:2041/10000 train_time:189686ms step_avg:92.94ms +[2025-08-22 19:11:48] [Rank 0] step:2041/10000 train_time:189686ms step_avg:92.94ms +[2025-08-22 19:11:50] [Rank 0] step:2061/10000 train_time:191550ms step_avg:92.94ms +[2025-08-22 19:11:50] [Rank 0] step:2061/10000 train_time:191550ms step_avg:92.94ms +[2025-08-22 19:11:52] [Rank 0] step:2081/10000 train_time:193417ms step_avg:92.94ms +[2025-08-22 19:11:52] [Rank 0] step:2081/10000 train_time:193417ms step_avg:92.94ms +[2025-08-22 19:11:54] [Rank 0] step:2101/10000 train_time:195282ms step_avg:92.95ms +[2025-08-22 19:11:54] [Rank 0] step:2101/10000 train_time:195282ms step_avg:92.95ms +[2025-08-22 19:11:55] [Rank 0] step:2121/10000 train_time:197150ms step_avg:92.95ms +[2025-08-22 19:11:55] [Rank 0] step:2121/10000 train_time:197150ms step_avg:92.95ms +[2025-08-22 19:11:57] 
[Rank 0] step:2141/10000 train_time:199019ms step_avg:92.96ms +[2025-08-22 19:11:57] [Rank 0] step:2141/10000 train_time:199019ms step_avg:92.96ms +[2025-08-22 19:11:59] [Rank 0] step:2161/10000 train_time:201012ms step_avg:93.02ms +[2025-08-22 19:11:59] [Rank 0] step:2161/10000 train_time:201012ms step_avg:93.02ms +[2025-08-22 19:12:01] [Rank 0] step:2181/10000 train_time:202994ms step_avg:93.07ms +[2025-08-22 19:12:01] [Rank 0] step:2181/10000 train_time:202994ms step_avg:93.07ms +[2025-08-22 19:12:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:12:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:12:17] [Rank 0] PRINT: step:2200/10000 val_loss:4.0060 svd_entropy: attn_qk:H=0.9145,top10E=0.07,eRank=435.5,q75/q25=11.00 attn_vo:H=0.9155,top10E=0.06,eRank=438.3,q75/q25=11.40 mlp_w1:H=0.9717,top10E=0.04,eRank=636.3,q75/q25=2.82 mlp_w2:H=0.9668,top10E=0.05,eRank=616.0,q75/q25=2.95 vo_prod:H=0.8422,top10E=0.13,eRank=271.1,q75/q25=68.00 train_time:204821ms step_avg:93.10ms +[2025-08-22 19:12:17] [Rank 0] PRINT: step:2200/10000 val_loss:4.0060 svd_entropy: attn_qk:H=0.9145,top10E=0.07,eRank=435.5,q75/q25=11.00 attn_vo:H=0.9155,top10E=0.06,eRank=438.3,q75/q25=11.40 mlp_w1:H=0.9717,top10E=0.04,eRank=636.3,q75/q25=2.82 mlp_w2:H=0.9668,top10E=0.05,eRank=616.0,q75/q25=2.95 vo_prod:H=0.8422,top10E=0.13,eRank=271.1,q75/q25=68.00 train_time:204821ms step_avg:93.10ms +[2025-08-22 19:12:17] [Rank 0] step:2201/10000 train_time:204872ms step_avg:93.08ms +[2025-08-22 19:12:17] [Rank 0] step:2201/10000 train_time:204872ms step_avg:93.08ms +[2025-08-22 19:12:19] [Rank 0] step:2221/10000 train_time:206741ms step_avg:93.08ms +[2025-08-22 19:12:19] [Rank 0] step:2221/10000 train_time:206741ms step_avg:93.08ms +[2025-08-22 19:12:21] [Rank 0] step:2241/10000 train_time:208638ms step_avg:93.10ms 
+[2025-08-22 19:12:21] [Rank 0] step:2241/10000 train_time:208638ms step_avg:93.10ms +[2025-08-22 19:12:22] [Rank 0] step:2261/10000 train_time:210542ms step_avg:93.12ms +[2025-08-22 19:12:22] [Rank 0] step:2261/10000 train_time:210542ms step_avg:93.12ms +[2025-08-22 19:12:24] [Rank 0] step:2281/10000 train_time:212446ms step_avg:93.14ms +[2025-08-22 19:12:24] [Rank 0] step:2281/10000 train_time:212446ms step_avg:93.14ms +[2025-08-22 19:12:26] [Rank 0] step:2301/10000 train_time:214351ms step_avg:93.16ms +[2025-08-22 19:12:26] [Rank 0] step:2301/10000 train_time:214351ms step_avg:93.16ms +[2025-08-22 19:12:28] [Rank 0] step:2321/10000 train_time:216256ms step_avg:93.17ms +[2025-08-22 19:12:28] [Rank 0] step:2321/10000 train_time:216256ms step_avg:93.17ms +[2025-08-22 19:12:30] [Rank 0] step:2341/10000 train_time:218162ms step_avg:93.19ms +[2025-08-22 19:12:30] [Rank 0] step:2341/10000 train_time:218162ms step_avg:93.19ms +[2025-08-22 19:12:32] [Rank 0] step:2361/10000 train_time:220069ms step_avg:93.21ms +[2025-08-22 19:12:32] [Rank 0] step:2361/10000 train_time:220069ms step_avg:93.21ms +[2025-08-22 19:12:34] [Rank 0] step:2381/10000 train_time:221977ms step_avg:93.23ms +[2025-08-22 19:12:34] [Rank 0] step:2381/10000 train_time:221977ms step_avg:93.23ms +[2025-08-22 19:12:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:12:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:12:50] [Rank 0] PRINT: step:2400/10000 val_loss:3.9351 svd_entropy: attn_qk:H=0.9140,top10E=0.07,eRank=434.0,q75/q25=11.08 attn_vo:H=0.9155,top10E=0.06,eRank=438.4,q75/q25=11.43 mlp_w1:H=0.9717,top10E=0.04,eRank=636.5,q75/q25=2.82 mlp_w2:H=0.9667,top10E=0.05,eRank=615.7,q75/q25=2.95 vo_prod:H=0.8423,top10E=0.13,eRank=271.5,q75/q25=67.31 train_time:223845ms step_avg:93.27ms +[2025-08-22 19:12:50] [Rank 0] PRINT: step:2400/10000 val_loss:3.9351 svd_entropy: attn_qk:H=0.9140,top10E=0.07,eRank=434.0,q75/q25=11.08 attn_vo:H=0.9155,top10E=0.06,eRank=438.4,q75/q25=11.43 mlp_w1:H=0.9717,top10E=0.04,eRank=636.5,q75/q25=2.82 mlp_w2:H=0.9667,top10E=0.05,eRank=615.7,q75/q25=2.95 vo_prod:H=0.8423,top10E=0.13,eRank=271.5,q75/q25=67.31 train_time:223845ms step_avg:93.27ms +[2025-08-22 19:12:50] [Rank 0] step:2401/10000 train_time:223896ms step_avg:93.25ms +[2025-08-22 19:12:50] [Rank 0] step:2401/10000 train_time:223896ms step_avg:93.25ms +[2025-08-22 19:12:52] [Rank 0] step:2421/10000 train_time:225825ms step_avg:93.28ms +[2025-08-22 19:12:52] [Rank 0] step:2421/10000 train_time:225825ms step_avg:93.28ms +[2025-08-22 19:12:53] [Rank 0] step:2441/10000 train_time:227726ms step_avg:93.29ms +[2025-08-22 19:12:53] [Rank 0] step:2441/10000 train_time:227726ms step_avg:93.29ms +[2025-08-22 19:12:55] [Rank 0] step:2461/10000 train_time:229628ms step_avg:93.31ms +[2025-08-22 19:12:55] [Rank 0] step:2461/10000 train_time:229628ms step_avg:93.31ms +[2025-08-22 19:12:57] [Rank 0] step:2481/10000 train_time:231529ms step_avg:93.32ms +[2025-08-22 19:12:57] [Rank 0] step:2481/10000 train_time:231529ms step_avg:93.32ms +[2025-08-22 19:12:59] [Rank 0] step:2501/10000 train_time:233434ms step_avg:93.34ms +[2025-08-22 19:12:59] [Rank 0] step:2501/10000 train_time:233434ms step_avg:93.34ms +[2025-08-22 19:13:01] [Rank 0] step:2521/10000 train_time:235340ms step_avg:93.35ms +[2025-08-22 19:13:01] [Rank 0] step:2521/10000 train_time:235340ms step_avg:93.35ms +[2025-08-22 19:13:03] 
[Rank 0] step:2541/10000 train_time:237339ms step_avg:93.40ms +[2025-08-22 19:13:03] [Rank 0] step:2541/10000 train_time:237339ms step_avg:93.40ms +[2025-08-22 19:13:05] [Rank 0] step:2561/10000 train_time:239408ms step_avg:93.48ms +[2025-08-22 19:13:05] [Rank 0] step:2561/10000 train_time:239408ms step_avg:93.48ms +[2025-08-22 19:13:07] [Rank 0] step:2581/10000 train_time:241314ms step_avg:93.50ms +[2025-08-22 19:13:07] [Rank 0] step:2581/10000 train_time:241314ms step_avg:93.50ms +[2025-08-22 19:13:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:13:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:13:22] [Rank 0] PRINT: step:2600/10000 val_loss:3.9105 svd_entropy: attn_qk:H=0.9137,top10E=0.07,eRank=433.1,q75/q25=11.09 attn_vo:H=0.9156,top10E=0.06,eRank=438.7,q75/q25=11.41 mlp_w1:H=0.9717,top10E=0.04,eRank=636.6,q75/q25=2.81 mlp_w2:H=0.9666,top10E=0.05,eRank=615.5,q75/q25=2.94 vo_prod:H=0.8427,top10E=0.13,eRank=272.4,q75/q25=66.83 train_time:243178ms step_avg:93.53ms +[2025-08-22 19:13:22] [Rank 0] PRINT: step:2600/10000 val_loss:3.9105 svd_entropy: attn_qk:H=0.9137,top10E=0.07,eRank=433.1,q75/q25=11.09 attn_vo:H=0.9156,top10E=0.06,eRank=438.7,q75/q25=11.41 mlp_w1:H=0.9717,top10E=0.04,eRank=636.6,q75/q25=2.81 mlp_w2:H=0.9666,top10E=0.05,eRank=615.5,q75/q25=2.94 vo_prod:H=0.8427,top10E=0.13,eRank=272.4,q75/q25=66.83 train_time:243178ms step_avg:93.53ms +[2025-08-22 19:13:23] [Rank 0] step:2601/10000 train_time:243229ms step_avg:93.51ms +[2025-08-22 19:13:23] [Rank 0] step:2601/10000 train_time:243229ms step_avg:93.51ms +[2025-08-22 19:13:24] [Rank 0] step:2621/10000 train_time:245156ms step_avg:93.54ms +[2025-08-22 19:13:24] [Rank 0] step:2621/10000 train_time:245156ms step_avg:93.54ms +[2025-08-22 19:13:26] [Rank 0] step:2641/10000 train_time:247057ms step_avg:93.55ms 
+[2025-08-22 19:13:26] [Rank 0] step:2641/10000 train_time:247057ms step_avg:93.55ms +[2025-08-22 19:13:28] [Rank 0] step:2661/10000 train_time:248960ms step_avg:93.56ms +[2025-08-22 19:13:28] [Rank 0] step:2661/10000 train_time:248960ms step_avg:93.56ms +[2025-08-22 19:13:30] [Rank 0] step:2681/10000 train_time:250863ms step_avg:93.57ms +[2025-08-22 19:13:30] [Rank 0] step:2681/10000 train_time:250863ms step_avg:93.57ms +[2025-08-22 19:13:32] [Rank 0] step:2701/10000 train_time:252767ms step_avg:93.58ms +[2025-08-22 19:13:32] [Rank 0] step:2701/10000 train_time:252767ms step_avg:93.58ms +[2025-08-22 19:13:34] [Rank 0] step:2721/10000 train_time:254674ms step_avg:93.60ms +[2025-08-22 19:13:34] [Rank 0] step:2721/10000 train_time:254674ms step_avg:93.60ms +[2025-08-22 19:13:36] [Rank 0] step:2741/10000 train_time:256579ms step_avg:93.61ms +[2025-08-22 19:13:36] [Rank 0] step:2741/10000 train_time:256579ms step_avg:93.61ms +[2025-08-22 19:13:38] [Rank 0] step:2761/10000 train_time:258488ms step_avg:93.62ms +[2025-08-22 19:13:38] [Rank 0] step:2761/10000 train_time:258488ms step_avg:93.62ms +[2025-08-22 19:13:40] [Rank 0] step:2781/10000 train_time:260397ms step_avg:93.63ms +[2025-08-22 19:13:40] [Rank 0] step:2781/10000 train_time:260397ms step_avg:93.63ms +[2025-08-22 19:13:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:13:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:13:55] [Rank 0] PRINT: step:2800/10000 val_loss:3.8912 svd_entropy: attn_qk:H=0.9133,top10E=0.07,eRank=432.1,q75/q25=11.16 attn_vo:H=0.9157,top10E=0.06,eRank=438.9,q75/q25=11.45 mlp_w1:H=0.9718,top10E=0.04,eRank=636.7,q75/q25=2.81 mlp_w2:H=0.9666,top10E=0.05,eRank=615.4,q75/q25=2.94 vo_prod:H=0.8431,top10E=0.13,eRank=273.1,q75/q25=67.04 train_time:262264ms step_avg:93.67ms +[2025-08-22 19:13:55] [Rank 0] PRINT: step:2800/10000 val_loss:3.8912 svd_entropy: attn_qk:H=0.9133,top10E=0.07,eRank=432.1,q75/q25=11.16 attn_vo:H=0.9157,top10E=0.06,eRank=438.9,q75/q25=11.45 mlp_w1:H=0.9718,top10E=0.04,eRank=636.7,q75/q25=2.81 mlp_w2:H=0.9666,top10E=0.05,eRank=615.4,q75/q25=2.94 vo_prod:H=0.8431,top10E=0.13,eRank=273.1,q75/q25=67.04 train_time:262264ms step_avg:93.67ms +[2025-08-22 19:13:55] [Rank 0] step:2801/10000 train_time:262317ms step_avg:93.65ms +[2025-08-22 19:13:55] [Rank 0] step:2801/10000 train_time:262317ms step_avg:93.65ms +[2025-08-22 19:13:57] [Rank 0] step:2821/10000 train_time:264228ms step_avg:93.66ms +[2025-08-22 19:13:57] [Rank 0] step:2821/10000 train_time:264228ms step_avg:93.66ms +[2025-08-22 19:13:59] [Rank 0] step:2841/10000 train_time:266134ms step_avg:93.68ms +[2025-08-22 19:13:59] [Rank 0] step:2841/10000 train_time:266134ms step_avg:93.68ms +[2025-08-22 19:14:01] [Rank 0] step:2861/10000 train_time:268043ms step_avg:93.69ms +[2025-08-22 19:14:01] [Rank 0] step:2861/10000 train_time:268043ms step_avg:93.69ms +[2025-08-22 19:14:03] [Rank 0] step:2881/10000 train_time:269951ms step_avg:93.70ms +[2025-08-22 19:14:03] [Rank 0] step:2881/10000 train_time:269951ms step_avg:93.70ms +[2025-08-22 19:14:05] [Rank 0] step:2901/10000 train_time:272046ms step_avg:93.78ms +[2025-08-22 19:14:05] [Rank 0] step:2901/10000 train_time:272046ms step_avg:93.78ms +[2025-08-22 19:14:07] [Rank 0] step:2921/10000 train_time:273989ms step_avg:93.80ms +[2025-08-22 19:14:07] [Rank 0] step:2921/10000 train_time:273989ms step_avg:93.80ms +[2025-08-22 19:14:09] 
[Rank 0] step:2941/10000 train_time:275957ms step_avg:93.83ms +[2025-08-22 19:14:09] [Rank 0] step:2941/10000 train_time:275957ms step_avg:93.83ms +[2025-08-22 19:14:11] [Rank 0] step:2961/10000 train_time:277869ms step_avg:93.84ms +[2025-08-22 19:14:11] [Rank 0] step:2961/10000 train_time:277869ms step_avg:93.84ms +[2025-08-22 19:14:13] [Rank 0] step:2981/10000 train_time:279792ms step_avg:93.86ms +[2025-08-22 19:14:13] [Rank 0] step:2981/10000 train_time:279792ms step_avg:93.86ms +[2025-08-22 19:14:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:14:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:14:28] [Rank 0] PRINT: step:3000/10000 val_loss:3.8598 svd_entropy: attn_qk:H=0.9130,top10E=0.07,eRank=431.1,q75/q25=11.19 attn_vo:H=0.9157,top10E=0.06,eRank=439.1,q75/q25=11.43 mlp_w1:H=0.9718,top10E=0.04,eRank=636.8,q75/q25=2.80 mlp_w2:H=0.9666,top10E=0.05,eRank=615.2,q75/q25=2.93 vo_prod:H=0.8433,top10E=0.13,eRank=273.5,q75/q25=66.71 train_time:281670ms step_avg:93.89ms +[2025-08-22 19:14:28] [Rank 0] PRINT: step:3000/10000 val_loss:3.8598 svd_entropy: attn_qk:H=0.9130,top10E=0.07,eRank=431.1,q75/q25=11.19 attn_vo:H=0.9157,top10E=0.06,eRank=439.1,q75/q25=11.43 mlp_w1:H=0.9718,top10E=0.04,eRank=636.8,q75/q25=2.80 mlp_w2:H=0.9666,top10E=0.05,eRank=615.2,q75/q25=2.93 vo_prod:H=0.8433,top10E=0.13,eRank=273.5,q75/q25=66.71 train_time:281670ms step_avg:93.89ms +[2025-08-22 19:14:29] [Rank 0] step:3001/10000 train_time:281722ms step_avg:93.88ms +[2025-08-22 19:14:29] [Rank 0] step:3001/10000 train_time:281722ms step_avg:93.88ms +[2025-08-22 19:14:30] [Rank 0] step:3021/10000 train_time:283655ms step_avg:93.89ms +[2025-08-22 19:14:30] [Rank 0] step:3021/10000 train_time:283655ms step_avg:93.89ms +[2025-08-22 19:14:32] [Rank 0] step:3041/10000 train_time:285566ms step_avg:93.91ms 
+[2025-08-22 19:14:32] [Rank 0] step:3041/10000 train_time:285566ms step_avg:93.91ms +[2025-08-22 19:14:34] [Rank 0] step:3061/10000 train_time:287478ms step_avg:93.92ms +[2025-08-22 19:14:34] [Rank 0] step:3061/10000 train_time:287478ms step_avg:93.92ms +[2025-08-22 19:14:36] [Rank 0] step:3081/10000 train_time:289390ms step_avg:93.93ms +[2025-08-22 19:14:36] [Rank 0] step:3081/10000 train_time:289390ms step_avg:93.93ms +[2025-08-22 19:14:38] [Rank 0] step:3101/10000 train_time:291305ms step_avg:93.94ms +[2025-08-22 19:14:38] [Rank 0] step:3101/10000 train_time:291305ms step_avg:93.94ms +[2025-08-22 19:14:40] [Rank 0] step:3121/10000 train_time:293218ms step_avg:93.95ms +[2025-08-22 19:14:40] [Rank 0] step:3121/10000 train_time:293218ms step_avg:93.95ms +[2025-08-22 19:14:42] [Rank 0] step:3141/10000 train_time:295133ms step_avg:93.96ms +[2025-08-22 19:14:42] [Rank 0] step:3141/10000 train_time:295133ms step_avg:93.96ms +[2025-08-22 19:14:44] [Rank 0] step:3161/10000 train_time:297048ms step_avg:93.97ms +[2025-08-22 19:14:44] [Rank 0] step:3161/10000 train_time:297048ms step_avg:93.97ms +[2025-08-22 19:14:46] [Rank 0] step:3181/10000 train_time:298964ms step_avg:93.98ms +[2025-08-22 19:14:46] [Rank 0] step:3181/10000 train_time:298964ms step_avg:93.98ms +[2025-08-22 19:14:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:14:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:15:01] [Rank 0] PRINT: step:3200/10000 val_loss:3.8312 svd_entropy: attn_qk:H=0.9127,top10E=0.07,eRank=430.2,q75/q25=11.16 attn_vo:H=0.9157,top10E=0.06,eRank=439.1,q75/q25=11.47 mlp_w1:H=0.9718,top10E=0.04,eRank=636.9,q75/q25=2.80 mlp_w2:H=0.9665,top10E=0.05,eRank=615.1,q75/q25=2.93 vo_prod:H=0.8434,top10E=0.13,eRank=273.7,q75/q25=66.79 train_time:300840ms step_avg:94.01ms +[2025-08-22 19:15:01] [Rank 0] PRINT: step:3200/10000 val_loss:3.8312 svd_entropy: attn_qk:H=0.9127,top10E=0.07,eRank=430.2,q75/q25=11.16 attn_vo:H=0.9157,top10E=0.06,eRank=439.1,q75/q25=11.47 mlp_w1:H=0.9718,top10E=0.04,eRank=636.9,q75/q25=2.80 mlp_w2:H=0.9665,top10E=0.05,eRank=615.1,q75/q25=2.93 vo_prod:H=0.8434,top10E=0.13,eRank=273.7,q75/q25=66.79 train_time:300840ms step_avg:94.01ms +[2025-08-22 19:15:01] [Rank 0] step:3201/10000 train_time:300891ms step_avg:94.00ms +[2025-08-22 19:15:01] [Rank 0] step:3201/10000 train_time:300891ms step_avg:94.00ms +[2025-08-22 19:15:03] [Rank 0] step:3221/10000 train_time:302830ms step_avg:94.02ms +[2025-08-22 19:15:03] [Rank 0] step:3221/10000 train_time:302830ms step_avg:94.02ms +[2025-08-22 19:15:05] [Rank 0] step:3241/10000 train_time:304739ms step_avg:94.03ms +[2025-08-22 19:15:05] [Rank 0] step:3241/10000 train_time:304739ms step_avg:94.03ms +[2025-08-22 19:15:07] [Rank 0] step:3261/10000 train_time:306648ms step_avg:94.04ms +[2025-08-22 19:15:07] [Rank 0] step:3261/10000 train_time:306648ms step_avg:94.04ms +[2025-08-22 19:15:09] [Rank 0] step:3281/10000 train_time:308642ms step_avg:94.07ms +[2025-08-22 19:15:09] [Rank 0] step:3281/10000 train_time:308642ms step_avg:94.07ms +[2025-08-22 19:15:11] [Rank 0] step:3301/10000 train_time:310667ms step_avg:94.11ms +[2025-08-22 19:15:11] [Rank 0] step:3301/10000 train_time:310667ms step_avg:94.11ms +[2025-08-22 19:15:13] [Rank 0] step:3321/10000 train_time:312581ms step_avg:94.12ms +[2025-08-22 19:15:13] [Rank 0] step:3321/10000 train_time:312581ms step_avg:94.12ms +[2025-08-22 19:15:15] 
[Rank 0] step:3341/10000 train_time:314495ms step_avg:94.13ms +[2025-08-22 19:15:15] [Rank 0] step:3341/10000 train_time:314495ms step_avg:94.13ms +[2025-08-22 19:15:17] [Rank 0] step:3361/10000 train_time:316409ms step_avg:94.14ms +[2025-08-22 19:15:17] [Rank 0] step:3361/10000 train_time:316409ms step_avg:94.14ms +[2025-08-22 19:15:19] [Rank 0] step:3381/10000 train_time:318326ms step_avg:94.15ms +[2025-08-22 19:15:19] [Rank 0] step:3381/10000 train_time:318326ms step_avg:94.15ms +[2025-08-22 19:15:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:15:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:15:35] [Rank 0] PRINT: step:3400/10000 val_loss:3.8096 svd_entropy: attn_qk:H=0.9124,top10E=0.07,eRank=429.5,q75/q25=11.24 attn_vo:H=0.9158,top10E=0.06,eRank=439.4,q75/q25=11.46 mlp_w1:H=0.9718,top10E=0.04,eRank=636.9,q75/q25=2.80 mlp_w2:H=0.9665,top10E=0.05,eRank=615.1,q75/q25=2.93 vo_prod:H=0.8436,top10E=0.13,eRank=274.2,q75/q25=65.68 train_time:320199ms step_avg:94.18ms +[2025-08-22 19:15:35] [Rank 0] PRINT: step:3400/10000 val_loss:3.8096 svd_entropy: attn_qk:H=0.9124,top10E=0.07,eRank=429.5,q75/q25=11.24 attn_vo:H=0.9158,top10E=0.06,eRank=439.4,q75/q25=11.46 mlp_w1:H=0.9718,top10E=0.04,eRank=636.9,q75/q25=2.80 mlp_w2:H=0.9665,top10E=0.05,eRank=615.1,q75/q25=2.93 vo_prod:H=0.8436,top10E=0.13,eRank=274.2,q75/q25=65.68 train_time:320199ms step_avg:94.18ms +[2025-08-22 19:15:35] [Rank 0] step:3401/10000 train_time:320251ms step_avg:94.16ms +[2025-08-22 19:15:35] [Rank 0] step:3401/10000 train_time:320251ms step_avg:94.16ms +[2025-08-22 19:15:37] [Rank 0] step:3421/10000 train_time:322173ms step_avg:94.18ms +[2025-08-22 19:15:37] [Rank 0] step:3421/10000 train_time:322173ms step_avg:94.18ms +[2025-08-22 19:15:38] [Rank 0] step:3441/10000 train_time:324082ms step_avg:94.18ms 
+[2025-08-22 19:15:38] [Rank 0] step:3441/10000 train_time:324082ms step_avg:94.18ms +[2025-08-22 19:15:40] [Rank 0] step:3461/10000 train_time:325991ms step_avg:94.19ms +[2025-08-22 19:15:40] [Rank 0] step:3461/10000 train_time:325991ms step_avg:94.19ms +[2025-08-22 19:15:42] [Rank 0] step:3481/10000 train_time:327905ms step_avg:94.20ms +[2025-08-22 19:15:42] [Rank 0] step:3481/10000 train_time:327905ms step_avg:94.20ms +[2025-08-22 19:15:44] [Rank 0] step:3501/10000 train_time:329820ms step_avg:94.21ms +[2025-08-22 19:15:44] [Rank 0] step:3501/10000 train_time:329820ms step_avg:94.21ms +[2025-08-22 19:15:46] [Rank 0] step:3521/10000 train_time:331736ms step_avg:94.22ms +[2025-08-22 19:15:46] [Rank 0] step:3521/10000 train_time:331736ms step_avg:94.22ms +[2025-08-22 19:15:48] [Rank 0] step:3541/10000 train_time:333651ms step_avg:94.23ms +[2025-08-22 19:15:48] [Rank 0] step:3541/10000 train_time:333651ms step_avg:94.23ms +[2025-08-22 19:15:50] [Rank 0] step:3561/10000 train_time:335566ms step_avg:94.23ms +[2025-08-22 19:15:50] [Rank 0] step:3561/10000 train_time:335566ms step_avg:94.23ms +[2025-08-22 19:15:52] [Rank 0] step:3581/10000 train_time:337482ms step_avg:94.24ms +[2025-08-22 19:15:52] [Rank 0] step:3581/10000 train_time:337482ms step_avg:94.24ms +[2025-08-22 19:15:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:15:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:16:07] [Rank 0] PRINT: step:3600/10000 val_loss:3.8032 svd_entropy: attn_qk:H=0.9121,top10E=0.07,eRank=428.7,q75/q25=11.25 attn_vo:H=0.9159,top10E=0.06,eRank=439.6,q75/q25=11.49 mlp_w1:H=0.9719,top10E=0.04,eRank=637.1,q75/q25=2.80 mlp_w2:H=0.9665,top10E=0.05,eRank=615.0,q75/q25=2.92 vo_prod:H=0.8438,top10E=0.13,eRank=274.6,q75/q25=66.52 train_time:339358ms step_avg:94.27ms +[2025-08-22 19:16:07] [Rank 0] PRINT: step:3600/10000 val_loss:3.8032 svd_entropy: attn_qk:H=0.9121,top10E=0.07,eRank=428.7,q75/q25=11.25 attn_vo:H=0.9159,top10E=0.06,eRank=439.6,q75/q25=11.49 mlp_w1:H=0.9719,top10E=0.04,eRank=637.1,q75/q25=2.80 mlp_w2:H=0.9665,top10E=0.05,eRank=615.0,q75/q25=2.92 vo_prod:H=0.8438,top10E=0.13,eRank=274.6,q75/q25=66.52 train_time:339358ms step_avg:94.27ms +[2025-08-22 19:16:08] [Rank 0] step:3601/10000 train_time:339410ms step_avg:94.25ms +[2025-08-22 19:16:08] [Rank 0] step:3601/10000 train_time:339410ms step_avg:94.25ms +[2025-08-22 19:16:10] [Rank 0] step:3621/10000 train_time:341324ms step_avg:94.26ms +[2025-08-22 19:16:10] [Rank 0] step:3621/10000 train_time:341324ms step_avg:94.26ms +[2025-08-22 19:16:12] [Rank 0] step:3641/10000 train_time:343377ms step_avg:94.31ms +[2025-08-22 19:16:12] [Rank 0] step:3641/10000 train_time:343377ms step_avg:94.31ms +[2025-08-22 19:16:14] [Rank 0] step:3661/10000 train_time:345394ms step_avg:94.34ms +[2025-08-22 19:16:14] [Rank 0] step:3661/10000 train_time:345394ms step_avg:94.34ms +[2025-08-22 19:16:16] [Rank 0] step:3681/10000 train_time:347311ms step_avg:94.35ms +[2025-08-22 19:16:16] [Rank 0] step:3681/10000 train_time:347311ms step_avg:94.35ms +[2025-08-22 19:16:17] [Rank 0] step:3701/10000 train_time:349229ms step_avg:94.36ms +[2025-08-22 19:16:17] [Rank 0] step:3701/10000 train_time:349229ms step_avg:94.36ms +[2025-08-22 19:16:19] [Rank 0] step:3721/10000 train_time:351177ms step_avg:94.38ms +[2025-08-22 19:16:19] [Rank 0] step:3721/10000 train_time:351177ms step_avg:94.38ms +[2025-08-22 19:16:21] 
[Rank 0] step:3741/10000 train_time:353133ms step_avg:94.40ms +[2025-08-22 19:16:21] [Rank 0] step:3741/10000 train_time:353133ms step_avg:94.40ms +[2025-08-22 19:16:23] [Rank 0] step:3761/10000 train_time:355088ms step_avg:94.41ms +[2025-08-22 19:16:23] [Rank 0] step:3761/10000 train_time:355088ms step_avg:94.41ms +[2025-08-22 19:16:25] [Rank 0] step:3781/10000 train_time:357044ms step_avg:94.43ms +[2025-08-22 19:16:25] [Rank 0] step:3781/10000 train_time:357044ms step_avg:94.43ms +[2025-08-22 19:16:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:16:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:16:41] [Rank 0] PRINT: step:3800/10000 val_loss:3.7742 svd_entropy: attn_qk:H=0.9118,top10E=0.07,eRank=427.9,q75/q25=11.31 attn_vo:H=0.9159,top10E=0.06,eRank=439.7,q75/q25=11.51 mlp_w1:H=0.9719,top10E=0.04,eRank=637.1,q75/q25=2.79 mlp_w2:H=0.9665,top10E=0.05,eRank=614.8,q75/q25=2.92 vo_prod:H=0.8440,top10E=0.13,eRank=275.0,q75/q25=65.97 train_time:358960ms step_avg:94.46ms +[2025-08-22 19:16:41] [Rank 0] PRINT: step:3800/10000 val_loss:3.7742 svd_entropy: attn_qk:H=0.9118,top10E=0.07,eRank=427.9,q75/q25=11.31 attn_vo:H=0.9159,top10E=0.06,eRank=439.7,q75/q25=11.51 mlp_w1:H=0.9719,top10E=0.04,eRank=637.1,q75/q25=2.79 mlp_w2:H=0.9665,top10E=0.05,eRank=614.8,q75/q25=2.92 vo_prod:H=0.8440,top10E=0.13,eRank=275.0,q75/q25=65.97 train_time:358960ms step_avg:94.46ms +[2025-08-22 19:16:41] [Rank 0] step:3801/10000 train_time:359013ms step_avg:94.45ms +[2025-08-22 19:16:41] [Rank 0] step:3801/10000 train_time:359013ms step_avg:94.45ms +[2025-08-22 19:16:43] [Rank 0] step:3821/10000 train_time:360978ms step_avg:94.47ms +[2025-08-22 19:16:43] [Rank 0] step:3821/10000 train_time:360978ms step_avg:94.47ms +[2025-08-22 19:16:45] [Rank 0] step:3841/10000 train_time:362932ms step_avg:94.49ms 
+[2025-08-22 19:16:45] [Rank 0] step:3841/10000 train_time:362932ms step_avg:94.49ms +[2025-08-22 19:16:47] [Rank 0] step:3861/10000 train_time:364883ms step_avg:94.50ms +[2025-08-22 19:16:47] [Rank 0] step:3861/10000 train_time:364883ms step_avg:94.50ms +[2025-08-22 19:16:49] [Rank 0] step:3881/10000 train_time:366832ms step_avg:94.52ms +[2025-08-22 19:16:49] [Rank 0] step:3881/10000 train_time:366832ms step_avg:94.52ms +[2025-08-22 19:16:51] [Rank 0] step:3901/10000 train_time:368782ms step_avg:94.54ms +[2025-08-22 19:16:51] [Rank 0] step:3901/10000 train_time:368782ms step_avg:94.54ms +[2025-08-22 19:16:53] [Rank 0] step:3921/10000 train_time:370732ms step_avg:94.55ms +[2025-08-22 19:16:53] [Rank 0] step:3921/10000 train_time:370732ms step_avg:94.55ms +[2025-08-22 19:16:55] [Rank 0] step:3941/10000 train_time:372683ms step_avg:94.57ms +[2025-08-22 19:16:55] [Rank 0] step:3941/10000 train_time:372683ms step_avg:94.57ms +[2025-08-22 19:16:57] [Rank 0] step:3961/10000 train_time:374634ms step_avg:94.58ms +[2025-08-22 19:16:57] [Rank 0] step:3961/10000 train_time:374634ms step_avg:94.58ms +[2025-08-22 19:16:59] [Rank 0] step:3981/10000 train_time:376584ms step_avg:94.60ms +[2025-08-22 19:16:59] [Rank 0] step:3981/10000 train_time:376584ms step_avg:94.60ms +[2025-08-22 19:17:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:17:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:17:14] [Rank 0] PRINT: step:4000/10000 val_loss:3.7550 svd_entropy: attn_qk:H=0.9115,top10E=0.07,eRank=427.1,q75/q25=11.32 attn_vo:H=0.9159,top10E=0.06,eRank=439.7,q75/q25=11.50 mlp_w1:H=0.9719,top10E=0.04,eRank=637.2,q75/q25=2.79 mlp_w2:H=0.9664,top10E=0.05,eRank=614.7,q75/q25=2.92 vo_prod:H=0.8441,top10E=0.13,eRank=275.2,q75/q25=65.90 train_time:378492ms step_avg:94.62ms +[2025-08-22 19:17:14] [Rank 0] PRINT: step:4000/10000 val_loss:3.7550 svd_entropy: attn_qk:H=0.9115,top10E=0.07,eRank=427.1,q75/q25=11.32 attn_vo:H=0.9159,top10E=0.06,eRank=439.7,q75/q25=11.50 mlp_w1:H=0.9719,top10E=0.04,eRank=637.2,q75/q25=2.79 mlp_w2:H=0.9664,top10E=0.05,eRank=614.7,q75/q25=2.92 vo_prod:H=0.8441,top10E=0.13,eRank=275.2,q75/q25=65.90 train_time:378492ms step_avg:94.62ms +[2025-08-22 19:17:14] [Rank 0] step:4001/10000 train_time:378544ms step_avg:94.61ms +[2025-08-22 19:17:14] [Rank 0] step:4001/10000 train_time:378544ms step_avg:94.61ms +[2025-08-22 19:17:16] [Rank 0] step:4021/10000 train_time:380654ms step_avg:94.67ms +[2025-08-22 19:17:16] [Rank 0] step:4021/10000 train_time:380654ms step_avg:94.67ms +[2025-08-22 19:17:18] [Rank 0] step:4041/10000 train_time:382643ms step_avg:94.69ms +[2025-08-22 19:17:18] [Rank 0] step:4041/10000 train_time:382643ms step_avg:94.69ms +[2025-08-22 19:17:20] [Rank 0] step:4061/10000 train_time:384589ms step_avg:94.70ms +[2025-08-22 19:17:20] [Rank 0] step:4061/10000 train_time:384589ms step_avg:94.70ms +[2025-08-22 19:17:23] [Rank 0] step:4081/10000 train_time:386836ms step_avg:94.79ms +[2025-08-22 19:17:23] [Rank 0] step:4081/10000 train_time:386836ms step_avg:94.79ms +[2025-08-22 19:17:25] [Rank 0] step:4101/10000 train_time:388784ms step_avg:94.80ms +[2025-08-22 19:17:25] [Rank 0] step:4101/10000 train_time:388784ms step_avg:94.80ms +[2025-08-22 19:17:27] [Rank 0] step:4121/10000 train_time:390732ms step_avg:94.81ms +[2025-08-22 19:17:27] [Rank 0] step:4121/10000 train_time:390732ms step_avg:94.81ms +[2025-08-22 19:17:28] 
[Rank 0] step:4141/10000 train_time:392682ms step_avg:94.83ms +[2025-08-22 19:17:28] [Rank 0] step:4141/10000 train_time:392682ms step_avg:94.83ms +[2025-08-22 19:17:30] [Rank 0] step:4161/10000 train_time:394631ms step_avg:94.84ms +[2025-08-22 19:17:30] [Rank 0] step:4161/10000 train_time:394631ms step_avg:94.84ms +[2025-08-22 19:17:32] [Rank 0] step:4181/10000 train_time:396584ms step_avg:94.85ms +[2025-08-22 19:17:32] [Rank 0] step:4181/10000 train_time:396584ms step_avg:94.85ms +[2025-08-22 19:17:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:17:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:17:48] [Rank 0] PRINT: step:4200/10000 val_loss:3.7425 svd_entropy: attn_qk:H=0.9113,top10E=0.07,eRank=426.4,q75/q25=11.34 attn_vo:H=0.9159,top10E=0.06,eRank=439.8,q75/q25=11.47 mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.79 mlp_w2:H=0.9664,top10E=0.05,eRank=614.7,q75/q25=2.91 vo_prod:H=0.8442,top10E=0.13,eRank=275.5,q75/q25=65.68 train_time:398491ms step_avg:94.88ms +[2025-08-22 19:17:48] [Rank 0] PRINT: step:4200/10000 val_loss:3.7425 svd_entropy: attn_qk:H=0.9113,top10E=0.07,eRank=426.4,q75/q25=11.34 attn_vo:H=0.9159,top10E=0.06,eRank=439.8,q75/q25=11.47 mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.79 mlp_w2:H=0.9664,top10E=0.05,eRank=614.7,q75/q25=2.91 vo_prod:H=0.8442,top10E=0.13,eRank=275.5,q75/q25=65.68 train_time:398491ms step_avg:94.88ms +[2025-08-22 19:17:48] [Rank 0] step:4201/10000 train_time:398544ms step_avg:94.87ms +[2025-08-22 19:17:48] [Rank 0] step:4201/10000 train_time:398544ms step_avg:94.87ms +[2025-08-22 19:17:50] [Rank 0] step:4221/10000 train_time:400497ms step_avg:94.88ms +[2025-08-22 19:17:50] [Rank 0] step:4221/10000 train_time:400497ms step_avg:94.88ms +[2025-08-22 19:17:52] [Rank 0] step:4241/10000 train_time:402447ms step_avg:94.89ms 
+[2025-08-22 19:17:52] [Rank 0] step:4241/10000 train_time:402447ms step_avg:94.89ms +[2025-08-22 19:17:54] [Rank 0] step:4261/10000 train_time:404396ms step_avg:94.91ms +[2025-08-22 19:17:54] [Rank 0] step:4261/10000 train_time:404396ms step_avg:94.91ms +[2025-08-22 19:17:56] [Rank 0] step:4281/10000 train_time:406345ms step_avg:94.92ms +[2025-08-22 19:17:56] [Rank 0] step:4281/10000 train_time:406345ms step_avg:94.92ms +[2025-08-22 19:17:58] [Rank 0] step:4301/10000 train_time:408295ms step_avg:94.93ms +[2025-08-22 19:17:58] [Rank 0] step:4301/10000 train_time:408295ms step_avg:94.93ms +[2025-08-22 19:18:00] [Rank 0] step:4321/10000 train_time:410248ms step_avg:94.94ms +[2025-08-22 19:18:00] [Rank 0] step:4321/10000 train_time:410248ms step_avg:94.94ms +[2025-08-22 19:18:02] [Rank 0] step:4341/10000 train_time:412197ms step_avg:94.95ms +[2025-08-22 19:18:02] [Rank 0] step:4341/10000 train_time:412197ms step_avg:94.95ms +[2025-08-22 19:18:04] [Rank 0] step:4361/10000 train_time:414151ms step_avg:94.97ms +[2025-08-22 19:18:04] [Rank 0] step:4361/10000 train_time:414151ms step_avg:94.97ms +[2025-08-22 19:18:06] [Rank 0] step:4381/10000 train_time:416104ms step_avg:94.98ms +[2025-08-22 19:18:06] [Rank 0] step:4381/10000 train_time:416104ms step_avg:94.98ms +[2025-08-22 19:18:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:18:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:18:21] [Rank 0] PRINT: step:4400/10000 val_loss:3.7290 svd_entropy: attn_qk:H=0.9110,top10E=0.07,eRank=425.8,q75/q25=11.34 attn_vo:H=0.9160,top10E=0.06,eRank=439.9,q75/q25=11.50 mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.78 mlp_w2:H=0.9664,top10E=0.05,eRank=614.7,q75/q25=2.91 vo_prod:H=0.8444,top10E=0.13,eRank=275.9,q75/q25=65.71 train_time:418016ms step_avg:95.00ms +[2025-08-22 19:18:21] [Rank 0] PRINT: step:4400/10000 val_loss:3.7290 svd_entropy: attn_qk:H=0.9110,top10E=0.07,eRank=425.8,q75/q25=11.34 attn_vo:H=0.9160,top10E=0.06,eRank=439.9,q75/q25=11.50 mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.78 mlp_w2:H=0.9664,top10E=0.05,eRank=614.7,q75/q25=2.91 vo_prod:H=0.8444,top10E=0.13,eRank=275.9,q75/q25=65.71 train_time:418016ms step_avg:95.00ms +[2025-08-22 19:18:21] [Rank 0] step:4401/10000 train_time:418069ms step_avg:94.99ms +[2025-08-22 19:18:21] [Rank 0] step:4401/10000 train_time:418069ms step_avg:94.99ms +[2025-08-22 19:18:23] [Rank 0] step:4421/10000 train_time:420046ms step_avg:95.01ms +[2025-08-22 19:18:23] [Rank 0] step:4421/10000 train_time:420046ms step_avg:95.01ms +[2025-08-22 19:18:25] [Rank 0] step:4441/10000 train_time:421995ms step_avg:95.02ms +[2025-08-22 19:18:25] [Rank 0] step:4441/10000 train_time:421995ms step_avg:95.02ms +[2025-08-22 19:18:27] [Rank 0] step:4461/10000 train_time:423951ms step_avg:95.04ms +[2025-08-22 19:18:27] [Rank 0] step:4461/10000 train_time:423951ms step_avg:95.04ms +[2025-08-22 19:18:29] [Rank 0] step:4481/10000 train_time:425911ms step_avg:95.05ms +[2025-08-22 19:18:29] [Rank 0] step:4481/10000 train_time:425911ms step_avg:95.05ms +[2025-08-22 19:18:31] [Rank 0] step:4501/10000 train_time:427869ms step_avg:95.06ms +[2025-08-22 19:18:31] [Rank 0] step:4501/10000 train_time:427869ms step_avg:95.06ms +[2025-08-22 19:18:33] [Rank 0] step:4521/10000 train_time:429828ms step_avg:95.07ms +[2025-08-22 19:18:33] [Rank 0] step:4521/10000 train_time:429828ms step_avg:95.07ms +[2025-08-22 19:18:35] 
[Rank 0] step:4541/10000 train_time:431790ms step_avg:95.09ms +[2025-08-22 19:18:35] [Rank 0] step:4541/10000 train_time:431790ms step_avg:95.09ms +[2025-08-22 19:18:37] [Rank 0] step:4561/10000 train_time:433749ms step_avg:95.10ms +[2025-08-22 19:18:37] [Rank 0] step:4561/10000 train_time:433749ms step_avg:95.10ms +[2025-08-22 19:18:39] [Rank 0] step:4581/10000 train_time:435713ms step_avg:95.11ms +[2025-08-22 19:18:39] [Rank 0] step:4581/10000 train_time:435713ms step_avg:95.11ms +[2025-08-22 19:18:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:18:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:18:55] [Rank 0] PRINT: step:4600/10000 val_loss:3.7128 svd_entropy: attn_qk:H=0.9108,top10E=0.07,eRank=425.1,q75/q25=11.37 attn_vo:H=0.9160,top10E=0.06,eRank=440.1,q75/q25=11.52 mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.78 mlp_w2:H=0.9664,top10E=0.05,eRank=614.6,q75/q25=2.91 vo_prod:H=0.8445,top10E=0.13,eRank=276.2,q75/q25=65.72 train_time:437631ms step_avg:95.14ms +[2025-08-22 19:18:55] [Rank 0] PRINT: step:4600/10000 val_loss:3.7128 svd_entropy: attn_qk:H=0.9108,top10E=0.07,eRank=425.1,q75/q25=11.37 attn_vo:H=0.9160,top10E=0.06,eRank=440.1,q75/q25=11.52 mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.78 mlp_w2:H=0.9664,top10E=0.05,eRank=614.6,q75/q25=2.91 vo_prod:H=0.8445,top10E=0.13,eRank=276.2,q75/q25=65.72 train_time:437631ms step_avg:95.14ms +[2025-08-22 19:18:55] [Rank 0] step:4601/10000 train_time:437684ms step_avg:95.13ms +[2025-08-22 19:18:55] [Rank 0] step:4601/10000 train_time:437684ms step_avg:95.13ms +[2025-08-22 19:18:57] [Rank 0] step:4621/10000 train_time:439644ms step_avg:95.14ms +[2025-08-22 19:18:57] [Rank 0] step:4621/10000 train_time:439644ms step_avg:95.14ms +[2025-08-22 19:18:59] [Rank 0] step:4641/10000 train_time:441599ms step_avg:95.15ms 
+[2025-08-22 19:18:59] [Rank 0] step:4641/10000 train_time:441599ms step_avg:95.15ms +[2025-08-22 19:19:01] [Rank 0] step:4661/10000 train_time:443552ms step_avg:95.16ms +[2025-08-22 19:19:01] [Rank 0] step:4661/10000 train_time:443552ms step_avg:95.16ms +[2025-08-22 19:19:03] [Rank 0] step:4681/10000 train_time:445508ms step_avg:95.17ms +[2025-08-22 19:19:03] [Rank 0] step:4681/10000 train_time:445508ms step_avg:95.17ms +[2025-08-22 19:19:05] [Rank 0] step:4701/10000 train_time:447466ms step_avg:95.19ms +[2025-08-22 19:19:05] [Rank 0] step:4701/10000 train_time:447466ms step_avg:95.19ms +[2025-08-22 19:19:07] [Rank 0] step:4721/10000 train_time:449422ms step_avg:95.20ms +[2025-08-22 19:19:07] [Rank 0] step:4721/10000 train_time:449422ms step_avg:95.20ms +[2025-08-22 19:19:09] [Rank 0] step:4741/10000 train_time:451378ms step_avg:95.21ms +[2025-08-22 19:19:09] [Rank 0] step:4741/10000 train_time:451378ms step_avg:95.21ms +[2025-08-22 19:19:11] [Rank 0] step:4761/10000 train_time:453337ms step_avg:95.22ms +[2025-08-22 19:19:11] [Rank 0] step:4761/10000 train_time:453337ms step_avg:95.22ms +[2025-08-22 19:19:13] [Rank 0] step:4781/10000 train_time:455291ms step_avg:95.23ms +[2025-08-22 19:19:13] [Rank 0] step:4781/10000 train_time:455291ms step_avg:95.23ms +[2025-08-22 19:19:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:19:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:19:28] [Rank 0] PRINT: step:4800/10000 val_loss:3.7035 svd_entropy: attn_qk:H=0.9105,top10E=0.07,eRank=424.5,q75/q25=11.44 attn_vo:H=0.9161,top10E=0.06,eRank=440.3,q75/q25=11.49 mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.78 mlp_w2:H=0.9664,top10E=0.05,eRank=614.6,q75/q25=2.91 vo_prod:H=0.8447,top10E=0.13,eRank=276.6,q75/q25=65.67 train_time:457207ms step_avg:95.25ms +[2025-08-22 19:19:28] [Rank 0] PRINT: step:4800/10000 val_loss:3.7035 svd_entropy: attn_qk:H=0.9105,top10E=0.07,eRank=424.5,q75/q25=11.44 attn_vo:H=0.9161,top10E=0.06,eRank=440.3,q75/q25=11.49 mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.78 mlp_w2:H=0.9664,top10E=0.05,eRank=614.6,q75/q25=2.91 vo_prod:H=0.8447,top10E=0.13,eRank=276.6,q75/q25=65.67 train_time:457207ms step_avg:95.25ms +[2025-08-22 19:19:29] [Rank 0] step:4801/10000 train_time:457260ms step_avg:95.24ms +[2025-08-22 19:19:29] [Rank 0] step:4801/10000 train_time:457260ms step_avg:95.24ms +[2025-08-22 19:19:30] [Rank 0] step:4821/10000 train_time:459236ms step_avg:95.26ms +[2025-08-22 19:19:30] [Rank 0] step:4821/10000 train_time:459236ms step_avg:95.26ms +[2025-08-22 19:19:32] [Rank 0] step:4841/10000 train_time:461186ms step_avg:95.27ms +[2025-08-22 19:19:32] [Rank 0] step:4841/10000 train_time:461186ms step_avg:95.27ms +[2025-08-22 19:19:34] [Rank 0] step:4861/10000 train_time:463141ms step_avg:95.28ms +[2025-08-22 19:19:34] [Rank 0] step:4861/10000 train_time:463141ms step_avg:95.28ms +[2025-08-22 19:19:36] [Rank 0] step:4881/10000 train_time:465096ms step_avg:95.29ms +[2025-08-22 19:19:36] [Rank 0] step:4881/10000 train_time:465096ms step_avg:95.29ms +[2025-08-22 19:19:38] [Rank 0] step:4901/10000 train_time:467048ms step_avg:95.30ms +[2025-08-22 19:19:38] [Rank 0] step:4901/10000 train_time:467048ms step_avg:95.30ms +[2025-08-22 19:19:40] [Rank 0] step:4921/10000 train_time:469005ms step_avg:95.31ms +[2025-08-22 19:19:40] [Rank 0] step:4921/10000 train_time:469005ms step_avg:95.31ms +[2025-08-22 19:19:42] 
[Rank 0] step:4941/10000 train_time:470959ms step_avg:95.32ms +[2025-08-22 19:19:42] [Rank 0] step:4941/10000 train_time:470959ms step_avg:95.32ms +[2025-08-22 19:19:44] [Rank 0] step:4961/10000 train_time:472914ms step_avg:95.33ms +[2025-08-22 19:19:44] [Rank 0] step:4961/10000 train_time:472914ms step_avg:95.33ms +[2025-08-22 19:19:46] [Rank 0] step:4981/10000 train_time:474871ms step_avg:95.34ms +[2025-08-22 19:19:46] [Rank 0] step:4981/10000 train_time:474871ms step_avg:95.34ms +[2025-08-22 19:19:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:19:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:20:02] [Rank 0] PRINT: step:5000/10000 val_loss:3.6931 svd_entropy: attn_qk:H=0.9103,top10E=0.07,eRank=423.9,q75/q25=11.48 attn_vo:H=0.9161,top10E=0.06,eRank=440.4,q75/q25=11.47 mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.78 mlp_w2:H=0.9664,top10E=0.05,eRank=614.6,q75/q25=2.91 vo_prod:H=0.8449,top10E=0.13,eRank=276.9,q75/q25=64.80 train_time:476786ms step_avg:95.36ms +[2025-08-22 19:20:02] [Rank 0] PRINT: step:5000/10000 val_loss:3.6931 svd_entropy: attn_qk:H=0.9103,top10E=0.07,eRank=423.9,q75/q25=11.48 attn_vo:H=0.9161,top10E=0.06,eRank=440.4,q75/q25=11.47 mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.78 mlp_w2:H=0.9664,top10E=0.05,eRank=614.6,q75/q25=2.91 vo_prod:H=0.8449,top10E=0.13,eRank=276.9,q75/q25=64.80 train_time:476786ms step_avg:95.36ms +[2025-08-22 19:20:02] [Rank 0] step:5001/10000 train_time:476839ms step_avg:95.35ms +[2025-08-22 19:20:02] [Rank 0] step:5001/10000 train_time:476839ms step_avg:95.35ms +[2025-08-22 19:20:04] [Rank 0] step:5021/10000 train_time:478805ms step_avg:95.36ms +[2025-08-22 19:20:04] [Rank 0] step:5021/10000 train_time:478805ms step_avg:95.36ms +[2025-08-22 19:20:06] [Rank 0] step:5041/10000 train_time:480762ms step_avg:95.37ms 
+[2025-08-22 19:20:06] [Rank 0] step:5041/10000 train_time:480762ms step_avg:95.37ms +[2025-08-22 19:20:08] [Rank 0] step:5061/10000 train_time:482714ms step_avg:95.38ms +[2025-08-22 19:20:08] [Rank 0] step:5061/10000 train_time:482714ms step_avg:95.38ms +[2025-08-22 19:20:10] [Rank 0] step:5081/10000 train_time:484670ms step_avg:95.39ms +[2025-08-22 19:20:10] [Rank 0] step:5081/10000 train_time:484670ms step_avg:95.39ms +[2025-08-22 19:20:12] [Rank 0] step:5101/10000 train_time:486625ms step_avg:95.40ms +[2025-08-22 19:20:12] [Rank 0] step:5101/10000 train_time:486625ms step_avg:95.40ms +[2025-08-22 19:20:14] [Rank 0] step:5121/10000 train_time:488581ms step_avg:95.41ms +[2025-08-22 19:20:14] [Rank 0] step:5121/10000 train_time:488581ms step_avg:95.41ms +[2025-08-22 19:20:16] [Rank 0] step:5141/10000 train_time:490541ms step_avg:95.42ms +[2025-08-22 19:20:16] [Rank 0] step:5141/10000 train_time:490541ms step_avg:95.42ms +[2025-08-22 19:20:18] [Rank 0] step:5161/10000 train_time:492499ms step_avg:95.43ms +[2025-08-22 19:20:18] [Rank 0] step:5161/10000 train_time:492499ms step_avg:95.43ms +[2025-08-22 19:20:19] [Rank 0] step:5181/10000 train_time:494458ms step_avg:95.44ms +[2025-08-22 19:20:19] [Rank 0] step:5181/10000 train_time:494458ms step_avg:95.44ms +[2025-08-22 19:20:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:20:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:20:35] [Rank 0] PRINT: step:5200/10000 val_loss:3.6813 svd_entropy: attn_qk:H=0.9101,top10E=0.07,eRank=423.4,q75/q25=11.49 attn_vo:H=0.9162,top10E=0.06,eRank=440.5,q75/q25=11.50 mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.78 mlp_w2:H=0.9664,top10E=0.05,eRank=614.5,q75/q25=2.90 vo_prod:H=0.8450,top10E=0.13,eRank=277.2,q75/q25=64.81 train_time:496398ms step_avg:95.46ms +[2025-08-22 19:20:35] [Rank 0] PRINT: step:5200/10000 val_loss:3.6813 svd_entropy: attn_qk:H=0.9101,top10E=0.07,eRank=423.4,q75/q25=11.49 attn_vo:H=0.9162,top10E=0.06,eRank=440.5,q75/q25=11.50 mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.78 mlp_w2:H=0.9664,top10E=0.05,eRank=614.5,q75/q25=2.90 vo_prod:H=0.8450,top10E=0.13,eRank=277.2,q75/q25=64.81 train_time:496398ms step_avg:95.46ms +[2025-08-22 19:20:35] [Rank 0] step:5201/10000 train_time:496452ms step_avg:95.45ms +[2025-08-22 19:20:35] [Rank 0] step:5201/10000 train_time:496452ms step_avg:95.45ms +[2025-08-22 19:20:37] [Rank 0] step:5221/10000 train_time:498464ms step_avg:95.47ms +[2025-08-22 19:20:37] [Rank 0] step:5221/10000 train_time:498464ms step_avg:95.47ms +[2025-08-22 19:20:39] [Rank 0] step:5241/10000 train_time:500450ms step_avg:95.49ms +[2025-08-22 19:20:39] [Rank 0] step:5241/10000 train_time:500450ms step_avg:95.49ms +[2025-08-22 19:20:41] [Rank 0] step:5261/10000 train_time:502436ms step_avg:95.50ms +[2025-08-22 19:20:41] [Rank 0] step:5261/10000 train_time:502436ms step_avg:95.50ms +[2025-08-22 19:20:43] [Rank 0] step:5281/10000 train_time:504422ms step_avg:95.52ms +[2025-08-22 19:20:43] [Rank 0] step:5281/10000 train_time:504422ms step_avg:95.52ms +[2025-08-22 19:20:45] [Rank 0] step:5301/10000 train_time:506419ms step_avg:95.53ms +[2025-08-22 19:20:45] [Rank 0] step:5301/10000 train_time:506419ms step_avg:95.53ms +[2025-08-22 19:20:47] [Rank 0] step:5321/10000 train_time:508408ms step_avg:95.55ms +[2025-08-22 19:20:47] [Rank 0] step:5321/10000 train_time:508408ms step_avg:95.55ms +[2025-08-22 19:20:49] 
[Rank 0] step:5341/10000 train_time:510398ms step_avg:95.56ms +[2025-08-22 19:20:49] [Rank 0] step:5341/10000 train_time:510398ms step_avg:95.56ms +[2025-08-22 19:20:51] [Rank 0] step:5361/10000 train_time:512390ms step_avg:95.58ms +[2025-08-22 19:20:51] [Rank 0] step:5361/10000 train_time:512390ms step_avg:95.58ms +[2025-08-22 19:20:53] [Rank 0] step:5381/10000 train_time:514382ms step_avg:95.59ms +[2025-08-22 19:20:53] [Rank 0] step:5381/10000 train_time:514382ms step_avg:95.59ms +[2025-08-22 19:20:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:20:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:21:09] [Rank 0] PRINT: step:5400/10000 val_loss:3.6691 svd_entropy: attn_qk:H=0.9099,top10E=0.08,eRank=422.7,q75/q25=11.49 attn_vo:H=0.9162,top10E=0.06,eRank=440.6,q75/q25=11.48 mlp_w1:H=0.9719,top10E=0.04,eRank=637.4,q75/q25=2.78 mlp_w2:H=0.9664,top10E=0.05,eRank=614.5,q75/q25=2.90 vo_prod:H=0.8452,top10E=0.13,eRank=277.5,q75/q25=65.12 train_time:516331ms step_avg:95.62ms +[2025-08-22 19:21:09] [Rank 0] PRINT: step:5400/10000 val_loss:3.6691 svd_entropy: attn_qk:H=0.9099,top10E=0.08,eRank=422.7,q75/q25=11.49 attn_vo:H=0.9162,top10E=0.06,eRank=440.6,q75/q25=11.48 mlp_w1:H=0.9719,top10E=0.04,eRank=637.4,q75/q25=2.78 mlp_w2:H=0.9664,top10E=0.05,eRank=614.5,q75/q25=2.90 vo_prod:H=0.8452,top10E=0.13,eRank=277.5,q75/q25=65.12 train_time:516331ms step_avg:95.62ms +[2025-08-22 19:21:09] [Rank 0] step:5401/10000 train_time:516384ms step_avg:95.61ms +[2025-08-22 19:21:09] [Rank 0] step:5401/10000 train_time:516384ms step_avg:95.61ms +[2025-08-22 19:21:11] [Rank 0] step:5421/10000 train_time:518397ms step_avg:95.63ms +[2025-08-22 19:21:11] [Rank 0] step:5421/10000 train_time:518397ms step_avg:95.63ms +[2025-08-22 19:21:13] [Rank 0] step:5441/10000 train_time:520384ms step_avg:95.64ms 
+[2025-08-22 19:21:13] [Rank 0] step:5441/10000 train_time:520384ms step_avg:95.64ms +[2025-08-22 19:21:15] [Rank 0] step:5461/10000 train_time:522378ms step_avg:95.66ms +[2025-08-22 19:21:15] [Rank 0] step:5461/10000 train_time:522378ms step_avg:95.66ms +[2025-08-22 19:21:17] [Rank 0] step:5481/10000 train_time:524369ms step_avg:95.67ms +[2025-08-22 19:21:17] [Rank 0] step:5481/10000 train_time:524369ms step_avg:95.67ms +[2025-08-22 19:21:19] [Rank 0] step:5501/10000 train_time:526368ms step_avg:95.69ms +[2025-08-22 19:21:19] [Rank 0] step:5501/10000 train_time:526368ms step_avg:95.69ms +[2025-08-22 19:21:21] [Rank 0] step:5521/10000 train_time:528365ms step_avg:95.70ms +[2025-08-22 19:21:21] [Rank 0] step:5521/10000 train_time:528365ms step_avg:95.70ms +[2025-08-22 19:21:23] [Rank 0] step:5541/10000 train_time:530361ms step_avg:95.72ms +[2025-08-22 19:21:23] [Rank 0] step:5541/10000 train_time:530361ms step_avg:95.72ms +[2025-08-22 19:21:25] [Rank 0] step:5561/10000 train_time:532360ms step_avg:95.73ms +[2025-08-22 19:21:25] [Rank 0] step:5561/10000 train_time:532360ms step_avg:95.73ms +[2025-08-22 19:21:27] [Rank 0] step:5581/10000 train_time:534358ms step_avg:95.75ms +[2025-08-22 19:21:27] [Rank 0] step:5581/10000 train_time:534358ms step_avg:95.75ms +[2025-08-22 19:21:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:21:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:21:43] [Rank 0] PRINT: step:5600/10000 val_loss:3.6618 svd_entropy: attn_qk:H=0.9097,top10E=0.08,eRank=422.2,q75/q25=11.56 attn_vo:H=0.9162,top10E=0.06,eRank=440.7,q75/q25=11.48 mlp_w1:H=0.9719,top10E=0.04,eRank=637.4,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.05,eRank=614.4,q75/q25=2.90 vo_prod:H=0.8452,top10E=0.13,eRank=277.7,q75/q25=64.72 train_time:536429ms step_avg:95.79ms +[2025-08-22 19:21:43] [Rank 0] PRINT: step:5600/10000 val_loss:3.6618 svd_entropy: attn_qk:H=0.9097,top10E=0.08,eRank=422.2,q75/q25=11.56 attn_vo:H=0.9162,top10E=0.06,eRank=440.7,q75/q25=11.48 mlp_w1:H=0.9719,top10E=0.04,eRank=637.4,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.05,eRank=614.4,q75/q25=2.90 vo_prod:H=0.8452,top10E=0.13,eRank=277.7,q75/q25=64.72 train_time:536429ms step_avg:95.79ms +[2025-08-22 19:21:43] [Rank 0] step:5601/10000 train_time:536483ms step_avg:95.78ms +[2025-08-22 19:21:43] [Rank 0] step:5601/10000 train_time:536483ms step_avg:95.78ms +[2025-08-22 19:21:45] [Rank 0] step:5621/10000 train_time:538505ms step_avg:95.80ms +[2025-08-22 19:21:45] [Rank 0] step:5621/10000 train_time:538505ms step_avg:95.80ms +[2025-08-22 19:21:47] [Rank 0] step:5641/10000 train_time:540489ms step_avg:95.81ms +[2025-08-22 19:21:47] [Rank 0] step:5641/10000 train_time:540489ms step_avg:95.81ms +[2025-08-22 19:21:49] [Rank 0] step:5661/10000 train_time:542476ms step_avg:95.83ms +[2025-08-22 19:21:49] [Rank 0] step:5661/10000 train_time:542476ms step_avg:95.83ms +[2025-08-22 19:21:51] [Rank 0] step:5681/10000 train_time:544468ms step_avg:95.84ms +[2025-08-22 19:21:51] [Rank 0] step:5681/10000 train_time:544468ms step_avg:95.84ms +[2025-08-22 19:21:53] [Rank 0] step:5701/10000 train_time:546456ms step_avg:95.85ms +[2025-08-22 19:21:53] [Rank 0] step:5701/10000 train_time:546456ms step_avg:95.85ms +[2025-08-22 19:21:55] [Rank 0] step:5721/10000 train_time:548451ms step_avg:95.87ms +[2025-08-22 19:21:55] [Rank 0] step:5721/10000 train_time:548451ms step_avg:95.87ms +[2025-08-22 19:21:57] 
[Rank 0] step:5741/10000 train_time:550440ms step_avg:95.88ms +[2025-08-22 19:21:57] [Rank 0] step:5741/10000 train_time:550440ms step_avg:95.88ms +[2025-08-22 19:21:59] [Rank 0] step:5761/10000 train_time:552430ms step_avg:95.89ms +[2025-08-22 19:21:59] [Rank 0] step:5761/10000 train_time:552430ms step_avg:95.89ms +[2025-08-22 19:22:01] [Rank 0] step:5781/10000 train_time:554420ms step_avg:95.90ms +[2025-08-22 19:22:01] [Rank 0] step:5781/10000 train_time:554420ms step_avg:95.90ms +[2025-08-22 19:22:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:22:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:22:17] [Rank 0] PRINT: step:5800/10000 val_loss:3.6582 svd_entropy: attn_qk:H=0.9095,top10E=0.08,eRank=421.7,q75/q25=11.54 attn_vo:H=0.9163,top10E=0.06,eRank=440.8,q75/q25=11.49 mlp_w1:H=0.9719,top10E=0.04,eRank=637.4,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.05,eRank=614.4,q75/q25=2.90 vo_prod:H=0.8454,top10E=0.13,eRank=277.9,q75/q25=64.49 train_time:556369ms step_avg:95.93ms +[2025-08-22 19:22:17] [Rank 0] PRINT: step:5800/10000 val_loss:3.6582 svd_entropy: attn_qk:H=0.9095,top10E=0.08,eRank=421.7,q75/q25=11.54 attn_vo:H=0.9163,top10E=0.06,eRank=440.8,q75/q25=11.49 mlp_w1:H=0.9719,top10E=0.04,eRank=637.4,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.05,eRank=614.4,q75/q25=2.90 vo_prod:H=0.8454,top10E=0.13,eRank=277.9,q75/q25=64.49 train_time:556369ms step_avg:95.93ms +[2025-08-22 19:22:17] [Rank 0] step:5801/10000 train_time:556422ms step_avg:95.92ms +[2025-08-22 19:22:17] [Rank 0] step:5801/10000 train_time:556422ms step_avg:95.92ms +[2025-08-22 19:22:19] [Rank 0] step:5821/10000 train_time:558421ms step_avg:95.93ms +[2025-08-22 19:22:19] [Rank 0] step:5821/10000 train_time:558421ms step_avg:95.93ms +[2025-08-22 19:22:21] [Rank 0] step:5841/10000 train_time:560402ms step_avg:95.94ms 
+[2025-08-22 19:22:21] [Rank 0] step:5841/10000 train_time:560402ms step_avg:95.94ms +[2025-08-22 19:22:23] [Rank 0] step:5861/10000 train_time:562391ms step_avg:95.95ms +[2025-08-22 19:22:23] [Rank 0] step:5861/10000 train_time:562391ms step_avg:95.95ms +[2025-08-22 19:22:25] [Rank 0] step:5881/10000 train_time:564376ms step_avg:95.97ms +[2025-08-22 19:22:25] [Rank 0] step:5881/10000 train_time:564376ms step_avg:95.97ms +[2025-08-22 19:22:27] [Rank 0] step:5901/10000 train_time:566363ms step_avg:95.98ms +[2025-08-22 19:22:27] [Rank 0] step:5901/10000 train_time:566363ms step_avg:95.98ms +[2025-08-22 19:22:29] [Rank 0] step:5921/10000 train_time:568349ms step_avg:95.99ms +[2025-08-22 19:22:29] [Rank 0] step:5921/10000 train_time:568349ms step_avg:95.99ms +[2025-08-22 19:22:31] [Rank 0] step:5941/10000 train_time:570501ms step_avg:96.03ms +[2025-08-22 19:22:31] [Rank 0] step:5941/10000 train_time:570501ms step_avg:96.03ms +[2025-08-22 19:22:33] [Rank 0] step:5961/10000 train_time:572573ms step_avg:96.05ms +[2025-08-22 19:22:33] [Rank 0] step:5961/10000 train_time:572573ms step_avg:96.05ms +[2025-08-22 19:22:35] [Rank 0] step:5981/10000 train_time:574563ms step_avg:96.06ms +[2025-08-22 19:22:35] [Rank 0] step:5981/10000 train_time:574563ms step_avg:96.06ms +[2025-08-22 19:22:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:22:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:22:51] [Rank 0] PRINT: step:6000/10000 val_loss:3.6399 svd_entropy: attn_qk:H=0.9093,top10E=0.08,eRank=421.2,q75/q25=11.56 attn_vo:H=0.9163,top10E=0.06,eRank=440.9,q75/q25=11.52 mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.05,eRank=614.3,q75/q25=2.90 vo_prod:H=0.8455,top10E=0.13,eRank=278.1,q75/q25=64.11 train_time:576508ms step_avg:96.08ms +[2025-08-22 19:22:51] [Rank 0] PRINT: step:6000/10000 val_loss:3.6399 svd_entropy: attn_qk:H=0.9093,top10E=0.08,eRank=421.2,q75/q25=11.56 attn_vo:H=0.9163,top10E=0.06,eRank=440.9,q75/q25=11.52 mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.05,eRank=614.3,q75/q25=2.90 vo_prod:H=0.8455,top10E=0.13,eRank=278.1,q75/q25=64.11 train_time:576508ms step_avg:96.08ms +[2025-08-22 19:22:51] [Rank 0] step:6001/10000 train_time:576562ms step_avg:96.08ms +[2025-08-22 19:22:51] [Rank 0] step:6001/10000 train_time:576562ms step_avg:96.08ms +[2025-08-22 19:22:53] [Rank 0] step:6021/10000 train_time:578565ms step_avg:96.09ms +[2025-08-22 19:22:53] [Rank 0] step:6021/10000 train_time:578565ms step_avg:96.09ms +[2025-08-22 19:22:55] [Rank 0] step:6041/10000 train_time:580557ms step_avg:96.10ms +[2025-08-22 19:22:55] [Rank 0] step:6041/10000 train_time:580557ms step_avg:96.10ms +[2025-08-22 19:22:57] [Rank 0] step:6061/10000 train_time:582549ms step_avg:96.11ms +[2025-08-22 19:22:57] [Rank 0] step:6061/10000 train_time:582549ms step_avg:96.11ms +[2025-08-22 19:22:59] [Rank 0] step:6081/10000 train_time:584540ms step_avg:96.13ms +[2025-08-22 19:22:59] [Rank 0] step:6081/10000 train_time:584540ms step_avg:96.13ms +[2025-08-22 19:23:01] [Rank 0] step:6101/10000 train_time:586537ms step_avg:96.14ms +[2025-08-22 19:23:01] [Rank 0] step:6101/10000 train_time:586537ms step_avg:96.14ms +[2025-08-22 19:23:03] [Rank 0] step:6121/10000 train_time:588817ms step_avg:96.20ms +[2025-08-22 19:23:03] [Rank 0] step:6121/10000 train_time:588817ms step_avg:96.20ms +[2025-08-22 19:23:05] 
[Rank 0] step:6141/10000 train_time:590822ms step_avg:96.21ms +[2025-08-22 19:23:05] [Rank 0] step:6141/10000 train_time:590822ms step_avg:96.21ms +[2025-08-22 19:23:07] [Rank 0] step:6161/10000 train_time:592816ms step_avg:96.22ms +[2025-08-22 19:23:07] [Rank 0] step:6161/10000 train_time:592816ms step_avg:96.22ms +[2025-08-22 19:23:09] [Rank 0] step:6181/10000 train_time:594810ms step_avg:96.23ms +[2025-08-22 19:23:09] [Rank 0] step:6181/10000 train_time:594810ms step_avg:96.23ms +[2025-08-22 19:23:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:23:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:23:25] [Rank 0] PRINT: step:6200/10000 val_loss:3.6279 svd_entropy: attn_qk:H=0.9091,top10E=0.08,eRank=420.7,q75/q25=11.60 attn_vo:H=0.9163,top10E=0.06,eRank=441.0,q75/q25=11.52 mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.05,eRank=614.2,q75/q25=2.90 vo_prod:H=0.8455,top10E=0.13,eRank=278.3,q75/q25=63.91 train_time:596764ms step_avg:96.25ms +[2025-08-22 19:23:25] [Rank 0] PRINT: step:6200/10000 val_loss:3.6279 svd_entropy: attn_qk:H=0.9091,top10E=0.08,eRank=420.7,q75/q25=11.60 attn_vo:H=0.9163,top10E=0.06,eRank=441.0,q75/q25=11.52 mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.05,eRank=614.2,q75/q25=2.90 vo_prod:H=0.8455,top10E=0.13,eRank=278.3,q75/q25=63.91 train_time:596764ms step_avg:96.25ms +[2025-08-22 19:23:25] [Rank 0] step:6201/10000 train_time:596818ms step_avg:96.25ms +[2025-08-22 19:23:25] [Rank 0] step:6201/10000 train_time:596818ms step_avg:96.25ms +[2025-08-22 19:23:27] [Rank 0] step:6221/10000 train_time:598824ms step_avg:96.26ms +[2025-08-22 19:23:27] [Rank 0] step:6221/10000 train_time:598824ms step_avg:96.26ms +[2025-08-22 19:23:29] [Rank 0] step:6241/10000 train_time:600816ms step_avg:96.27ms 
+[2025-08-22 19:23:29] [Rank 0] step:6241/10000 train_time:600816ms step_avg:96.27ms +[2025-08-22 19:23:31] [Rank 0] step:6261/10000 train_time:602812ms step_avg:96.28ms +[2025-08-22 19:23:31] [Rank 0] step:6261/10000 train_time:602812ms step_avg:96.28ms +[2025-08-22 19:23:33] [Rank 0] step:6281/10000 train_time:604810ms step_avg:96.29ms +[2025-08-22 19:23:33] [Rank 0] step:6281/10000 train_time:604810ms step_avg:96.29ms +[2025-08-22 19:23:35] [Rank 0] step:6301/10000 train_time:606885ms step_avg:96.32ms +[2025-08-22 19:23:35] [Rank 0] step:6301/10000 train_time:606885ms step_avg:96.32ms +[2025-08-22 19:23:37] [Rank 0] step:6321/10000 train_time:608973ms step_avg:96.34ms +[2025-08-22 19:23:37] [Rank 0] step:6321/10000 train_time:608973ms step_avg:96.34ms +[2025-08-22 19:23:39] [Rank 0] step:6341/10000 train_time:610969ms step_avg:96.35ms +[2025-08-22 19:23:39] [Rank 0] step:6341/10000 train_time:610969ms step_avg:96.35ms +[2025-08-22 19:23:41] [Rank 0] step:6361/10000 train_time:612975ms step_avg:96.36ms +[2025-08-22 19:23:41] [Rank 0] step:6361/10000 train_time:612975ms step_avg:96.36ms +[2025-08-22 19:23:43] [Rank 0] step:6381/10000 train_time:614977ms step_avg:96.38ms +[2025-08-22 19:23:43] [Rank 0] step:6381/10000 train_time:614977ms step_avg:96.38ms +[2025-08-22 19:23:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:23:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:23:59] [Rank 0] PRINT: step:6400/10000 val_loss:3.6160 svd_entropy: attn_qk:H=0.9089,top10E=0.08,eRank=420.2,q75/q25=11.61 attn_vo:H=0.9164,top10E=0.06,eRank=441.1,q75/q25=11.48 mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.05,eRank=614.2,q75/q25=2.89 vo_prod:H=0.8456,top10E=0.13,eRank=278.5,q75/q25=63.70 train_time:616933ms step_avg:96.40ms +[2025-08-22 19:23:59] [Rank 0] PRINT: step:6400/10000 val_loss:3.6160 svd_entropy: attn_qk:H=0.9089,top10E=0.08,eRank=420.2,q75/q25=11.61 attn_vo:H=0.9164,top10E=0.06,eRank=441.1,q75/q25=11.48 mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.05,eRank=614.2,q75/q25=2.89 vo_prod:H=0.8456,top10E=0.13,eRank=278.5,q75/q25=63.70 train_time:616933ms step_avg:96.40ms +[2025-08-22 19:23:59] [Rank 0] step:6401/10000 train_time:616987ms step_avg:96.39ms +[2025-08-22 19:23:59] [Rank 0] step:6401/10000 train_time:616987ms step_avg:96.39ms +[2025-08-22 19:24:01] [Rank 0] step:6421/10000 train_time:619004ms step_avg:96.40ms +[2025-08-22 19:24:01] [Rank 0] step:6421/10000 train_time:619004ms step_avg:96.40ms +[2025-08-22 19:24:03] [Rank 0] step:6441/10000 train_time:620995ms step_avg:96.41ms +[2025-08-22 19:24:03] [Rank 0] step:6441/10000 train_time:620995ms step_avg:96.41ms +[2025-08-22 19:24:05] [Rank 0] step:6461/10000 train_time:622993ms step_avg:96.42ms +[2025-08-22 19:24:05] [Rank 0] step:6461/10000 train_time:622993ms step_avg:96.42ms +[2025-08-22 19:24:07] [Rank 0] step:6481/10000 train_time:624990ms step_avg:96.43ms +[2025-08-22 19:24:07] [Rank 0] step:6481/10000 train_time:624990ms step_avg:96.43ms +[2025-08-22 19:24:09] [Rank 0] step:6501/10000 train_time:626981ms step_avg:96.44ms +[2025-08-22 19:24:09] [Rank 0] step:6501/10000 train_time:626981ms step_avg:96.44ms +[2025-08-22 19:24:11] [Rank 0] step:6521/10000 train_time:628972ms step_avg:96.45ms +[2025-08-22 19:24:11] [Rank 0] step:6521/10000 train_time:628972ms step_avg:96.45ms +[2025-08-22 19:24:13] 
[Rank 0] step:6541/10000 train_time:630967ms step_avg:96.46ms +[2025-08-22 19:24:13] [Rank 0] step:6541/10000 train_time:630967ms step_avg:96.46ms +[2025-08-22 19:24:15] [Rank 0] step:6561/10000 train_time:632962ms step_avg:96.47ms +[2025-08-22 19:24:15] [Rank 0] step:6561/10000 train_time:632962ms step_avg:96.47ms +[2025-08-22 19:24:17] [Rank 0] step:6581/10000 train_time:634954ms step_avg:96.48ms +[2025-08-22 19:24:17] [Rank 0] step:6581/10000 train_time:634954ms step_avg:96.48ms +[2025-08-22 19:24:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:24:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:24:33] [Rank 0] PRINT: step:6600/10000 val_loss:3.6021 svd_entropy: attn_qk:H=0.9087,top10E=0.08,eRank=419.7,q75/q25=11.62 attn_vo:H=0.9164,top10E=0.06,eRank=441.1,q75/q25=11.50 mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.06,eRank=614.2,q75/q25=2.89 vo_prod:H=0.8457,top10E=0.13,eRank=278.7,q75/q25=64.20 train_time:636907ms step_avg:96.50ms +[2025-08-22 19:24:33] [Rank 0] PRINT: step:6600/10000 val_loss:3.6021 svd_entropy: attn_qk:H=0.9087,top10E=0.08,eRank=419.7,q75/q25=11.62 attn_vo:H=0.9164,top10E=0.06,eRank=441.1,q75/q25=11.50 mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.06,eRank=614.2,q75/q25=2.89 vo_prod:H=0.8457,top10E=0.13,eRank=278.7,q75/q25=64.20 train_time:636907ms step_avg:96.50ms +[2025-08-22 19:24:33] [Rank 0] step:6601/10000 train_time:636961ms step_avg:96.49ms +[2025-08-22 19:24:33] [Rank 0] step:6601/10000 train_time:636961ms step_avg:96.49ms +[2025-08-22 19:24:35] [Rank 0] step:6621/10000 train_time:638952ms step_avg:96.50ms +[2025-08-22 19:24:35] [Rank 0] step:6621/10000 train_time:638952ms step_avg:96.50ms +[2025-08-22 19:24:37] [Rank 0] step:6641/10000 train_time:641071ms step_avg:96.53ms 
+[2025-08-22 19:24:37] [Rank 0] step:6641/10000 train_time:641071ms step_avg:96.53ms +[2025-08-22 19:24:39] [Rank 0] step:6661/10000 train_time:643119ms step_avg:96.55ms +[2025-08-22 19:24:39] [Rank 0] step:6661/10000 train_time:643119ms step_avg:96.55ms +[2025-08-22 19:24:41] [Rank 0] step:6681/10000 train_time:645128ms step_avg:96.56ms +[2025-08-22 19:24:41] [Rank 0] step:6681/10000 train_time:645128ms step_avg:96.56ms +[2025-08-22 19:24:43] [Rank 0] step:6701/10000 train_time:647155ms step_avg:96.58ms +[2025-08-22 19:24:43] [Rank 0] step:6701/10000 train_time:647155ms step_avg:96.58ms +[2025-08-22 19:24:45] [Rank 0] step:6721/10000 train_time:649177ms step_avg:96.59ms +[2025-08-22 19:24:45] [Rank 0] step:6721/10000 train_time:649177ms step_avg:96.59ms +[2025-08-22 19:24:47] [Rank 0] step:6741/10000 train_time:651196ms step_avg:96.60ms +[2025-08-22 19:24:47] [Rank 0] step:6741/10000 train_time:651196ms step_avg:96.60ms +[2025-08-22 19:24:49] [Rank 0] step:6761/10000 train_time:653217ms step_avg:96.62ms +[2025-08-22 19:24:49] [Rank 0] step:6761/10000 train_time:653217ms step_avg:96.62ms +[2025-08-22 19:24:51] [Rank 0] step:6781/10000 train_time:655244ms step_avg:96.63ms +[2025-08-22 19:24:51] [Rank 0] step:6781/10000 train_time:655244ms step_avg:96.63ms +[2025-08-22 19:24:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:24:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:25:07] [Rank 0] PRINT: step:6800/10000 val_loss:3.5865 svd_entropy: attn_qk:H=0.9085,top10E=0.08,eRank=419.1,q75/q25=11.69 attn_vo:H=0.9164,top10E=0.06,eRank=441.2,q75/q25=11.50 mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.06,eRank=614.2,q75/q25=2.89 vo_prod:H=0.8459,top10E=0.13,eRank=279.0,q75/q25=63.67 train_time:657224ms step_avg:96.65ms +[2025-08-22 19:25:07] [Rank 0] PRINT: step:6800/10000 val_loss:3.5865 svd_entropy: attn_qk:H=0.9085,top10E=0.08,eRank=419.1,q75/q25=11.69 attn_vo:H=0.9164,top10E=0.06,eRank=441.2,q75/q25=11.50 mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.06,eRank=614.2,q75/q25=2.89 vo_prod:H=0.8459,top10E=0.13,eRank=279.0,q75/q25=63.67 train_time:657224ms step_avg:96.65ms +[2025-08-22 19:25:08] [Rank 0] step:6801/10000 train_time:657279ms step_avg:96.64ms +[2025-08-22 19:25:08] [Rank 0] step:6801/10000 train_time:657279ms step_avg:96.64ms +[2025-08-22 19:25:10] [Rank 0] step:6821/10000 train_time:659319ms step_avg:96.66ms +[2025-08-22 19:25:10] [Rank 0] step:6821/10000 train_time:659319ms step_avg:96.66ms +[2025-08-22 19:25:12] [Rank 0] step:6841/10000 train_time:661337ms step_avg:96.67ms +[2025-08-22 19:25:12] [Rank 0] step:6841/10000 train_time:661337ms step_avg:96.67ms +[2025-08-22 19:25:14] [Rank 0] step:6861/10000 train_time:663349ms step_avg:96.68ms +[2025-08-22 19:25:14] [Rank 0] step:6861/10000 train_time:663349ms step_avg:96.68ms +[2025-08-22 19:25:16] [Rank 0] step:6881/10000 train_time:665375ms step_avg:96.70ms +[2025-08-22 19:25:16] [Rank 0] step:6881/10000 train_time:665375ms step_avg:96.70ms +[2025-08-22 19:25:18] [Rank 0] step:6901/10000 train_time:667398ms step_avg:96.71ms +[2025-08-22 19:25:18] [Rank 0] step:6901/10000 train_time:667398ms step_avg:96.71ms +[2025-08-22 19:25:20] [Rank 0] step:6921/10000 train_time:669415ms step_avg:96.72ms +[2025-08-22 19:25:20] [Rank 0] step:6921/10000 train_time:669415ms step_avg:96.72ms +[2025-08-22 19:25:22] 
[Rank 0] step:6941/10000 train_time:671444ms step_avg:96.74ms +[2025-08-22 19:25:22] [Rank 0] step:6941/10000 train_time:671444ms step_avg:96.74ms +[2025-08-22 19:25:24] [Rank 0] step:6961/10000 train_time:673483ms step_avg:96.75ms +[2025-08-22 19:25:24] [Rank 0] step:6961/10000 train_time:673483ms step_avg:96.75ms +[2025-08-22 19:25:26] [Rank 0] step:6981/10000 train_time:675512ms step_avg:96.76ms +[2025-08-22 19:25:26] [Rank 0] step:6981/10000 train_time:675512ms step_avg:96.76ms +[2025-08-22 19:25:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:25:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:25:42] [Rank 0] PRINT: step:7000/10000 val_loss:3.5717 svd_entropy: attn_qk:H=0.9084,top10E=0.08,eRank=418.7,q75/q25=11.69 attn_vo:H=0.9164,top10E=0.06,eRank=441.3,q75/q25=11.50 mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.06,eRank=614.2,q75/q25=2.89 vo_prod:H=0.8460,top10E=0.13,eRank=279.2,q75/q25=63.63 train_time:677498ms step_avg:96.79ms +[2025-08-22 19:25:42] [Rank 0] PRINT: step:7000/10000 val_loss:3.5717 svd_entropy: attn_qk:H=0.9084,top10E=0.08,eRank=418.7,q75/q25=11.69 attn_vo:H=0.9164,top10E=0.06,eRank=441.3,q75/q25=11.50 mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.06,eRank=614.2,q75/q25=2.89 vo_prod:H=0.8460,top10E=0.13,eRank=279.2,q75/q25=63.63 train_time:677498ms step_avg:96.79ms +[2025-08-22 19:25:42] [Rank 0] step:7001/10000 train_time:677554ms step_avg:96.78ms +[2025-08-22 19:25:42] [Rank 0] step:7001/10000 train_time:677554ms step_avg:96.78ms +[2025-08-22 19:25:44] [Rank 0] step:7021/10000 train_time:679654ms step_avg:96.80ms +[2025-08-22 19:25:44] [Rank 0] step:7021/10000 train_time:679654ms step_avg:96.80ms +[2025-08-22 19:25:46] [Rank 0] step:7041/10000 train_time:681671ms step_avg:96.81ms 
+[2025-08-22 19:25:46] [Rank 0] step:7041/10000 train_time:681671ms step_avg:96.81ms +[2025-08-22 19:25:48] [Rank 0] step:7061/10000 train_time:683698ms step_avg:96.83ms +[2025-08-22 19:25:48] [Rank 0] step:7061/10000 train_time:683698ms step_avg:96.83ms +[2025-08-22 19:25:50] [Rank 0] step:7081/10000 train_time:685720ms step_avg:96.84ms +[2025-08-22 19:25:50] [Rank 0] step:7081/10000 train_time:685720ms step_avg:96.84ms +[2025-08-22 19:25:52] [Rank 0] step:7101/10000 train_time:687753ms step_avg:96.85ms +[2025-08-22 19:25:52] [Rank 0] step:7101/10000 train_time:687753ms step_avg:96.85ms +[2025-08-22 19:25:54] [Rank 0] step:7121/10000 train_time:689772ms step_avg:96.86ms +[2025-08-22 19:25:54] [Rank 0] step:7121/10000 train_time:689772ms step_avg:96.86ms +[2025-08-22 19:25:56] [Rank 0] step:7141/10000 train_time:691796ms step_avg:96.88ms +[2025-08-22 19:25:56] [Rank 0] step:7141/10000 train_time:691796ms step_avg:96.88ms +[2025-08-22 19:25:58] [Rank 0] step:7161/10000 train_time:693821ms step_avg:96.89ms +[2025-08-22 19:25:58] [Rank 0] step:7161/10000 train_time:693821ms step_avg:96.89ms +[2025-08-22 19:26:00] [Rank 0] step:7181/10000 train_time:695848ms step_avg:96.90ms +[2025-08-22 19:26:00] [Rank 0] step:7181/10000 train_time:695848ms step_avg:96.90ms +[2025-08-22 19:26:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:26:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:26:16] [Rank 0] PRINT: step:7200/10000 val_loss:3.5598 svd_entropy: attn_qk:H=0.9082,top10E=0.08,eRank=418.2,q75/q25=11.69 attn_vo:H=0.9165,top10E=0.06,eRank=441.4,q75/q25=11.49 mlp_w1:H=0.9720,top10E=0.04,eRank=637.6,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.06,eRank=614.2,q75/q25=2.89 vo_prod:H=0.8461,top10E=0.13,eRank=279.5,q75/q25=63.65 train_time:697834ms step_avg:96.92ms +[2025-08-22 19:26:16] [Rank 0] PRINT: step:7200/10000 val_loss:3.5598 svd_entropy: attn_qk:H=0.9082,top10E=0.08,eRank=418.2,q75/q25=11.69 attn_vo:H=0.9165,top10E=0.06,eRank=441.4,q75/q25=11.49 mlp_w1:H=0.9720,top10E=0.04,eRank=637.6,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.06,eRank=614.2,q75/q25=2.89 vo_prod:H=0.8461,top10E=0.13,eRank=279.5,q75/q25=63.65 train_time:697834ms step_avg:96.92ms +[2025-08-22 19:26:16] [Rank 0] step:7201/10000 train_time:697888ms step_avg:96.92ms +[2025-08-22 19:26:16] [Rank 0] step:7201/10000 train_time:697888ms step_avg:96.92ms +[2025-08-22 19:26:18] [Rank 0] step:7221/10000 train_time:699925ms step_avg:96.93ms +[2025-08-22 19:26:18] [Rank 0] step:7221/10000 train_time:699925ms step_avg:96.93ms +[2025-08-22 19:26:20] [Rank 0] step:7241/10000 train_time:701949ms step_avg:96.94ms +[2025-08-22 19:26:20] [Rank 0] step:7241/10000 train_time:701949ms step_avg:96.94ms +[2025-08-22 19:26:22] [Rank 0] step:7261/10000 train_time:703966ms step_avg:96.95ms +[2025-08-22 19:26:22] [Rank 0] step:7261/10000 train_time:703966ms step_avg:96.95ms +[2025-08-22 19:26:24] [Rank 0] step:7281/10000 train_time:706007ms step_avg:96.97ms +[2025-08-22 19:26:24] [Rank 0] step:7281/10000 train_time:706007ms step_avg:96.97ms +[2025-08-22 19:26:26] [Rank 0] step:7301/10000 train_time:708040ms step_avg:96.98ms +[2025-08-22 19:26:26] [Rank 0] step:7301/10000 train_time:708040ms step_avg:96.98ms +[2025-08-22 19:26:28] [Rank 0] step:7321/10000 train_time:710077ms step_avg:96.99ms +[2025-08-22 19:26:28] [Rank 0] step:7321/10000 train_time:710077ms step_avg:96.99ms +[2025-08-22 19:26:31] 
[Rank 0] step:7341/10000 train_time:712108ms step_avg:97.00ms +[2025-08-22 19:26:31] [Rank 0] step:7341/10000 train_time:712108ms step_avg:97.00ms +[2025-08-22 19:26:33] [Rank 0] step:7361/10000 train_time:714150ms step_avg:97.02ms +[2025-08-22 19:26:33] [Rank 0] step:7361/10000 train_time:714150ms step_avg:97.02ms +[2025-08-22 19:26:35] [Rank 0] step:7381/10000 train_time:716183ms step_avg:97.03ms +[2025-08-22 19:26:35] [Rank 0] step:7381/10000 train_time:716183ms step_avg:97.03ms +[2025-08-22 19:26:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:26:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:26:51] [Rank 0] PRINT: step:7400/10000 val_loss:3.5445 svd_entropy: attn_qk:H=0.9080,top10E=0.08,eRank=417.8,q75/q25=11.76 attn_vo:H=0.9165,top10E=0.06,eRank=441.4,q75/q25=11.49 mlp_w1:H=0.9720,top10E=0.04,eRank=637.6,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.06,eRank=614.2,q75/q25=2.88 vo_prod:H=0.8463,top10E=0.13,eRank=279.7,q75/q25=63.56 train_time:718153ms step_avg:97.05ms +[2025-08-22 19:26:51] [Rank 0] PRINT: step:7400/10000 val_loss:3.5445 svd_entropy: attn_qk:H=0.9080,top10E=0.08,eRank=417.8,q75/q25=11.76 attn_vo:H=0.9165,top10E=0.06,eRank=441.4,q75/q25=11.49 mlp_w1:H=0.9720,top10E=0.04,eRank=637.6,q75/q25=2.77 mlp_w2:H=0.9663,top10E=0.06,eRank=614.2,q75/q25=2.88 vo_prod:H=0.8463,top10E=0.13,eRank=279.7,q75/q25=63.56 train_time:718153ms step_avg:97.05ms +[2025-08-22 19:26:51] [Rank 0] step:7401/10000 train_time:718207ms step_avg:97.04ms +[2025-08-22 19:26:51] [Rank 0] step:7401/10000 train_time:718207ms step_avg:97.04ms +[2025-08-22 19:26:53] [Rank 0] step:7421/10000 train_time:720260ms step_avg:97.06ms +[2025-08-22 19:26:53] [Rank 0] step:7421/10000 train_time:720260ms step_avg:97.06ms +[2025-08-22 19:26:55] [Rank 0] step:7441/10000 train_time:722281ms step_avg:97.07ms 
+[2025-08-22 19:26:55] [Rank 0] step:7441/10000 train_time:722281ms step_avg:97.07ms +[2025-08-22 19:26:57] [Rank 0] step:7461/10000 train_time:724310ms step_avg:97.08ms +[2025-08-22 19:26:57] [Rank 0] step:7461/10000 train_time:724310ms step_avg:97.08ms +[2025-08-22 19:26:59] [Rank 0] step:7481/10000 train_time:726343ms step_avg:97.09ms +[2025-08-22 19:26:59] [Rank 0] step:7481/10000 train_time:726343ms step_avg:97.09ms +[2025-08-22 19:27:01] [Rank 0] step:7501/10000 train_time:728372ms step_avg:97.10ms +[2025-08-22 19:27:01] [Rank 0] step:7501/10000 train_time:728372ms step_avg:97.10ms +[2025-08-22 19:27:03] [Rank 0] step:7521/10000 train_time:730398ms step_avg:97.11ms +[2025-08-22 19:27:03] [Rank 0] step:7521/10000 train_time:730398ms step_avg:97.11ms +[2025-08-22 19:27:05] [Rank 0] step:7541/10000 train_time:732440ms step_avg:97.13ms +[2025-08-22 19:27:05] [Rank 0] step:7541/10000 train_time:732440ms step_avg:97.13ms +[2025-08-22 19:27:07] [Rank 0] step:7561/10000 train_time:734459ms step_avg:97.14ms +[2025-08-22 19:27:07] [Rank 0] step:7561/10000 train_time:734459ms step_avg:97.14ms +[2025-08-22 19:27:09] [Rank 0] step:7581/10000 train_time:736492ms step_avg:97.15ms +[2025-08-22 19:27:09] [Rank 0] step:7581/10000 train_time:736492ms step_avg:97.15ms +[2025-08-22 19:27:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:27:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:27:25] [Rank 0] PRINT: step:7600/10000 val_loss:3.5352 svd_entropy: attn_qk:H=0.9079,top10E=0.08,eRank=417.3,q75/q25=11.78 attn_vo:H=0.9165,top10E=0.06,eRank=441.5,q75/q25=11.47 mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.76 mlp_w2:H=0.9663,top10E=0.06,eRank=614.2,q75/q25=2.88 vo_prod:H=0.8463,top10E=0.13,eRank=279.9,q75/q25=63.37 train_time:738488ms step_avg:97.17ms +[2025-08-22 19:27:25] [Rank 0] PRINT: step:7600/10000 val_loss:3.5352 svd_entropy: attn_qk:H=0.9079,top10E=0.08,eRank=417.3,q75/q25=11.78 attn_vo:H=0.9165,top10E=0.06,eRank=441.5,q75/q25=11.47 mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.76 mlp_w2:H=0.9663,top10E=0.06,eRank=614.2,q75/q25=2.88 vo_prod:H=0.8463,top10E=0.13,eRank=279.9,q75/q25=63.37 train_time:738488ms step_avg:97.17ms +[2025-08-22 19:27:25] [Rank 0] step:7601/10000 train_time:738542ms step_avg:97.16ms +[2025-08-22 19:27:25] [Rank 0] step:7601/10000 train_time:738542ms step_avg:97.16ms +[2025-08-22 19:27:27] [Rank 0] step:7621/10000 train_time:740586ms step_avg:97.18ms +[2025-08-22 19:27:27] [Rank 0] step:7621/10000 train_time:740586ms step_avg:97.18ms +[2025-08-22 19:27:29] [Rank 0] step:7641/10000 train_time:742605ms step_avg:97.19ms +[2025-08-22 19:27:29] [Rank 0] step:7641/10000 train_time:742605ms step_avg:97.19ms +[2025-08-22 19:27:31] [Rank 0] step:7661/10000 train_time:744631ms step_avg:97.20ms +[2025-08-22 19:27:31] [Rank 0] step:7661/10000 train_time:744631ms step_avg:97.20ms +[2025-08-22 19:27:33] [Rank 0] step:7681/10000 train_time:746651ms step_avg:97.21ms +[2025-08-22 19:27:33] [Rank 0] step:7681/10000 train_time:746651ms step_avg:97.21ms +[2025-08-22 19:27:35] [Rank 0] step:7701/10000 train_time:748673ms step_avg:97.22ms +[2025-08-22 19:27:35] [Rank 0] step:7701/10000 train_time:748673ms step_avg:97.22ms +[2025-08-22 19:27:37] [Rank 0] step:7721/10000 train_time:750710ms step_avg:97.23ms +[2025-08-22 19:27:37] [Rank 0] step:7721/10000 train_time:750710ms step_avg:97.23ms +[2025-08-22 19:27:39] 
[Rank 0] step:7741/10000 train_time:752736ms step_avg:97.24ms +[2025-08-22 19:27:39] [Rank 0] step:7741/10000 train_time:752736ms step_avg:97.24ms +[2025-08-22 19:27:41] [Rank 0] step:7761/10000 train_time:754774ms step_avg:97.25ms +[2025-08-22 19:27:41] [Rank 0] step:7761/10000 train_time:754774ms step_avg:97.25ms +[2025-08-22 19:27:43] [Rank 0] step:7781/10000 train_time:756805ms step_avg:97.26ms +[2025-08-22 19:27:43] [Rank 0] step:7781/10000 train_time:756805ms step_avg:97.26ms +[2025-08-22 19:27:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:27:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:27:59] [Rank 0] PRINT: step:7800/10000 val_loss:3.5214 svd_entropy: attn_qk:H=0.9077,top10E=0.08,eRank=416.9,q75/q25=11.82 attn_vo:H=0.9165,top10E=0.06,eRank=441.5,q75/q25=11.50 mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.76 mlp_w2:H=0.9663,top10E=0.06,eRank=614.3,q75/q25=2.88 vo_prod:H=0.8465,top10E=0.13,eRank=280.1,q75/q25=63.11 train_time:758797ms step_avg:97.28ms +[2025-08-22 19:27:59] [Rank 0] PRINT: step:7800/10000 val_loss:3.5214 svd_entropy: attn_qk:H=0.9077,top10E=0.08,eRank=416.9,q75/q25=11.82 attn_vo:H=0.9165,top10E=0.06,eRank=441.5,q75/q25=11.50 mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.76 mlp_w2:H=0.9663,top10E=0.06,eRank=614.3,q75/q25=2.88 vo_prod:H=0.8465,top10E=0.13,eRank=280.1,q75/q25=63.11 train_time:758797ms step_avg:97.28ms +[2025-08-22 19:27:59] [Rank 0] step:7801/10000 train_time:758851ms step_avg:97.28ms +[2025-08-22 19:27:59] [Rank 0] step:7801/10000 train_time:758851ms step_avg:97.28ms +[2025-08-22 19:28:01] [Rank 0] step:7821/10000 train_time:760883ms step_avg:97.29ms +[2025-08-22 19:28:01] [Rank 0] step:7821/10000 train_time:760883ms step_avg:97.29ms +[2025-08-22 19:28:03] [Rank 0] step:7841/10000 train_time:762901ms step_avg:97.30ms 
+[2025-08-22 19:28:03] [Rank 0] step:7841/10000 train_time:762901ms step_avg:97.30ms +[2025-08-22 19:28:05] [Rank 0] step:7861/10000 train_time:764926ms step_avg:97.31ms +[2025-08-22 19:28:05] [Rank 0] step:7861/10000 train_time:764926ms step_avg:97.31ms +[2025-08-22 19:28:07] [Rank 0] step:7881/10000 train_time:766954ms step_avg:97.32ms +[2025-08-22 19:28:07] [Rank 0] step:7881/10000 train_time:766954ms step_avg:97.32ms +[2025-08-22 19:28:09] [Rank 0] step:7901/10000 train_time:768971ms step_avg:97.33ms +[2025-08-22 19:28:09] [Rank 0] step:7901/10000 train_time:768971ms step_avg:97.33ms +[2025-08-22 19:28:12] [Rank 0] step:7921/10000 train_time:771001ms step_avg:97.34ms +[2025-08-22 19:28:12] [Rank 0] step:7921/10000 train_time:771001ms step_avg:97.34ms +[2025-08-22 19:28:14] [Rank 0] step:7941/10000 train_time:773038ms step_avg:97.35ms +[2025-08-22 19:28:14] [Rank 0] step:7941/10000 train_time:773038ms step_avg:97.35ms +[2025-08-22 19:28:16] [Rank 0] step:7961/10000 train_time:775068ms step_avg:97.36ms +[2025-08-22 19:28:16] [Rank 0] step:7961/10000 train_time:775068ms step_avg:97.36ms +[2025-08-22 19:28:18] [Rank 0] step:7981/10000 train_time:777087ms step_avg:97.37ms +[2025-08-22 19:28:18] [Rank 0] step:7981/10000 train_time:777087ms step_avg:97.37ms +[2025-08-22 19:28:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:28:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:28:34] [Rank 0] PRINT: step:8000/10000 val_loss:3.5068 svd_entropy: attn_qk:H=0.9075,top10E=0.08,eRank=416.5,q75/q25=11.83 attn_vo:H=0.9165,top10E=0.06,eRank=441.6,q75/q25=11.52 mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.76 mlp_w2:H=0.9663,top10E=0.06,eRank=614.3,q75/q25=2.88 vo_prod:H=0.8466,top10E=0.13,eRank=280.4,q75/q25=62.81 train_time:779077ms step_avg:97.38ms +[2025-08-22 19:28:34] [Rank 0] PRINT: step:8000/10000 val_loss:3.5068 svd_entropy: attn_qk:H=0.9075,top10E=0.08,eRank=416.5,q75/q25=11.83 attn_vo:H=0.9165,top10E=0.06,eRank=441.6,q75/q25=11.52 mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.76 mlp_w2:H=0.9663,top10E=0.06,eRank=614.3,q75/q25=2.88 vo_prod:H=0.8466,top10E=0.13,eRank=280.4,q75/q25=62.81 train_time:779077ms step_avg:97.38ms +[2025-08-22 19:28:34] [Rank 0] step:8001/10000 train_time:779131ms step_avg:97.38ms +[2025-08-22 19:28:34] [Rank 0] step:8001/10000 train_time:779131ms step_avg:97.38ms +[2025-08-22 19:28:36] [Rank 0] step:8021/10000 train_time:781162ms step_avg:97.39ms +[2025-08-22 19:28:36] [Rank 0] step:8021/10000 train_time:781162ms step_avg:97.39ms +[2025-08-22 19:28:38] [Rank 0] step:8041/10000 train_time:783199ms step_avg:97.40ms +[2025-08-22 19:28:38] [Rank 0] step:8041/10000 train_time:783199ms step_avg:97.40ms +[2025-08-22 19:28:40] [Rank 0] step:8061/10000 train_time:785232ms step_avg:97.41ms +[2025-08-22 19:28:40] [Rank 0] step:8061/10000 train_time:785232ms step_avg:97.41ms +[2025-08-22 19:28:42] [Rank 0] step:8081/10000 train_time:787254ms step_avg:97.42ms +[2025-08-22 19:28:42] [Rank 0] step:8081/10000 train_time:787254ms step_avg:97.42ms +[2025-08-22 19:28:44] [Rank 0] step:8101/10000 train_time:789291ms step_avg:97.43ms +[2025-08-22 19:28:44] [Rank 0] step:8101/10000 train_time:789291ms step_avg:97.43ms +[2025-08-22 19:28:46] [Rank 0] step:8121/10000 train_time:791326ms step_avg:97.44ms +[2025-08-22 19:28:46] [Rank 0] step:8121/10000 train_time:791326ms step_avg:97.44ms +[2025-08-22 19:28:49] 
[Rank 0] step:8141/10000 train_time:794053ms step_avg:97.54ms +[2025-08-22 19:28:49] [Rank 0] step:8141/10000 train_time:794053ms step_avg:97.54ms +[2025-08-22 19:28:51] [Rank 0] step:8161/10000 train_time:796251ms step_avg:97.57ms +[2025-08-22 19:28:51] [Rank 0] step:8161/10000 train_time:796251ms step_avg:97.57ms +[2025-08-22 19:28:53] [Rank 0] step:8181/10000 train_time:798446ms step_avg:97.60ms +[2025-08-22 19:28:53] [Rank 0] step:8181/10000 train_time:798446ms step_avg:97.60ms +[2025-08-22 19:28:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:28:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:29:09] [Rank 0] PRINT: step:8200/10000 val_loss:3.4941 svd_entropy: attn_qk:H=0.9074,top10E=0.08,eRank=416.1,q75/q25=11.84 attn_vo:H=0.9166,top10E=0.06,eRank=441.7,q75/q25=11.52 mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.76 mlp_w2:H=0.9663,top10E=0.06,eRank=614.4,q75/q25=2.88 vo_prod:H=0.8467,top10E=0.13,eRank=280.5,q75/q25=62.69 train_time:800487ms step_avg:97.62ms +[2025-08-22 19:29:09] [Rank 0] PRINT: step:8200/10000 val_loss:3.4941 svd_entropy: attn_qk:H=0.9074,top10E=0.08,eRank=416.1,q75/q25=11.84 attn_vo:H=0.9166,top10E=0.06,eRank=441.7,q75/q25=11.52 mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.76 mlp_w2:H=0.9663,top10E=0.06,eRank=614.4,q75/q25=2.88 vo_prod:H=0.8467,top10E=0.13,eRank=280.5,q75/q25=62.69 train_time:800487ms step_avg:97.62ms +[2025-08-22 19:29:09] [Rank 0] step:8201/10000 train_time:800544ms step_avg:97.62ms +[2025-08-22 19:29:09] [Rank 0] step:8201/10000 train_time:800544ms step_avg:97.62ms +[2025-08-22 19:29:11] [Rank 0] step:8221/10000 train_time:802627ms step_avg:97.63ms +[2025-08-22 19:29:11] [Rank 0] step:8221/10000 train_time:802627ms step_avg:97.63ms +[2025-08-22 19:29:13] [Rank 0] step:8241/10000 train_time:804685ms step_avg:97.64ms 
+[2025-08-22 19:29:13] [Rank 0] step:8241/10000 train_time:804685ms step_avg:97.64ms +[2025-08-22 19:29:15] [Rank 0] step:8261/10000 train_time:806749ms step_avg:97.66ms +[2025-08-22 19:29:15] [Rank 0] step:8261/10000 train_time:806749ms step_avg:97.66ms +[2025-08-22 19:29:17] [Rank 0] step:8281/10000 train_time:808802ms step_avg:97.67ms +[2025-08-22 19:29:17] [Rank 0] step:8281/10000 train_time:808802ms step_avg:97.67ms +[2025-08-22 19:29:19] [Rank 0] step:8301/10000 train_time:810860ms step_avg:97.68ms +[2025-08-22 19:29:19] [Rank 0] step:8301/10000 train_time:810860ms step_avg:97.68ms +[2025-08-22 19:29:21] [Rank 0] step:8321/10000 train_time:812912ms step_avg:97.69ms +[2025-08-22 19:29:21] [Rank 0] step:8321/10000 train_time:812912ms step_avg:97.69ms +[2025-08-22 19:29:24] [Rank 0] step:8341/10000 train_time:814975ms step_avg:97.71ms +[2025-08-22 19:29:24] [Rank 0] step:8341/10000 train_time:814975ms step_avg:97.71ms +[2025-08-22 19:29:26] [Rank 0] step:8361/10000 train_time:817029ms step_avg:97.72ms +[2025-08-22 19:29:26] [Rank 0] step:8361/10000 train_time:817029ms step_avg:97.72ms +[2025-08-22 19:29:28] [Rank 0] step:8381/10000 train_time:819085ms step_avg:97.73ms +[2025-08-22 19:29:28] [Rank 0] step:8381/10000 train_time:819085ms step_avg:97.73ms +[2025-08-22 19:29:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:29:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:29:44] [Rank 0] PRINT: step:8400/10000 val_loss:3.4814 svd_entropy: attn_qk:H=0.9073,top10E=0.08,eRank=415.8,q75/q25=11.84 attn_vo:H=0.9166,top10E=0.06,eRank=441.7,q75/q25=11.52 mlp_w1:H=0.9720,top10E=0.04,eRank=637.8,q75/q25=2.76 mlp_w2:H=0.9663,top10E=0.06,eRank=614.4,q75/q25=2.88 vo_prod:H=0.8468,top10E=0.13,eRank=280.8,q75/q25=62.48 train_time:821096ms step_avg:97.75ms +[2025-08-22 19:29:44] [Rank 0] PRINT: step:8400/10000 val_loss:3.4814 svd_entropy: attn_qk:H=0.9073,top10E=0.08,eRank=415.8,q75/q25=11.84 attn_vo:H=0.9166,top10E=0.06,eRank=441.7,q75/q25=11.52 mlp_w1:H=0.9720,top10E=0.04,eRank=637.8,q75/q25=2.76 mlp_w2:H=0.9663,top10E=0.06,eRank=614.4,q75/q25=2.88 vo_prod:H=0.8468,top10E=0.13,eRank=280.8,q75/q25=62.48 train_time:821096ms step_avg:97.75ms +[2025-08-22 19:29:44] [Rank 0] step:8401/10000 train_time:821153ms step_avg:97.74ms +[2025-08-22 19:29:44] [Rank 0] step:8401/10000 train_time:821153ms step_avg:97.74ms +[2025-08-22 19:29:46] [Rank 0] step:8421/10000 train_time:823206ms step_avg:97.76ms +[2025-08-22 19:29:46] [Rank 0] step:8421/10000 train_time:823206ms step_avg:97.76ms +[2025-08-22 19:29:48] [Rank 0] step:8441/10000 train_time:825255ms step_avg:97.77ms +[2025-08-22 19:29:48] [Rank 0] step:8441/10000 train_time:825255ms step_avg:97.77ms +[2025-08-22 19:29:50] [Rank 0] step:8461/10000 train_time:827301ms step_avg:97.78ms +[2025-08-22 19:29:50] [Rank 0] step:8461/10000 train_time:827301ms step_avg:97.78ms +[2025-08-22 19:29:52] [Rank 0] step:8481/10000 train_time:829361ms step_avg:97.79ms +[2025-08-22 19:29:52] [Rank 0] step:8481/10000 train_time:829361ms step_avg:97.79ms +[2025-08-22 19:29:54] [Rank 0] step:8501/10000 train_time:831536ms step_avg:97.82ms +[2025-08-22 19:29:54] [Rank 0] step:8501/10000 train_time:831536ms step_avg:97.82ms +[2025-08-22 19:29:56] [Rank 0] step:8521/10000 train_time:833724ms step_avg:97.84ms +[2025-08-22 19:29:56] [Rank 0] step:8521/10000 train_time:833724ms step_avg:97.84ms +[2025-08-22 19:29:58] 
[Rank 0] step:8541/10000 train_time:835793ms step_avg:97.86ms +[2025-08-22 19:29:58] [Rank 0] step:8541/10000 train_time:835793ms step_avg:97.86ms +[2025-08-22 19:30:00] [Rank 0] step:8561/10000 train_time:837853ms step_avg:97.87ms +[2025-08-22 19:30:00] [Rank 0] step:8561/10000 train_time:837853ms step_avg:97.87ms +[2025-08-22 19:30:02] [Rank 0] step:8581/10000 train_time:839910ms step_avg:97.88ms +[2025-08-22 19:30:02] [Rank 0] step:8581/10000 train_time:839910ms step_avg:97.88ms +[2025-08-22 19:30:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:30:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:30:18] [Rank 0] PRINT: step:8600/10000 val_loss:3.4720 svd_entropy: attn_qk:H=0.9072,top10E=0.08,eRank=415.5,q75/q25=11.86 attn_vo:H=0.9166,top10E=0.06,eRank=441.8,q75/q25=11.48 mlp_w1:H=0.9720,top10E=0.04,eRank=637.8,q75/q25=2.76 mlp_w2:H=0.9664,top10E=0.06,eRank=614.5,q75/q25=2.88 vo_prod:H=0.8469,top10E=0.13,eRank=281.0,q75/q25=62.28 train_time:841917ms step_avg:97.90ms +[2025-08-22 19:30:18] [Rank 0] PRINT: step:8600/10000 val_loss:3.4720 svd_entropy: attn_qk:H=0.9072,top10E=0.08,eRank=415.5,q75/q25=11.86 attn_vo:H=0.9166,top10E=0.06,eRank=441.8,q75/q25=11.48 mlp_w1:H=0.9720,top10E=0.04,eRank=637.8,q75/q25=2.76 mlp_w2:H=0.9664,top10E=0.06,eRank=614.5,q75/q25=2.88 vo_prod:H=0.8469,top10E=0.13,eRank=281.0,q75/q25=62.28 train_time:841917ms step_avg:97.90ms +[2025-08-22 19:30:18] [Rank 0] step:8601/10000 train_time:841974ms step_avg:97.89ms +[2025-08-22 19:30:18] [Rank 0] step:8601/10000 train_time:841974ms step_avg:97.89ms +[2025-08-22 19:30:21] [Rank 0] step:8621/10000 train_time:844034ms step_avg:97.90ms +[2025-08-22 19:30:21] [Rank 0] step:8621/10000 train_time:844034ms step_avg:97.90ms +[2025-08-22 19:30:23] [Rank 0] step:8641/10000 train_time:846082ms step_avg:97.91ms 
+[2025-08-22 19:30:23] [Rank 0] step:8641/10000 train_time:846082ms step_avg:97.91ms +[2025-08-22 19:30:25] [Rank 0] step:8661/10000 train_time:848133ms step_avg:97.93ms +[2025-08-22 19:30:25] [Rank 0] step:8661/10000 train_time:848133ms step_avg:97.93ms +[2025-08-22 19:30:27] [Rank 0] step:8681/10000 train_time:850191ms step_avg:97.94ms +[2025-08-22 19:30:27] [Rank 0] step:8681/10000 train_time:850191ms step_avg:97.94ms +[2025-08-22 19:30:29] [Rank 0] step:8701/10000 train_time:852237ms step_avg:97.95ms +[2025-08-22 19:30:29] [Rank 0] step:8701/10000 train_time:852237ms step_avg:97.95ms +[2025-08-22 19:30:31] [Rank 0] step:8721/10000 train_time:854296ms step_avg:97.96ms +[2025-08-22 19:30:31] [Rank 0] step:8721/10000 train_time:854296ms step_avg:97.96ms +[2025-08-22 19:30:33] [Rank 0] step:8741/10000 train_time:856343ms step_avg:97.97ms +[2025-08-22 19:30:33] [Rank 0] step:8741/10000 train_time:856343ms step_avg:97.97ms +[2025-08-22 19:30:35] [Rank 0] step:8761/10000 train_time:858406ms step_avg:97.98ms +[2025-08-22 19:30:35] [Rank 0] step:8761/10000 train_time:858406ms step_avg:97.98ms +[2025-08-22 19:30:37] [Rank 0] step:8781/10000 train_time:860467ms step_avg:97.99ms +[2025-08-22 19:30:37] [Rank 0] step:8781/10000 train_time:860467ms step_avg:97.99ms +[2025-08-22 19:30:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:30:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:30:53] [Rank 0] PRINT: step:8800/10000 val_loss:3.4608 svd_entropy: attn_qk:H=0.9070,top10E=0.08,eRank=415.2,q75/q25=11.90 attn_vo:H=0.9166,top10E=0.06,eRank=441.9,q75/q25=11.49 mlp_w1:H=0.9720,top10E=0.04,eRank=637.8,q75/q25=2.76 mlp_w2:H=0.9664,top10E=0.06,eRank=614.5,q75/q25=2.88 vo_prod:H=0.8470,top10E=0.13,eRank=281.1,q75/q25=62.10 train_time:862478ms step_avg:98.01ms +[2025-08-22 19:30:53] [Rank 0] PRINT: step:8800/10000 val_loss:3.4608 svd_entropy: attn_qk:H=0.9070,top10E=0.08,eRank=415.2,q75/q25=11.90 attn_vo:H=0.9166,top10E=0.06,eRank=441.9,q75/q25=11.49 mlp_w1:H=0.9720,top10E=0.04,eRank=637.8,q75/q25=2.76 mlp_w2:H=0.9664,top10E=0.06,eRank=614.5,q75/q25=2.88 vo_prod:H=0.8470,top10E=0.13,eRank=281.1,q75/q25=62.10 train_time:862478ms step_avg:98.01ms +[2025-08-22 19:30:53] [Rank 0] step:8801/10000 train_time:862532ms step_avg:98.00ms +[2025-08-22 19:30:53] [Rank 0] step:8801/10000 train_time:862532ms step_avg:98.00ms +[2025-08-22 19:30:55] [Rank 0] step:8821/10000 train_time:864601ms step_avg:98.02ms +[2025-08-22 19:30:55] [Rank 0] step:8821/10000 train_time:864601ms step_avg:98.02ms +[2025-08-22 19:30:57] [Rank 0] step:8841/10000 train_time:866752ms step_avg:98.04ms +[2025-08-22 19:30:57] [Rank 0] step:8841/10000 train_time:866752ms step_avg:98.04ms +[2025-08-22 19:31:00] [Rank 0] step:8861/10000 train_time:868960ms step_avg:98.07ms +[2025-08-22 19:31:00] [Rank 0] step:8861/10000 train_time:868960ms step_avg:98.07ms +[2025-08-22 19:31:02] [Rank 0] step:8881/10000 train_time:871012ms step_avg:98.08ms +[2025-08-22 19:31:02] [Rank 0] step:8881/10000 train_time:871012ms step_avg:98.08ms +[2025-08-22 19:31:04] [Rank 0] step:8901/10000 train_time:873070ms step_avg:98.09ms +[2025-08-22 19:31:04] [Rank 0] step:8901/10000 train_time:873070ms step_avg:98.09ms +[2025-08-22 19:31:06] [Rank 0] step:8921/10000 train_time:875144ms step_avg:98.10ms +[2025-08-22 19:31:06] [Rank 0] step:8921/10000 train_time:875144ms step_avg:98.10ms +[2025-08-22 19:31:08] 
[Rank 0] step:8941/10000 train_time:877203ms step_avg:98.11ms +[2025-08-22 19:31:08] [Rank 0] step:8941/10000 train_time:877203ms step_avg:98.11ms +[2025-08-22 19:31:10] [Rank 0] step:8961/10000 train_time:879258ms step_avg:98.12ms +[2025-08-22 19:31:10] [Rank 0] step:8961/10000 train_time:879258ms step_avg:98.12ms +[2025-08-22 19:31:12] [Rank 0] step:8981/10000 train_time:881314ms step_avg:98.13ms +[2025-08-22 19:31:12] [Rank 0] step:8981/10000 train_time:881314ms step_avg:98.13ms +[2025-08-22 19:31:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:31:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:31:28] [Rank 0] PRINT: step:9000/10000 val_loss:3.4501 svd_entropy: attn_qk:H=0.9069,top10E=0.08,eRank=414.9,q75/q25=11.93 attn_vo:H=0.9167,top10E=0.06,eRank=441.9,q75/q25=11.48 mlp_w1:H=0.9720,top10E=0.04,eRank=637.8,q75/q25=2.76 mlp_w2:H=0.9664,top10E=0.06,eRank=614.6,q75/q25=2.88 vo_prod:H=0.8471,top10E=0.13,eRank=281.3,q75/q25=61.55 train_time:883329ms step_avg:98.15ms +[2025-08-22 19:31:28] [Rank 0] PRINT: step:9000/10000 val_loss:3.4501 svd_entropy: attn_qk:H=0.9069,top10E=0.08,eRank=414.9,q75/q25=11.93 attn_vo:H=0.9167,top10E=0.06,eRank=441.9,q75/q25=11.48 mlp_w1:H=0.9720,top10E=0.04,eRank=637.8,q75/q25=2.76 mlp_w2:H=0.9664,top10E=0.06,eRank=614.6,q75/q25=2.88 vo_prod:H=0.8471,top10E=0.13,eRank=281.3,q75/q25=61.55 train_time:883329ms step_avg:98.15ms +[2025-08-22 19:31:28] [Rank 0] step:9001/10000 train_time:883385ms step_avg:98.14ms +[2025-08-22 19:31:28] [Rank 0] step:9001/10000 train_time:883385ms step_avg:98.14ms +[2025-08-22 19:31:30] [Rank 0] step:9021/10000 train_time:885465ms step_avg:98.16ms +[2025-08-22 19:31:30] [Rank 0] step:9021/10000 train_time:885465ms step_avg:98.16ms +[2025-08-22 19:31:32] [Rank 0] step:9041/10000 train_time:887524ms step_avg:98.17ms 
+[2025-08-22 19:31:32] [Rank 0] step:9041/10000 train_time:887524ms step_avg:98.17ms +[2025-08-22 19:31:34] [Rank 0] step:9061/10000 train_time:889591ms step_avg:98.18ms +[2025-08-22 19:31:34] [Rank 0] step:9061/10000 train_time:889591ms step_avg:98.18ms +[2025-08-22 19:31:36] [Rank 0] step:9081/10000 train_time:891657ms step_avg:98.19ms +[2025-08-22 19:31:36] [Rank 0] step:9081/10000 train_time:891657ms step_avg:98.19ms +[2025-08-22 19:31:38] [Rank 0] step:9101/10000 train_time:893732ms step_avg:98.20ms +[2025-08-22 19:31:38] [Rank 0] step:9101/10000 train_time:893732ms step_avg:98.20ms +[2025-08-22 19:31:40] [Rank 0] step:9121/10000 train_time:895802ms step_avg:98.21ms +[2025-08-22 19:31:40] [Rank 0] step:9121/10000 train_time:895802ms step_avg:98.21ms +[2025-08-22 19:31:42] [Rank 0] step:9141/10000 train_time:897854ms step_avg:98.22ms +[2025-08-22 19:31:42] [Rank 0] step:9141/10000 train_time:897854ms step_avg:98.22ms +[2025-08-22 19:31:44] [Rank 0] step:9161/10000 train_time:899909ms step_avg:98.23ms +[2025-08-22 19:31:44] [Rank 0] step:9161/10000 train_time:899909ms step_avg:98.23ms +[2025-08-22 19:31:47] [Rank 0] step:9181/10000 train_time:902005ms step_avg:98.25ms +[2025-08-22 19:31:47] [Rank 0] step:9181/10000 train_time:902005ms step_avg:98.25ms +[2025-08-22 19:31:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:31:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:32:03] [Rank 0] PRINT: step:9200/10000 val_loss:3.4413 svd_entropy: attn_qk:H=0.9068,top10E=0.08,eRank=414.7,q75/q25=11.92 attn_vo:H=0.9167,top10E=0.06,eRank=442.0,q75/q25=11.48 mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.76 mlp_w2:H=0.9664,top10E=0.06,eRank=614.7,q75/q25=2.88 vo_prod:H=0.8472,top10E=0.13,eRank=281.5,q75/q25=61.63 train_time:904017ms step_avg:98.26ms +[2025-08-22 19:32:03] [Rank 0] PRINT: step:9200/10000 val_loss:3.4413 svd_entropy: attn_qk:H=0.9068,top10E=0.08,eRank=414.7,q75/q25=11.92 attn_vo:H=0.9167,top10E=0.06,eRank=442.0,q75/q25=11.48 mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.76 mlp_w2:H=0.9664,top10E=0.06,eRank=614.7,q75/q25=2.88 vo_prod:H=0.8472,top10E=0.13,eRank=281.5,q75/q25=61.63 train_time:904017ms step_avg:98.26ms +[2025-08-22 19:32:03] [Rank 0] step:9201/10000 train_time:904071ms step_avg:98.26ms +[2025-08-22 19:32:03] [Rank 0] step:9201/10000 train_time:904071ms step_avg:98.26ms +[2025-08-22 19:32:05] [Rank 0] step:9221/10000 train_time:906159ms step_avg:98.27ms +[2025-08-22 19:32:05] [Rank 0] step:9221/10000 train_time:906159ms step_avg:98.27ms +[2025-08-22 19:32:07] [Rank 0] step:9241/10000 train_time:908225ms step_avg:98.28ms +[2025-08-22 19:32:07] [Rank 0] step:9241/10000 train_time:908225ms step_avg:98.28ms +[2025-08-22 19:32:09] [Rank 0] step:9261/10000 train_time:910288ms step_avg:98.29ms +[2025-08-22 19:32:09] [Rank 0] step:9261/10000 train_time:910288ms step_avg:98.29ms +[2025-08-22 19:32:11] [Rank 0] step:9281/10000 train_time:912339ms step_avg:98.30ms +[2025-08-22 19:32:11] [Rank 0] step:9281/10000 train_time:912339ms step_avg:98.30ms +[2025-08-22 19:32:13] [Rank 0] step:9301/10000 train_time:914391ms step_avg:98.31ms +[2025-08-22 19:32:13] [Rank 0] step:9301/10000 train_time:914391ms step_avg:98.31ms +[2025-08-22 19:32:15] [Rank 0] step:9321/10000 train_time:916455ms step_avg:98.32ms +[2025-08-22 19:32:15] [Rank 0] step:9321/10000 train_time:916455ms step_avg:98.32ms +[2025-08-22 19:32:17] 
[Rank 0] step:9341/10000 train_time:918516ms step_avg:98.33ms +[2025-08-22 19:32:17] [Rank 0] step:9341/10000 train_time:918516ms step_avg:98.33ms +[2025-08-22 19:32:19] [Rank 0] step:9361/10000 train_time:920578ms step_avg:98.34ms +[2025-08-22 19:32:19] [Rank 0] step:9361/10000 train_time:920578ms step_avg:98.34ms +[2025-08-22 19:32:21] [Rank 0] step:9381/10000 train_time:922651ms step_avg:98.35ms +[2025-08-22 19:32:21] [Rank 0] step:9381/10000 train_time:922651ms step_avg:98.35ms +[2025-08-22 19:32:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:32:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:32:37] [Rank 0] PRINT: step:9400/10000 val_loss:3.4324 svd_entropy: attn_qk:H=0.9067,top10E=0.08,eRank=414.4,q75/q25=11.92 attn_vo:H=0.9167,top10E=0.06,eRank=442.0,q75/q25=11.50 mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.76 mlp_w2:H=0.9664,top10E=0.06,eRank=614.7,q75/q25=2.88 vo_prod:H=0.8473,top10E=0.13,eRank=281.6,q75/q25=61.43 train_time:924671ms step_avg:98.37ms +[2025-08-22 19:32:37] [Rank 0] PRINT: step:9400/10000 val_loss:3.4324 svd_entropy: attn_qk:H=0.9067,top10E=0.08,eRank=414.4,q75/q25=11.92 attn_vo:H=0.9167,top10E=0.06,eRank=442.0,q75/q25=11.50 mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.76 mlp_w2:H=0.9664,top10E=0.06,eRank=614.7,q75/q25=2.88 vo_prod:H=0.8473,top10E=0.13,eRank=281.6,q75/q25=61.43 train_time:924671ms step_avg:98.37ms +[2025-08-22 19:32:37] [Rank 0] step:9401/10000 train_time:924727ms step_avg:98.36ms +[2025-08-22 19:32:37] [Rank 0] step:9401/10000 train_time:924727ms step_avg:98.36ms +[2025-08-22 19:32:39] [Rank 0] step:9421/10000 train_time:926800ms step_avg:98.38ms +[2025-08-22 19:32:39] [Rank 0] step:9421/10000 train_time:926800ms step_avg:98.38ms +[2025-08-22 19:32:42] [Rank 0] step:9441/10000 train_time:928854ms step_avg:98.39ms 
+[2025-08-22 19:32:42] [Rank 0] step:9441/10000 train_time:928854ms step_avg:98.39ms +[2025-08-22 19:32:44] [Rank 0] step:9461/10000 train_time:930916ms step_avg:98.40ms +[2025-08-22 19:32:44] [Rank 0] step:9461/10000 train_time:930916ms step_avg:98.40ms +[2025-08-22 19:32:46] [Rank 0] step:9481/10000 train_time:932979ms step_avg:98.41ms +[2025-08-22 19:32:46] [Rank 0] step:9481/10000 train_time:932979ms step_avg:98.41ms +[2025-08-22 19:32:48] [Rank 0] step:9501/10000 train_time:935040ms step_avg:98.41ms +[2025-08-22 19:32:48] [Rank 0] step:9501/10000 train_time:935040ms step_avg:98.41ms +[2025-08-22 19:32:50] [Rank 0] step:9521/10000 train_time:937092ms step_avg:98.42ms +[2025-08-22 19:32:50] [Rank 0] step:9521/10000 train_time:937092ms step_avg:98.42ms +[2025-08-22 19:32:52] [Rank 0] step:9541/10000 train_time:939152ms step_avg:98.43ms +[2025-08-22 19:32:52] [Rank 0] step:9541/10000 train_time:939152ms step_avg:98.43ms +[2025-08-22 19:32:54] [Rank 0] step:9561/10000 train_time:941207ms step_avg:98.44ms +[2025-08-22 19:32:54] [Rank 0] step:9561/10000 train_time:941207ms step_avg:98.44ms +[2025-08-22 19:32:56] [Rank 0] step:9581/10000 train_time:943265ms step_avg:98.45ms +[2025-08-22 19:32:56] [Rank 0] step:9581/10000 train_time:943265ms step_avg:98.45ms +[2025-08-22 19:32:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:32:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:33:12] [Rank 0] PRINT: step:9600/10000 val_loss:3.4242 svd_entropy: attn_qk:H=0.9067,top10E=0.08,eRank=414.2,q75/q25=11.93 attn_vo:H=0.9167,top10E=0.06,eRank=442.1,q75/q25=11.49 mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.76 mlp_w2:H=0.9664,top10E=0.06,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8473,top10E=0.13,eRank=281.7,q75/q25=61.28 train_time:945295ms step_avg:98.47ms +[2025-08-22 19:33:12] [Rank 0] PRINT: step:9600/10000 val_loss:3.4242 svd_entropy: attn_qk:H=0.9067,top10E=0.08,eRank=414.2,q75/q25=11.93 attn_vo:H=0.9167,top10E=0.06,eRank=442.1,q75/q25=11.49 mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.76 mlp_w2:H=0.9664,top10E=0.06,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8473,top10E=0.13,eRank=281.7,q75/q25=61.28 train_time:945295ms step_avg:98.47ms +[2025-08-22 19:33:12] [Rank 0] step:9601/10000 train_time:945351ms step_avg:98.46ms +[2025-08-22 19:33:12] [Rank 0] step:9601/10000 train_time:945351ms step_avg:98.46ms +[2025-08-22 19:33:14] [Rank 0] step:9621/10000 train_time:947432ms step_avg:98.48ms +[2025-08-22 19:33:14] [Rank 0] step:9621/10000 train_time:947432ms step_avg:98.48ms +[2025-08-22 19:33:16] [Rank 0] step:9641/10000 train_time:949493ms step_avg:98.48ms +[2025-08-22 19:33:16] [Rank 0] step:9641/10000 train_time:949493ms step_avg:98.48ms +[2025-08-22 19:33:18] [Rank 0] step:9661/10000 train_time:951580ms step_avg:98.50ms +[2025-08-22 19:33:18] [Rank 0] step:9661/10000 train_time:951580ms step_avg:98.50ms +[2025-08-22 19:33:20] [Rank 0] step:9681/10000 train_time:953662ms step_avg:98.51ms +[2025-08-22 19:33:20] [Rank 0] step:9681/10000 train_time:953662ms step_avg:98.51ms +[2025-08-22 19:33:22] [Rank 0] step:9701/10000 train_time:955762ms step_avg:98.52ms +[2025-08-22 19:33:22] [Rank 0] step:9701/10000 train_time:955762ms step_avg:98.52ms +[2025-08-22 19:33:25] [Rank 0] step:9721/10000 train_time:957850ms step_avg:98.53ms +[2025-08-22 19:33:25] [Rank 0] step:9721/10000 train_time:957850ms step_avg:98.53ms +[2025-08-22 19:33:27] 
[Rank 0] step:9741/10000 train_time:959949ms step_avg:98.55ms +[2025-08-22 19:33:27] [Rank 0] step:9741/10000 train_time:959949ms step_avg:98.55ms +[2025-08-22 19:33:29] [Rank 0] step:9761/10000 train_time:962043ms step_avg:98.56ms +[2025-08-22 19:33:29] [Rank 0] step:9761/10000 train_time:962043ms step_avg:98.56ms +[2025-08-22 19:33:31] [Rank 0] step:9781/10000 train_time:964138ms step_avg:98.57ms +[2025-08-22 19:33:31] [Rank 0] step:9781/10000 train_time:964138ms step_avg:98.57ms +[2025-08-22 19:33:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:33:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:33:47] [Rank 0] PRINT: step:9800/10000 val_loss:3.4161 svd_entropy: attn_qk:H=0.9066,top10E=0.08,eRank=414.1,q75/q25=11.94 attn_vo:H=0.9167,top10E=0.06,eRank=442.1,q75/q25=11.50 mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.76 mlp_w2:H=0.9664,top10E=0.06,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8474,top10E=0.13,eRank=281.8,q75/q25=61.52 train_time:966198ms step_avg:98.59ms +[2025-08-22 19:33:47] [Rank 0] PRINT: step:9800/10000 val_loss:3.4161 svd_entropy: attn_qk:H=0.9066,top10E=0.08,eRank=414.1,q75/q25=11.94 attn_vo:H=0.9167,top10E=0.06,eRank=442.1,q75/q25=11.50 mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.76 mlp_w2:H=0.9664,top10E=0.06,eRank=614.8,q75/q25=2.87 vo_prod:H=0.8474,top10E=0.13,eRank=281.8,q75/q25=61.52 train_time:966198ms step_avg:98.59ms +[2025-08-22 19:33:47] [Rank 0] step:9801/10000 train_time:966253ms step_avg:98.59ms +[2025-08-22 19:33:47] [Rank 0] step:9801/10000 train_time:966253ms step_avg:98.59ms +[2025-08-22 19:33:49] [Rank 0] step:9821/10000 train_time:968357ms step_avg:98.60ms +[2025-08-22 19:33:49] [Rank 0] step:9821/10000 train_time:968357ms step_avg:98.60ms +[2025-08-22 19:33:51] [Rank 0] step:9841/10000 train_time:970448ms step_avg:98.61ms 
+[2025-08-22 19:33:51] [Rank 0] step:9841/10000 train_time:970448ms step_avg:98.61ms +[2025-08-22 19:33:53] [Rank 0] step:9861/10000 train_time:972527ms step_avg:98.62ms +[2025-08-22 19:33:53] [Rank 0] step:9861/10000 train_time:972527ms step_avg:98.62ms +[2025-08-22 19:33:55] [Rank 0] step:9881/10000 train_time:974604ms step_avg:98.63ms +[2025-08-22 19:33:55] [Rank 0] step:9881/10000 train_time:974604ms step_avg:98.63ms +[2025-08-22 19:33:57] [Rank 0] step:9901/10000 train_time:976703ms step_avg:98.65ms +[2025-08-22 19:33:57] [Rank 0] step:9901/10000 train_time:976703ms step_avg:98.65ms +[2025-08-22 19:34:00] [Rank 0] step:9921/10000 train_time:978783ms step_avg:98.66ms +[2025-08-22 19:34:00] [Rank 0] step:9921/10000 train_time:978783ms step_avg:98.66ms +[2025-08-22 19:34:02] [Rank 0] step:9941/10000 train_time:980881ms step_avg:98.67ms +[2025-08-22 19:34:02] [Rank 0] step:9941/10000 train_time:980881ms step_avg:98.67ms +[2025-08-22 19:34:04] [Rank 0] step:9961/10000 train_time:982963ms step_avg:98.68ms +[2025-08-22 19:34:04] [Rank 0] step:9961/10000 train_time:982963ms step_avg:98.68ms +[2025-08-22 19:34:06] [Rank 0] step:9981/10000 train_time:985056ms step_avg:98.69ms +[2025-08-22 19:34:06] [Rank 0] step:9981/10000 train_time:985056ms step_avg:98.69ms +[2025-08-22 19:34:08] [Rank 0] step:10000/10000 train_time:987109ms step_avg:98.71ms +[2025-08-22 19:34:08] [Rank 0] step:10000/10000 train_time:987109ms step_avg:98.71ms +[2025-08-22 19:34:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:34:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:34:22] [Rank 0] PRINT: step:10000/10000 val_loss:3.4093 svd_entropy: attn_qk:H=0.9065,top10E=0.08,eRank=414.0,q75/q25=11.93 attn_vo:H=0.9167,top10E=0.06,eRank=442.1,q75/q25=11.50 mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.76 mlp_w2:H=0.9665,top10E=0.06,eRank=614.9,q75/q25=2.87 vo_prod:H=0.8474,top10E=0.13,eRank=281.9,q75/q25=61.37 train_time:987177ms step_avg:98.72ms +[2025-08-22 19:34:22] [Rank 0] PRINT: step:10000/10000 val_loss:3.4093 svd_entropy: attn_qk:H=0.9065,top10E=0.08,eRank=414.0,q75/q25=11.93 attn_vo:H=0.9167,top10E=0.06,eRank=442.1,q75/q25=11.50 mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.76 mlp_w2:H=0.9665,top10E=0.06,eRank=614.9,q75/q25=2.87 vo_prod:H=0.8474,top10E=0.13,eRank=281.9,q75/q25=61.37 train_time:987177ms step_avg:98.72ms +[2025-08-22 19:34:22] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 19:34:22 2025 --- +[2025-08-22 19:34:22] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 19:34:22 2025 --- +[2025-08-22 19:34:22] [Rank 0] PRINT: Peak memory allocated: 11035 MiB reserved: 16456 MiB +[2025-08-22 19:34:22] [Rank 0] PRINT: Peak memory allocated: 11035 MiB reserved: 16456 MiB diff --git a/logs_svd_gated/mode_10_param_gated_seed_41/config.json b/logs_svd_gated/mode_10_param_gated_seed_41/config.json new file mode 100644 index 0000000000000000000000000000000000000000..0793c0c502ecdbde22e9f9e342a72fe8bcb40f6a --- /dev/null +++ b/logs_svd_gated/mode_10_param_gated_seed_41/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 41, + "optimizer_mode": 10, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "e8b257b4-4702-4c5e-a05e-b4d6df923baf", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_10_param_gated_seed_41/training_log_e8b257b4-4702-4c5e-a05e-b4d6df923baf.txt b/logs_svd_gated/mode_10_param_gated_seed_41/training_log_e8b257b4-4702-4c5e-a05e-b4d6df923baf.txt new file mode 100644 index 0000000000000000000000000000000000000000..3c4381726e78b7dd5c2f4696c2b2d33378905bca --- /dev/null +++ b/logs_svd_gated/mode_10_param_gated_seed_41/training_log_e8b257b4-4702-4c5e-a05e-b4d6df923baf.txt @@ -0,0 +1,2926 @@ +[2025-08-22 13:22:40] [Rank 0] PRINT: --- Script Start: Fri Aug 22 13:22:40 2025 --- +[2025-08-22 13:22:40] [Rank 0] PRINT: --- Script Start: Fri Aug 22 13:22:40 2025 --- +[2025-08-22 13:22:40] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=10, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 13:22:40] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=10, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 13:22:40] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 13:22:40] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 13:22:40] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 13:22:40] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 13:22:40] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_10_param_gated_seed_41 +[2025-08-22 13:22:40] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_10_param_gated_seed_41 +[2025-08-22 13:22:40] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib 
import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert 
nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = 
float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 13:22:40] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 13:22:40] [Rank 0] PRINT: Constructing model... +[2025-08-22 13:22:40] [Rank 0] PRINT: Constructing model... +[2025-08-22 13:22:42] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 13:22:42] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 13:22:42] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 13:22:42] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 13:22:42] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 13:22:42] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 13:22:42] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 10 +[2025-08-22 13:22:42] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 10 +[2025-08-22 13:22:42] [Rank 0] PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: 0.05). +[2025-08-22 13:22:42] [Rank 0] PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: 0.05). +[2025-08-22 13:22:42] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 13:22:42] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 13:22:42] [Rank 0] PRINT: Muon optimizer is active with 47 parameters. +[2025-08-22 13:22:42] [Rank 0] PRINT: Muon optimizer is active with 47 parameters. +[2025-08-22 13:22:42] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 13:22:42] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 13:22:42] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 13:22:42] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 13:22:42] [Rank 0] PRINT: Starting warmup... +[2025-08-22 13:22:42] [Rank 0] PRINT: Starting warmup... +[2025-08-22 13:23:26] [Rank 0] PRINT: Warmup complete. +[2025-08-22 13:23:26] [Rank 0] PRINT: Warmup complete. +[2025-08-22 13:23:26] [Rank 0] PRINT: Starting training... +[2025-08-22 13:23:26] [Rank 0] PRINT: Starting training... 
+[2025-08-22 13:23:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:23:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:23:44] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 13:23:44] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 13:23:46] [Rank 0] step:21/10000 train_time:1846ms step_avg:87.90ms +[2025-08-22 13:23:46] [Rank 0] step:21/10000 train_time:1846ms step_avg:87.90ms +[2025-08-22 13:23:48] [Rank 0] step:41/10000 train_time:3641ms step_avg:88.80ms +[2025-08-22 13:23:48] [Rank 0] step:41/10000 train_time:3641ms step_avg:88.80ms +[2025-08-22 13:23:50] [Rank 0] step:61/10000 train_time:5437ms step_avg:89.14ms +[2025-08-22 13:23:50] [Rank 0] step:61/10000 train_time:5437ms step_avg:89.14ms +[2025-08-22 13:23:51] [Rank 0] step:81/10000 train_time:7234ms step_avg:89.31ms +[2025-08-22 13:23:51] [Rank 0] step:81/10000 train_time:7234ms step_avg:89.31ms +[2025-08-22 13:23:53] [Rank 0] step:101/10000 train_time:9033ms step_avg:89.44ms +[2025-08-22 13:23:53] [Rank 0] step:101/10000 train_time:9033ms step_avg:89.44ms +[2025-08-22 13:23:55] [Rank 0] step:121/10000 train_time:10833ms step_avg:89.53ms +[2025-08-22 13:23:55] [Rank 0] step:121/10000 
train_time:10833ms step_avg:89.53ms +[2025-08-22 13:23:57] [Rank 0] step:141/10000 train_time:12633ms step_avg:89.60ms +[2025-08-22 13:23:57] [Rank 0] step:141/10000 train_time:12633ms step_avg:89.60ms +[2025-08-22 13:23:59] [Rank 0] step:161/10000 train_time:14435ms step_avg:89.66ms +[2025-08-22 13:23:59] [Rank 0] step:161/10000 train_time:14435ms step_avg:89.66ms +[2025-08-22 13:24:00] [Rank 0] step:181/10000 train_time:16236ms step_avg:89.70ms +[2025-08-22 13:24:00] [Rank 0] step:181/10000 train_time:16236ms step_avg:89.70ms +[2025-08-22 13:24:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:24:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:24:16] [Rank 0] PRINT: step:200/10000 val_loss:5.4503 svd_entropy: attn_qk:H=0.6662,top10E=0.45,eRank=110.3,q75/q25=35.45 attn_vo:H=0.6992,top10E=0.31,eRank=193.3,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=624.8,q75/q25=3.12 mlp_w2:H=0.9663,top10E=0.04,eRank=614.6,q75/q25=3.35 vo_prod:H=0.4609,top10E=0.61,eRank=28.4,q75/q25=inf train_time:18042ms step_avg:90.21ms +[2025-08-22 13:24:16] [Rank 0] PRINT: step:200/10000 val_loss:5.4503 svd_entropy: attn_qk:H=0.6662,top10E=0.45,eRank=110.3,q75/q25=35.45 attn_vo:H=0.6992,top10E=0.31,eRank=193.3,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=624.8,q75/q25=3.12 mlp_w2:H=0.9663,top10E=0.04,eRank=614.6,q75/q25=3.35 vo_prod:H=0.4609,top10E=0.61,eRank=28.4,q75/q25=inf train_time:18042ms step_avg:90.21ms +[2025-08-22 13:24:16] [Rank 0] step:201/10000 train_time:18064ms step_avg:89.87ms +[2025-08-22 13:24:16] [Rank 0] step:201/10000 train_time:18064ms step_avg:89.87ms +[2025-08-22 13:24:18] [Rank 0] step:221/10000 train_time:19864ms step_avg:89.88ms +[2025-08-22 13:24:18] [Rank 0] step:221/10000 train_time:19864ms step_avg:89.88ms +[2025-08-22 13:24:20] [Rank 0] step:241/10000 
train_time:21662ms step_avg:89.88ms +[2025-08-22 13:24:20] [Rank 0] step:241/10000 train_time:21662ms step_avg:89.88ms +[2025-08-22 13:24:22] [Rank 0] step:261/10000 train_time:23462ms step_avg:89.89ms +[2025-08-22 13:24:22] [Rank 0] step:261/10000 train_time:23462ms step_avg:89.89ms +[2025-08-22 13:24:23] [Rank 0] step:281/10000 train_time:25264ms step_avg:89.91ms +[2025-08-22 13:24:23] [Rank 0] step:281/10000 train_time:25264ms step_avg:89.91ms +[2025-08-22 13:24:25] [Rank 0] step:301/10000 train_time:27066ms step_avg:89.92ms +[2025-08-22 13:24:25] [Rank 0] step:301/10000 train_time:27066ms step_avg:89.92ms +[2025-08-22 13:24:27] [Rank 0] step:321/10000 train_time:28868ms step_avg:89.93ms +[2025-08-22 13:24:27] [Rank 0] step:321/10000 train_time:28868ms step_avg:89.93ms +[2025-08-22 13:24:29] [Rank 0] step:341/10000 train_time:30673ms step_avg:89.95ms +[2025-08-22 13:24:29] [Rank 0] step:341/10000 train_time:30673ms step_avg:89.95ms +[2025-08-22 13:24:31] [Rank 0] step:361/10000 train_time:32476ms step_avg:89.96ms +[2025-08-22 13:24:31] [Rank 0] step:361/10000 train_time:32476ms step_avg:89.96ms +[2025-08-22 13:24:32] [Rank 0] step:381/10000 train_time:34280ms step_avg:89.97ms +[2025-08-22 13:24:32] [Rank 0] step:381/10000 train_time:34280ms step_avg:89.97ms +[2025-08-22 13:24:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:24:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:24:48] [Rank 0] PRINT: step:400/10000 val_loss:5.0826 svd_entropy: attn_qk:H=0.6887,top10E=0.40,eRank=120.5,q75/q25=43.30 attn_vo:H=0.7529,top10E=0.23,eRank=235.7,q75/q25=inf mlp_w1:H=0.9673,top10E=0.04,eRank=618.6,q75/q25=3.20 mlp_w2:H=0.9667,top10E=0.04,eRank=616.0,q75/q25=3.28 vo_prod:H=0.5385,top10E=0.48,eRank=48.3,q75/q25=inf train_time:36089ms step_avg:90.22ms +[2025-08-22 13:24:48] [Rank 0] PRINT: step:400/10000 val_loss:5.0826 svd_entropy: attn_qk:H=0.6887,top10E=0.40,eRank=120.5,q75/q25=43.30 attn_vo:H=0.7529,top10E=0.23,eRank=235.7,q75/q25=inf mlp_w1:H=0.9673,top10E=0.04,eRank=618.6,q75/q25=3.20 mlp_w2:H=0.9667,top10E=0.04,eRank=616.0,q75/q25=3.28 vo_prod:H=0.5385,top10E=0.48,eRank=48.3,q75/q25=inf train_time:36089ms step_avg:90.22ms +[2025-08-22 13:24:48] [Rank 0] step:401/10000 train_time:36109ms step_avg:90.05ms +[2025-08-22 13:24:48] [Rank 0] step:401/10000 train_time:36109ms step_avg:90.05ms +[2025-08-22 13:24:50] [Rank 0] step:421/10000 train_time:37910ms step_avg:90.05ms +[2025-08-22 13:24:50] [Rank 0] step:421/10000 train_time:37910ms step_avg:90.05ms +[2025-08-22 13:24:51] [Rank 0] step:441/10000 train_time:39707ms step_avg:90.04ms +[2025-08-22 13:24:51] [Rank 0] step:441/10000 train_time:39707ms step_avg:90.04ms +[2025-08-22 13:24:53] [Rank 0] step:461/10000 train_time:41505ms step_avg:90.03ms +[2025-08-22 13:24:53] [Rank 0] step:461/10000 train_time:41505ms step_avg:90.03ms +[2025-08-22 13:24:55] [Rank 0] step:481/10000 train_time:43304ms step_avg:90.03ms +[2025-08-22 13:24:55] [Rank 0] step:481/10000 train_time:43304ms step_avg:90.03ms +[2025-08-22 13:24:57] [Rank 0] step:501/10000 train_time:45105ms step_avg:90.03ms +[2025-08-22 13:24:57] [Rank 0] step:501/10000 train_time:45105ms step_avg:90.03ms +[2025-08-22 13:24:59] [Rank 0] step:521/10000 train_time:46905ms step_avg:90.03ms +[2025-08-22 13:24:59] [Rank 0] step:521/10000 train_time:46905ms step_avg:90.03ms +[2025-08-22 13:25:00] [Rank 0] step:541/10000 train_time:48704ms 
step_avg:90.03ms +[2025-08-22 13:25:00] [Rank 0] step:541/10000 train_time:48704ms step_avg:90.03ms +[2025-08-22 13:25:02] [Rank 0] step:561/10000 train_time:50505ms step_avg:90.03ms +[2025-08-22 13:25:02] [Rank 0] step:561/10000 train_time:50505ms step_avg:90.03ms +[2025-08-22 13:25:04] [Rank 0] step:581/10000 train_time:52306ms step_avg:90.03ms +[2025-08-22 13:25:04] [Rank 0] step:581/10000 train_time:52306ms step_avg:90.03ms +[2025-08-22 13:25:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:25:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:25:20] [Rank 0] PRINT: step:600/10000 val_loss:4.8864 svd_entropy: attn_qk:H=0.6928,top10E=0.39,eRank=123.1,q75/q25=47.87 attn_vo:H=0.7664,top10E=0.21,eRank=246.5,q75/q25=inf mlp_w1:H=0.9664,top10E=0.04,eRank=615.1,q75/q25=3.24 mlp_w2:H=0.9657,top10E=0.04,eRank=611.8,q75/q25=3.29 vo_prod:H=0.5637,top10E=0.43,eRank=58.3,q75/q25=inf train_time:54114ms step_avg:90.19ms +[2025-08-22 13:25:20] [Rank 0] PRINT: step:600/10000 val_loss:4.8864 svd_entropy: attn_qk:H=0.6928,top10E=0.39,eRank=123.1,q75/q25=47.87 attn_vo:H=0.7664,top10E=0.21,eRank=246.5,q75/q25=inf mlp_w1:H=0.9664,top10E=0.04,eRank=615.1,q75/q25=3.24 mlp_w2:H=0.9657,top10E=0.04,eRank=611.8,q75/q25=3.29 vo_prod:H=0.5637,top10E=0.43,eRank=58.3,q75/q25=inf train_time:54114ms step_avg:90.19ms +[2025-08-22 13:25:20] [Rank 0] step:601/10000 train_time:54135ms step_avg:90.08ms +[2025-08-22 13:25:20] [Rank 0] step:601/10000 train_time:54135ms step_avg:90.08ms +[2025-08-22 13:25:22] [Rank 0] step:621/10000 train_time:55929ms step_avg:90.06ms +[2025-08-22 13:25:22] [Rank 0] step:621/10000 train_time:55929ms step_avg:90.06ms +[2025-08-22 13:25:23] [Rank 0] step:641/10000 train_time:57727ms step_avg:90.06ms +[2025-08-22 13:25:23] [Rank 0] step:641/10000 train_time:57727ms step_avg:90.06ms 
+[2025-08-22 13:25:25] [Rank 0] step:661/10000 train_time:59525ms step_avg:90.05ms +[2025-08-22 13:25:25] [Rank 0] step:661/10000 train_time:59525ms step_avg:90.05ms +[2025-08-22 13:25:27] [Rank 0] step:681/10000 train_time:61324ms step_avg:90.05ms +[2025-08-22 13:25:27] [Rank 0] step:681/10000 train_time:61324ms step_avg:90.05ms +[2025-08-22 13:25:29] [Rank 0] step:701/10000 train_time:63126ms step_avg:90.05ms +[2025-08-22 13:25:29] [Rank 0] step:701/10000 train_time:63126ms step_avg:90.05ms +[2025-08-22 13:25:31] [Rank 0] step:721/10000 train_time:64928ms step_avg:90.05ms +[2025-08-22 13:25:31] [Rank 0] step:721/10000 train_time:64928ms step_avg:90.05ms +[2025-08-22 13:25:32] [Rank 0] step:741/10000 train_time:66730ms step_avg:90.05ms +[2025-08-22 13:25:32] [Rank 0] step:741/10000 train_time:66730ms step_avg:90.05ms +[2025-08-22 13:25:34] [Rank 0] step:761/10000 train_time:68546ms step_avg:90.07ms +[2025-08-22 13:25:34] [Rank 0] step:761/10000 train_time:68546ms step_avg:90.07ms +[2025-08-22 13:25:36] [Rank 0] step:781/10000 train_time:70364ms step_avg:90.09ms +[2025-08-22 13:25:36] [Rank 0] step:781/10000 train_time:70364ms step_avg:90.09ms +[2025-08-22 13:25:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:25:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:25:52] [Rank 0] PRINT: step:800/10000 val_loss:4.6828 svd_entropy: attn_qk:H=0.6975,top10E=0.38,eRank=126.1,q75/q25=50.69 attn_vo:H=0.7738,top10E=0.19,eRank=252.7,q75/q25=inf mlp_w1:H=0.9664,top10E=0.04,eRank=614.8,q75/q25=3.23 mlp_w2:H=0.9652,top10E=0.04,eRank=610.1,q75/q25=3.28 vo_prod:H=0.5814,top10E=0.40,eRank=66.9,q75/q25=inf train_time:72186ms step_avg:90.23ms +[2025-08-22 13:25:52] [Rank 0] PRINT: step:800/10000 val_loss:4.6828 svd_entropy: attn_qk:H=0.6975,top10E=0.38,eRank=126.1,q75/q25=50.69 attn_vo:H=0.7738,top10E=0.19,eRank=252.7,q75/q25=inf mlp_w1:H=0.9664,top10E=0.04,eRank=614.8,q75/q25=3.23 mlp_w2:H=0.9652,top10E=0.04,eRank=610.1,q75/q25=3.28 vo_prod:H=0.5814,top10E=0.40,eRank=66.9,q75/q25=inf train_time:72186ms step_avg:90.23ms +[2025-08-22 13:25:52] [Rank 0] step:801/10000 train_time:72206ms step_avg:90.15ms +[2025-08-22 13:25:52] [Rank 0] step:801/10000 train_time:72206ms step_avg:90.15ms +[2025-08-22 13:25:54] [Rank 0] step:821/10000 train_time:74021ms step_avg:90.16ms +[2025-08-22 13:25:54] [Rank 0] step:821/10000 train_time:74021ms step_avg:90.16ms +[2025-08-22 13:25:55] [Rank 0] step:841/10000 train_time:75835ms step_avg:90.17ms +[2025-08-22 13:25:55] [Rank 0] step:841/10000 train_time:75835ms step_avg:90.17ms +[2025-08-22 13:25:57] [Rank 0] step:861/10000 train_time:77650ms step_avg:90.19ms +[2025-08-22 13:25:57] [Rank 0] step:861/10000 train_time:77650ms step_avg:90.19ms +[2025-08-22 13:25:59] [Rank 0] step:881/10000 train_time:79467ms step_avg:90.20ms +[2025-08-22 13:25:59] [Rank 0] step:881/10000 train_time:79467ms step_avg:90.20ms +[2025-08-22 13:26:01] [Rank 0] step:901/10000 train_time:81285ms step_avg:90.22ms +[2025-08-22 13:26:01] [Rank 0] step:901/10000 train_time:81285ms step_avg:90.22ms +[2025-08-22 13:26:03] [Rank 0] step:921/10000 train_time:83104ms step_avg:90.23ms +[2025-08-22 13:26:03] [Rank 0] step:921/10000 train_time:83104ms step_avg:90.23ms +[2025-08-22 13:26:05] [Rank 0] step:941/10000 train_time:84925ms 
step_avg:90.25ms +[2025-08-22 13:26:05] [Rank 0] step:941/10000 train_time:84925ms step_avg:90.25ms +[2025-08-22 13:26:06] [Rank 0] step:961/10000 train_time:86744ms step_avg:90.26ms +[2025-08-22 13:26:06] [Rank 0] step:961/10000 train_time:86744ms step_avg:90.26ms +[2025-08-22 13:26:08] [Rank 0] step:981/10000 train_time:88565ms step_avg:90.28ms +[2025-08-22 13:26:08] [Rank 0] step:981/10000 train_time:88565ms step_avg:90.28ms +[2025-08-22 13:26:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:26:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:26:24] [Rank 0] PRINT: step:1000/10000 val_loss:4.5107 svd_entropy: attn_qk:H=0.7014,top10E=0.37,eRank=128.6,q75/q25=51.70 attn_vo:H=0.7787,top10E=0.19,eRank=256.5,q75/q25=inf mlp_w1:H=0.9666,top10E=0.04,eRank=615.7,q75/q25=3.20 mlp_w2:H=0.9651,top10E=0.05,eRank=609.5,q75/q25=3.26 vo_prod:H=0.5909,top10E=0.38,eRank=72.7,q75/q25=inf train_time:90389ms step_avg:90.39ms +[2025-08-22 13:26:24] [Rank 0] PRINT: step:1000/10000 val_loss:4.5107 svd_entropy: attn_qk:H=0.7014,top10E=0.37,eRank=128.6,q75/q25=51.70 attn_vo:H=0.7787,top10E=0.19,eRank=256.5,q75/q25=inf mlp_w1:H=0.9666,top10E=0.04,eRank=615.7,q75/q25=3.20 mlp_w2:H=0.9651,top10E=0.05,eRank=609.5,q75/q25=3.26 vo_prod:H=0.5909,top10E=0.38,eRank=72.7,q75/q25=inf train_time:90389ms step_avg:90.39ms +[2025-08-22 13:26:24] [Rank 0] step:1001/10000 train_time:90409ms step_avg:90.32ms +[2025-08-22 13:26:24] [Rank 0] step:1001/10000 train_time:90409ms step_avg:90.32ms +[2025-08-22 13:26:26] [Rank 0] step:1021/10000 train_time:92208ms step_avg:90.31ms +[2025-08-22 13:26:26] [Rank 0] step:1021/10000 train_time:92208ms step_avg:90.31ms +[2025-08-22 13:26:27] [Rank 0] step:1041/10000 train_time:94020ms step_avg:90.32ms +[2025-08-22 13:26:27] [Rank 0] step:1041/10000 train_time:94020ms 
step_avg:90.32ms +[2025-08-22 13:26:29] [Rank 0] step:1061/10000 train_time:95833ms step_avg:90.32ms +[2025-08-22 13:26:29] [Rank 0] step:1061/10000 train_time:95833ms step_avg:90.32ms +[2025-08-22 13:26:31] [Rank 0] step:1081/10000 train_time:97647ms step_avg:90.33ms +[2025-08-22 13:26:31] [Rank 0] step:1081/10000 train_time:97647ms step_avg:90.33ms +[2025-08-22 13:26:33] [Rank 0] step:1101/10000 train_time:99462ms step_avg:90.34ms +[2025-08-22 13:26:33] [Rank 0] step:1101/10000 train_time:99462ms step_avg:90.34ms +[2025-08-22 13:26:35] [Rank 0] step:1121/10000 train_time:101277ms step_avg:90.35ms +[2025-08-22 13:26:35] [Rank 0] step:1121/10000 train_time:101277ms step_avg:90.35ms +[2025-08-22 13:26:37] [Rank 0] step:1141/10000 train_time:103092ms step_avg:90.35ms +[2025-08-22 13:26:37] [Rank 0] step:1141/10000 train_time:103092ms step_avg:90.35ms +[2025-08-22 13:26:38] [Rank 0] step:1161/10000 train_time:104909ms step_avg:90.36ms +[2025-08-22 13:26:38] [Rank 0] step:1161/10000 train_time:104909ms step_avg:90.36ms +[2025-08-22 13:26:40] [Rank 0] step:1181/10000 train_time:106728ms step_avg:90.37ms +[2025-08-22 13:26:40] [Rank 0] step:1181/10000 train_time:106728ms step_avg:90.37ms +[2025-08-22 13:26:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:26:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:26:56] [Rank 0] PRINT: step:1200/10000 val_loss:4.3965 svd_entropy: attn_qk:H=0.7042,top10E=0.36,eRank=130.3,q75/q25=53.67 attn_vo:H=0.7820,top10E=0.18,eRank=259.3,q75/q25=inf mlp_w1:H=0.9667,top10E=0.04,eRank=616.1,q75/q25=3.19 mlp_w2:H=0.9649,top10E=0.05,eRank=608.8,q75/q25=3.25 vo_prod:H=0.5976,top10E=0.36,eRank=77.0,q75/q25=inf train_time:108550ms step_avg:90.46ms +[2025-08-22 13:26:56] [Rank 0] PRINT: step:1200/10000 val_loss:4.3965 svd_entropy: attn_qk:H=0.7042,top10E=0.36,eRank=130.3,q75/q25=53.67 attn_vo:H=0.7820,top10E=0.18,eRank=259.3,q75/q25=inf mlp_w1:H=0.9667,top10E=0.04,eRank=616.1,q75/q25=3.19 mlp_w2:H=0.9649,top10E=0.05,eRank=608.8,q75/q25=3.25 vo_prod:H=0.5976,top10E=0.36,eRank=77.0,q75/q25=inf train_time:108550ms step_avg:90.46ms +[2025-08-22 13:26:56] [Rank 0] step:1201/10000 train_time:108571ms step_avg:90.40ms +[2025-08-22 13:26:56] [Rank 0] step:1201/10000 train_time:108571ms step_avg:90.40ms +[2025-08-22 13:26:58] [Rank 0] step:1221/10000 train_time:110375ms step_avg:90.40ms +[2025-08-22 13:26:58] [Rank 0] step:1221/10000 train_time:110375ms step_avg:90.40ms +[2025-08-22 13:27:00] [Rank 0] step:1241/10000 train_time:112186ms step_avg:90.40ms +[2025-08-22 13:27:00] [Rank 0] step:1241/10000 train_time:112186ms step_avg:90.40ms +[2025-08-22 13:27:01] [Rank 0] step:1261/10000 train_time:114000ms step_avg:90.40ms +[2025-08-22 13:27:01] [Rank 0] step:1261/10000 train_time:114000ms step_avg:90.40ms +[2025-08-22 13:27:03] [Rank 0] step:1281/10000 train_time:115814ms step_avg:90.41ms +[2025-08-22 13:27:03] [Rank 0] step:1281/10000 train_time:115814ms step_avg:90.41ms +[2025-08-22 13:27:05] [Rank 0] step:1301/10000 train_time:117628ms step_avg:90.41ms +[2025-08-22 13:27:05] [Rank 0] step:1301/10000 train_time:117628ms step_avg:90.41ms +[2025-08-22 13:27:07] [Rank 0] step:1321/10000 train_time:119441ms step_avg:90.42ms +[2025-08-22 13:27:07] [Rank 0] step:1321/10000 train_time:119441ms step_avg:90.42ms +[2025-08-22 13:27:09] [Rank 0] 
step:1341/10000 train_time:121255ms step_avg:90.42ms +[2025-08-22 13:27:09] [Rank 0] step:1341/10000 train_time:121255ms step_avg:90.42ms +[2025-08-22 13:27:10] [Rank 0] step:1361/10000 train_time:123070ms step_avg:90.43ms +[2025-08-22 13:27:10] [Rank 0] step:1361/10000 train_time:123070ms step_avg:90.43ms +[2025-08-22 13:27:12] [Rank 0] step:1381/10000 train_time:124883ms step_avg:90.43ms +[2025-08-22 13:27:12] [Rank 0] step:1381/10000 train_time:124883ms step_avg:90.43ms +[2025-08-22 13:27:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:27:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:27:28] [Rank 0] PRINT: step:1400/10000 val_loss:4.3370 svd_entropy: attn_qk:H=0.7061,top10E=0.36,eRank=131.8,q75/q25=55.06 attn_vo:H=0.7844,top10E=0.17,eRank=261.6,q75/q25=inf mlp_w1:H=0.9668,top10E=0.04,eRank=616.7,q75/q25=3.17 mlp_w2:H=0.9649,top10E=0.05,eRank=608.5,q75/q25=3.23 vo_prod:H=0.6017,top10E=0.36,eRank=79.8,q75/q25=inf train_time:126704ms step_avg:90.50ms +[2025-08-22 13:27:28] [Rank 0] PRINT: step:1400/10000 val_loss:4.3370 svd_entropy: attn_qk:H=0.7061,top10E=0.36,eRank=131.8,q75/q25=55.06 attn_vo:H=0.7844,top10E=0.17,eRank=261.6,q75/q25=inf mlp_w1:H=0.9668,top10E=0.04,eRank=616.7,q75/q25=3.17 mlp_w2:H=0.9649,top10E=0.05,eRank=608.5,q75/q25=3.23 vo_prod:H=0.6017,top10E=0.36,eRank=79.8,q75/q25=inf train_time:126704ms step_avg:90.50ms +[2025-08-22 13:27:28] [Rank 0] step:1401/10000 train_time:126725ms step_avg:90.45ms +[2025-08-22 13:27:28] [Rank 0] step:1401/10000 train_time:126725ms step_avg:90.45ms +[2025-08-22 13:27:30] [Rank 0] step:1421/10000 train_time:128526ms step_avg:90.45ms +[2025-08-22 13:27:30] [Rank 0] step:1421/10000 train_time:128526ms step_avg:90.45ms +[2025-08-22 13:27:32] [Rank 0] step:1441/10000 train_time:130336ms step_avg:90.45ms +[2025-08-22 13:27:32] 
[Rank 0] step:1441/10000 train_time:130336ms step_avg:90.45ms +[2025-08-22 13:27:33] [Rank 0] step:1461/10000 train_time:132147ms step_avg:90.45ms +[2025-08-22 13:27:33] [Rank 0] step:1461/10000 train_time:132147ms step_avg:90.45ms +[2025-08-22 13:27:35] [Rank 0] step:1481/10000 train_time:133960ms step_avg:90.45ms +[2025-08-22 13:27:35] [Rank 0] step:1481/10000 train_time:133960ms step_avg:90.45ms +[2025-08-22 13:27:37] [Rank 0] step:1501/10000 train_time:135782ms step_avg:90.46ms +[2025-08-22 13:27:37] [Rank 0] step:1501/10000 train_time:135782ms step_avg:90.46ms +[2025-08-22 13:27:39] [Rank 0] step:1521/10000 train_time:137606ms step_avg:90.47ms +[2025-08-22 13:27:39] [Rank 0] step:1521/10000 train_time:137606ms step_avg:90.47ms +[2025-08-22 13:27:41] [Rank 0] step:1541/10000 train_time:139433ms step_avg:90.48ms +[2025-08-22 13:27:41] [Rank 0] step:1541/10000 train_time:139433ms step_avg:90.48ms +[2025-08-22 13:27:43] [Rank 0] step:1561/10000 train_time:141258ms step_avg:90.49ms +[2025-08-22 13:27:43] [Rank 0] step:1561/10000 train_time:141258ms step_avg:90.49ms +[2025-08-22 13:27:44] [Rank 0] step:1581/10000 train_time:143086ms step_avg:90.50ms +[2025-08-22 13:27:44] [Rank 0] step:1581/10000 train_time:143086ms step_avg:90.50ms +[2025-08-22 13:27:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:27:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:28:00] [Rank 0] PRINT: step:1600/10000 val_loss:4.2459 svd_entropy: attn_qk:H=0.7085,top10E=0.35,eRank=133.2,q75/q25=55.70 attn_vo:H=0.7864,top10E=0.17,eRank=263.7,q75/q25=inf mlp_w1:H=0.9670,top10E=0.04,eRank=617.2,q75/q25=3.15 mlp_w2:H=0.9648,top10E=0.05,eRank=608.4,q75/q25=3.22 vo_prod:H=0.6058,top10E=0.35,eRank=82.4,q75/q25=inf train_time:144919ms step_avg:90.57ms +[2025-08-22 13:28:00] [Rank 0] PRINT: step:1600/10000 val_loss:4.2459 svd_entropy: attn_qk:H=0.7085,top10E=0.35,eRank=133.2,q75/q25=55.70 attn_vo:H=0.7864,top10E=0.17,eRank=263.7,q75/q25=inf mlp_w1:H=0.9670,top10E=0.04,eRank=617.2,q75/q25=3.15 mlp_w2:H=0.9648,top10E=0.05,eRank=608.4,q75/q25=3.22 vo_prod:H=0.6058,top10E=0.35,eRank=82.4,q75/q25=inf train_time:144919ms step_avg:90.57ms +[2025-08-22 13:28:00] [Rank 0] step:1601/10000 train_time:144940ms step_avg:90.53ms +[2025-08-22 13:28:00] [Rank 0] step:1601/10000 train_time:144940ms step_avg:90.53ms +[2025-08-22 13:28:02] [Rank 0] step:1621/10000 train_time:146773ms step_avg:90.54ms +[2025-08-22 13:28:02] [Rank 0] step:1621/10000 train_time:146773ms step_avg:90.54ms +[2025-08-22 13:28:04] [Rank 0] step:1641/10000 train_time:148601ms step_avg:90.55ms +[2025-08-22 13:28:04] [Rank 0] step:1641/10000 train_time:148601ms step_avg:90.55ms +[2025-08-22 13:28:06] [Rank 0] step:1661/10000 train_time:150426ms step_avg:90.56ms +[2025-08-22 13:28:06] [Rank 0] step:1661/10000 train_time:150426ms step_avg:90.56ms +[2025-08-22 13:28:08] [Rank 0] step:1681/10000 train_time:152254ms step_avg:90.57ms +[2025-08-22 13:28:08] [Rank 0] step:1681/10000 train_time:152254ms step_avg:90.57ms +[2025-08-22 13:28:09] [Rank 0] step:1701/10000 train_time:154084ms step_avg:90.58ms +[2025-08-22 13:28:09] [Rank 0] step:1701/10000 train_time:154084ms step_avg:90.58ms +[2025-08-22 13:28:11] [Rank 0] step:1721/10000 train_time:155914ms step_avg:90.59ms +[2025-08-22 13:28:11] [Rank 0] step:1721/10000 train_time:155914ms step_avg:90.59ms +[2025-08-22 13:28:13] [Rank 0] 
step:1741/10000 train_time:157744ms step_avg:90.61ms +[2025-08-22 13:28:13] [Rank 0] step:1741/10000 train_time:157744ms step_avg:90.61ms +[2025-08-22 13:28:15] [Rank 0] step:1761/10000 train_time:159576ms step_avg:90.62ms +[2025-08-22 13:28:15] [Rank 0] step:1761/10000 train_time:159576ms step_avg:90.62ms +[2025-08-22 13:28:17] [Rank 0] step:1781/10000 train_time:161407ms step_avg:90.63ms +[2025-08-22 13:28:17] [Rank 0] step:1781/10000 train_time:161407ms step_avg:90.63ms +[2025-08-22 13:28:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:28:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:28:32] [Rank 0] PRINT: step:1800/10000 val_loss:4.1841 svd_entropy: attn_qk:H=0.7104,top10E=0.35,eRank=134.7,q75/q25=56.46 attn_vo:H=0.7882,top10E=0.17,eRank=265.8,q75/q25=inf mlp_w1:H=0.9671,top10E=0.04,eRank=617.8,q75/q25=3.13 mlp_w2:H=0.9648,top10E=0.05,eRank=608.3,q75/q25=3.20 vo_prod:H=0.6096,top10E=0.34,eRank=84.9,q75/q25=inf train_time:163245ms step_avg:90.69ms +[2025-08-22 13:28:32] [Rank 0] PRINT: step:1800/10000 val_loss:4.1841 svd_entropy: attn_qk:H=0.7104,top10E=0.35,eRank=134.7,q75/q25=56.46 attn_vo:H=0.7882,top10E=0.17,eRank=265.8,q75/q25=inf mlp_w1:H=0.9671,top10E=0.04,eRank=617.8,q75/q25=3.13 mlp_w2:H=0.9648,top10E=0.05,eRank=608.3,q75/q25=3.20 vo_prod:H=0.6096,top10E=0.34,eRank=84.9,q75/q25=inf train_time:163245ms step_avg:90.69ms +[2025-08-22 13:28:32] [Rank 0] step:1801/10000 train_time:163265ms step_avg:90.65ms +[2025-08-22 13:28:32] [Rank 0] step:1801/10000 train_time:163265ms step_avg:90.65ms +[2025-08-22 13:28:34] [Rank 0] step:1821/10000 train_time:165079ms step_avg:90.65ms +[2025-08-22 13:28:34] [Rank 0] step:1821/10000 train_time:165079ms step_avg:90.65ms +[2025-08-22 13:28:36] [Rank 0] step:1841/10000 train_time:166903ms step_avg:90.66ms +[2025-08-22 13:28:36] 
[Rank 0] step:1841/10000 train_time:166903ms step_avg:90.66ms +[2025-08-22 13:28:38] [Rank 0] step:1861/10000 train_time:168727ms step_avg:90.66ms +[2025-08-22 13:28:38] [Rank 0] step:1861/10000 train_time:168727ms step_avg:90.66ms +[2025-08-22 13:28:40] [Rank 0] step:1881/10000 train_time:170552ms step_avg:90.67ms +[2025-08-22 13:28:40] [Rank 0] step:1881/10000 train_time:170552ms step_avg:90.67ms +[2025-08-22 13:28:42] [Rank 0] step:1901/10000 train_time:172378ms step_avg:90.68ms +[2025-08-22 13:28:42] [Rank 0] step:1901/10000 train_time:172378ms step_avg:90.68ms +[2025-08-22 13:28:43] [Rank 0] step:1921/10000 train_time:174205ms step_avg:90.68ms +[2025-08-22 13:28:43] [Rank 0] step:1921/10000 train_time:174205ms step_avg:90.68ms +[2025-08-22 13:28:45] [Rank 0] step:1941/10000 train_time:176035ms step_avg:90.69ms +[2025-08-22 13:28:45] [Rank 0] step:1941/10000 train_time:176035ms step_avg:90.69ms +[2025-08-22 13:28:47] [Rank 0] step:1961/10000 train_time:177862ms step_avg:90.70ms +[2025-08-22 13:28:47] [Rank 0] step:1961/10000 train_time:177862ms step_avg:90.70ms +[2025-08-22 13:28:49] [Rank 0] step:1981/10000 train_time:179691ms step_avg:90.71ms +[2025-08-22 13:28:49] [Rank 0] step:1981/10000 train_time:179691ms step_avg:90.71ms +[2025-08-22 13:28:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:28:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:29:05] [Rank 0] PRINT: step:2000/10000 val_loss:4.1552 svd_entropy: attn_qk:H=0.7122,top10E=0.35,eRank=136.0,q75/q25=57.18 attn_vo:H=0.7895,top10E=0.16,eRank=267.4,q75/q25=inf mlp_w1:H=0.9673,top10E=0.04,eRank=618.4,q75/q25=3.13 mlp_w2:H=0.9648,top10E=0.05,eRank=608.4,q75/q25=3.20 vo_prod:H=0.6120,top10E=0.34,eRank=86.7,q75/q25=inf train_time:181526ms step_avg:90.76ms +[2025-08-22 13:29:05] [Rank 0] PRINT: step:2000/10000 val_loss:4.1552 svd_entropy: attn_qk:H=0.7122,top10E=0.35,eRank=136.0,q75/q25=57.18 attn_vo:H=0.7895,top10E=0.16,eRank=267.4,q75/q25=inf mlp_w1:H=0.9673,top10E=0.04,eRank=618.4,q75/q25=3.13 mlp_w2:H=0.9648,top10E=0.05,eRank=608.4,q75/q25=3.20 vo_prod:H=0.6120,top10E=0.34,eRank=86.7,q75/q25=inf train_time:181526ms step_avg:90.76ms +[2025-08-22 13:29:05] [Rank 0] step:2001/10000 train_time:181546ms step_avg:90.73ms +[2025-08-22 13:29:05] [Rank 0] step:2001/10000 train_time:181546ms step_avg:90.73ms +[2025-08-22 13:29:07] [Rank 0] step:2021/10000 train_time:183365ms step_avg:90.73ms +[2025-08-22 13:29:07] [Rank 0] step:2021/10000 train_time:183365ms step_avg:90.73ms +[2025-08-22 13:29:09] [Rank 0] step:2041/10000 train_time:185838ms step_avg:91.05ms +[2025-08-22 13:29:09] [Rank 0] step:2041/10000 train_time:185838ms step_avg:91.05ms +[2025-08-22 13:29:11] [Rank 0] step:2061/10000 train_time:187666ms step_avg:91.06ms +[2025-08-22 13:29:11] [Rank 0] step:2061/10000 train_time:187666ms step_avg:91.06ms +[2025-08-22 13:29:13] [Rank 0] step:2081/10000 train_time:189491ms step_avg:91.06ms +[2025-08-22 13:29:13] [Rank 0] step:2081/10000 train_time:189491ms step_avg:91.06ms +[2025-08-22 13:29:14] [Rank 0] step:2101/10000 train_time:191318ms step_avg:91.06ms +[2025-08-22 13:29:14] [Rank 0] step:2101/10000 train_time:191318ms step_avg:91.06ms +[2025-08-22 13:29:16] [Rank 0] step:2121/10000 train_time:193144ms step_avg:91.06ms +[2025-08-22 13:29:16] [Rank 0] step:2121/10000 train_time:193144ms step_avg:91.06ms +[2025-08-22 13:29:18] [Rank 0] 
step:2141/10000 train_time:194972ms step_avg:91.07ms +[2025-08-22 13:29:18] [Rank 0] step:2141/10000 train_time:194972ms step_avg:91.07ms +[2025-08-22 13:29:20] [Rank 0] step:2161/10000 train_time:196802ms step_avg:91.07ms +[2025-08-22 13:29:20] [Rank 0] step:2161/10000 train_time:196802ms step_avg:91.07ms +[2025-08-22 13:29:22] [Rank 0] step:2181/10000 train_time:198671ms step_avg:91.09ms +[2025-08-22 13:29:22] [Rank 0] step:2181/10000 train_time:198671ms step_avg:91.09ms +[2025-08-22 13:29:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:29:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:29:37] [Rank 0] PRINT: step:2200/10000 val_loss:4.1115 svd_entropy: attn_qk:H=0.7138,top10E=0.35,eRank=137.1,q75/q25=57.55 attn_vo:H=0.7900,top10E=0.16,eRank=268.2,q75/q25=inf mlp_w1:H=0.9674,top10E=0.04,eRank=619.0,q75/q25=3.11 mlp_w2:H=0.9648,top10E=0.05,eRank=608.4,q75/q25=3.18 vo_prod:H=0.6121,top10E=0.34,eRank=86.9,q75/q25=inf train_time:200555ms step_avg:91.16ms +[2025-08-22 13:29:37] [Rank 0] PRINT: step:2200/10000 val_loss:4.1115 svd_entropy: attn_qk:H=0.7138,top10E=0.35,eRank=137.1,q75/q25=57.55 attn_vo:H=0.7900,top10E=0.16,eRank=268.2,q75/q25=inf mlp_w1:H=0.9674,top10E=0.04,eRank=619.0,q75/q25=3.11 mlp_w2:H=0.9648,top10E=0.05,eRank=608.4,q75/q25=3.18 vo_prod:H=0.6121,top10E=0.34,eRank=86.9,q75/q25=inf train_time:200555ms step_avg:91.16ms +[2025-08-22 13:29:38] [Rank 0] step:2201/10000 train_time:200576ms step_avg:91.13ms +[2025-08-22 13:29:38] [Rank 0] step:2201/10000 train_time:200576ms step_avg:91.13ms +[2025-08-22 13:29:39] [Rank 0] step:2221/10000 train_time:202398ms step_avg:91.13ms +[2025-08-22 13:29:39] [Rank 0] step:2221/10000 train_time:202398ms step_avg:91.13ms +[2025-08-22 13:29:41] [Rank 0] step:2241/10000 train_time:204262ms step_avg:91.15ms +[2025-08-22 13:29:41] 
[Rank 0] step:2241/10000 train_time:204262ms step_avg:91.15ms +[2025-08-22 13:29:43] [Rank 0] step:2261/10000 train_time:206132ms step_avg:91.17ms +[2025-08-22 13:29:43] [Rank 0] step:2261/10000 train_time:206132ms step_avg:91.17ms +[2025-08-22 13:29:45] [Rank 0] step:2281/10000 train_time:208006ms step_avg:91.19ms +[2025-08-22 13:29:45] [Rank 0] step:2281/10000 train_time:208006ms step_avg:91.19ms +[2025-08-22 13:29:47] [Rank 0] step:2301/10000 train_time:209880ms step_avg:91.21ms +[2025-08-22 13:29:47] [Rank 0] step:2301/10000 train_time:209880ms step_avg:91.21ms +[2025-08-22 13:29:49] [Rank 0] step:2321/10000 train_time:211755ms step_avg:91.23ms +[2025-08-22 13:29:49] [Rank 0] step:2321/10000 train_time:211755ms step_avg:91.23ms +[2025-08-22 13:29:51] [Rank 0] step:2341/10000 train_time:213629ms step_avg:91.26ms +[2025-08-22 13:29:51] [Rank 0] step:2341/10000 train_time:213629ms step_avg:91.26ms +[2025-08-22 13:29:53] [Rank 0] step:2361/10000 train_time:215505ms step_avg:91.28ms +[2025-08-22 13:29:53] [Rank 0] step:2361/10000 train_time:215505ms step_avg:91.28ms +[2025-08-22 13:29:54] [Rank 0] step:2381/10000 train_time:217381ms step_avg:91.30ms +[2025-08-22 13:29:54] [Rank 0] step:2381/10000 train_time:217381ms step_avg:91.30ms +[2025-08-22 13:29:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:29:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:30:10] [Rank 0] PRINT: step:2400/10000 val_loss:4.0403 svd_entropy: attn_qk:H=0.7153,top10E=0.34,eRank=138.3,q75/q25=57.71 attn_vo:H=0.7908,top10E=0.16,eRank=269.5,q75/q25=inf mlp_w1:H=0.9675,top10E=0.04,eRank=619.5,q75/q25=3.10 mlp_w2:H=0.9648,top10E=0.05,eRank=608.5,q75/q25=3.17 vo_prod:H=0.6122,top10E=0.34,eRank=88.0,q75/q25=inf train_time:219263ms step_avg:91.36ms +[2025-08-22 13:30:10] [Rank 0] PRINT: step:2400/10000 val_loss:4.0403 svd_entropy: attn_qk:H=0.7153,top10E=0.34,eRank=138.3,q75/q25=57.71 attn_vo:H=0.7908,top10E=0.16,eRank=269.5,q75/q25=inf mlp_w1:H=0.9675,top10E=0.04,eRank=619.5,q75/q25=3.10 mlp_w2:H=0.9648,top10E=0.05,eRank=608.5,q75/q25=3.17 vo_prod:H=0.6122,top10E=0.34,eRank=88.0,q75/q25=inf train_time:219263ms step_avg:91.36ms +[2025-08-22 13:30:10] [Rank 0] step:2401/10000 train_time:219283ms step_avg:91.33ms +[2025-08-22 13:30:10] [Rank 0] step:2401/10000 train_time:219283ms step_avg:91.33ms +[2025-08-22 13:30:12] [Rank 0] step:2421/10000 train_time:221157ms step_avg:91.35ms +[2025-08-22 13:30:12] [Rank 0] step:2421/10000 train_time:221157ms step_avg:91.35ms +[2025-08-22 13:30:14] [Rank 0] step:2441/10000 train_time:223027ms step_avg:91.37ms +[2025-08-22 13:30:14] [Rank 0] step:2441/10000 train_time:223027ms step_avg:91.37ms +[2025-08-22 13:30:16] [Rank 0] step:2461/10000 train_time:224899ms step_avg:91.39ms +[2025-08-22 13:30:16] [Rank 0] step:2461/10000 train_time:224899ms step_avg:91.39ms +[2025-08-22 13:30:18] [Rank 0] step:2481/10000 train_time:226769ms step_avg:91.40ms +[2025-08-22 13:30:18] [Rank 0] step:2481/10000 train_time:226769ms step_avg:91.40ms +[2025-08-22 13:30:20] [Rank 0] step:2501/10000 train_time:228643ms step_avg:91.42ms +[2025-08-22 13:30:20] [Rank 0] step:2501/10000 train_time:228643ms step_avg:91.42ms +[2025-08-22 13:30:22] [Rank 0] step:2521/10000 train_time:230514ms step_avg:91.44ms +[2025-08-22 13:30:22] [Rank 0] step:2521/10000 train_time:230514ms step_avg:91.44ms +[2025-08-22 13:30:23] [Rank 0] 
step:2541/10000 train_time:232389ms step_avg:91.46ms +[2025-08-22 13:30:23] [Rank 0] step:2541/10000 train_time:232389ms step_avg:91.46ms +[2025-08-22 13:30:25] [Rank 0] step:2561/10000 train_time:234262ms step_avg:91.47ms +[2025-08-22 13:30:25] [Rank 0] step:2561/10000 train_time:234262ms step_avg:91.47ms +[2025-08-22 13:30:27] [Rank 0] step:2581/10000 train_time:236222ms step_avg:91.52ms +[2025-08-22 13:30:27] [Rank 0] step:2581/10000 train_time:236222ms step_avg:91.52ms +[2025-08-22 13:30:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:30:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:30:43] [Rank 0] PRINT: step:2600/10000 val_loss:4.0123 svd_entropy: attn_qk:H=0.7169,top10E=0.34,eRank=139.5,q75/q25=58.39 attn_vo:H=0.7914,top10E=0.16,eRank=270.3,q75/q25=inf mlp_w1:H=0.9677,top10E=0.04,eRank=620.0,q75/q25=3.09 mlp_w2:H=0.9649,top10E=0.05,eRank=608.7,q75/q25=3.15 vo_prod:H=0.6130,top10E=0.34,eRank=89.0,q75/q25=inf train_time:238156ms step_avg:91.60ms +[2025-08-22 13:30:43] [Rank 0] PRINT: step:2600/10000 val_loss:4.0123 svd_entropy: attn_qk:H=0.7169,top10E=0.34,eRank=139.5,q75/q25=58.39 attn_vo:H=0.7914,top10E=0.16,eRank=270.3,q75/q25=inf mlp_w1:H=0.9677,top10E=0.04,eRank=620.0,q75/q25=3.09 mlp_w2:H=0.9649,top10E=0.05,eRank=608.7,q75/q25=3.15 vo_prod:H=0.6130,top10E=0.34,eRank=89.0,q75/q25=inf train_time:238156ms step_avg:91.60ms +[2025-08-22 13:30:43] [Rank 0] step:2601/10000 train_time:238177ms step_avg:91.57ms +[2025-08-22 13:30:43] [Rank 0] step:2601/10000 train_time:238177ms step_avg:91.57ms +[2025-08-22 13:30:45] [Rank 0] step:2621/10000 train_time:240044ms step_avg:91.58ms +[2025-08-22 13:30:45] [Rank 0] step:2621/10000 train_time:240044ms step_avg:91.58ms +[2025-08-22 13:30:47] [Rank 0] step:2641/10000 train_time:241912ms step_avg:91.60ms +[2025-08-22 13:30:47] 
[Rank 0] step:2641/10000 train_time:241912ms step_avg:91.60ms +[2025-08-22 13:30:49] [Rank 0] step:2661/10000 train_time:243780ms step_avg:91.61ms +[2025-08-22 13:30:49] [Rank 0] step:2661/10000 train_time:243780ms step_avg:91.61ms +[2025-08-22 13:30:51] [Rank 0] step:2681/10000 train_time:245649ms step_avg:91.63ms +[2025-08-22 13:30:51] [Rank 0] step:2681/10000 train_time:245649ms step_avg:91.63ms +[2025-08-22 13:30:52] [Rank 0] step:2701/10000 train_time:247518ms step_avg:91.64ms +[2025-08-22 13:30:52] [Rank 0] step:2701/10000 train_time:247518ms step_avg:91.64ms +[2025-08-22 13:30:54] [Rank 0] step:2721/10000 train_time:249388ms step_avg:91.65ms +[2025-08-22 13:30:54] [Rank 0] step:2721/10000 train_time:249388ms step_avg:91.65ms +[2025-08-22 13:30:56] [Rank 0] step:2741/10000 train_time:251259ms step_avg:91.67ms +[2025-08-22 13:30:56] [Rank 0] step:2741/10000 train_time:251259ms step_avg:91.67ms +[2025-08-22 13:30:58] [Rank 0] step:2761/10000 train_time:253131ms step_avg:91.68ms +[2025-08-22 13:30:58] [Rank 0] step:2761/10000 train_time:253131ms step_avg:91.68ms +[2025-08-22 13:31:00] [Rank 0] step:2781/10000 train_time:255002ms step_avg:91.69ms +[2025-08-22 13:31:00] [Rank 0] step:2781/10000 train_time:255002ms step_avg:91.69ms +[2025-08-22 13:31:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:31:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:31:16] [Rank 0] PRINT: step:2800/10000 val_loss:3.9917 svd_entropy: attn_qk:H=0.7190,top10E=0.34,eRank=141.0,q75/q25=58.52 attn_vo:H=0.7920,top10E=0.16,eRank=271.2,q75/q25=inf mlp_w1:H=0.9678,top10E=0.04,eRank=620.5,q75/q25=3.08 mlp_w2:H=0.9649,top10E=0.05,eRank=608.8,q75/q25=3.14 vo_prod:H=0.6131,top10E=0.34,eRank=89.5,q75/q25=inf train_time:256879ms step_avg:91.74ms +[2025-08-22 13:31:16] [Rank 0] PRINT: step:2800/10000 val_loss:3.9917 svd_entropy: attn_qk:H=0.7190,top10E=0.34,eRank=141.0,q75/q25=58.52 attn_vo:H=0.7920,top10E=0.16,eRank=271.2,q75/q25=inf mlp_w1:H=0.9678,top10E=0.04,eRank=620.5,q75/q25=3.08 mlp_w2:H=0.9649,top10E=0.05,eRank=608.8,q75/q25=3.14 vo_prod:H=0.6131,top10E=0.34,eRank=89.5,q75/q25=inf train_time:256879ms step_avg:91.74ms +[2025-08-22 13:31:16] [Rank 0] step:2801/10000 train_time:256900ms step_avg:91.72ms +[2025-08-22 13:31:16] [Rank 0] step:2801/10000 train_time:256900ms step_avg:91.72ms +[2025-08-22 13:31:18] [Rank 0] step:2821/10000 train_time:258758ms step_avg:91.73ms +[2025-08-22 13:31:18] [Rank 0] step:2821/10000 train_time:258758ms step_avg:91.73ms +[2025-08-22 13:31:20] [Rank 0] step:2841/10000 train_time:260624ms step_avg:91.74ms +[2025-08-22 13:31:20] [Rank 0] step:2841/10000 train_time:260624ms step_avg:91.74ms +[2025-08-22 13:31:21] [Rank 0] step:2861/10000 train_time:262492ms step_avg:91.75ms +[2025-08-22 13:31:21] [Rank 0] step:2861/10000 train_time:262492ms step_avg:91.75ms +[2025-08-22 13:31:23] [Rank 0] step:2881/10000 train_time:264362ms step_avg:91.76ms +[2025-08-22 13:31:23] [Rank 0] step:2881/10000 train_time:264362ms step_avg:91.76ms +[2025-08-22 13:31:25] [Rank 0] step:2901/10000 train_time:266230ms step_avg:91.77ms +[2025-08-22 13:31:25] [Rank 0] step:2901/10000 train_time:266230ms step_avg:91.77ms +[2025-08-22 13:31:27] [Rank 0] step:2921/10000 train_time:268100ms step_avg:91.78ms +[2025-08-22 13:31:27] [Rank 0] step:2921/10000 train_time:268100ms step_avg:91.78ms +[2025-08-22 13:31:29] [Rank 0] 
step:2941/10000 train_time:269970ms step_avg:91.80ms +[2025-08-22 13:31:29] [Rank 0] step:2941/10000 train_time:269970ms step_avg:91.80ms +[2025-08-22 13:31:31] [Rank 0] step:2961/10000 train_time:271912ms step_avg:91.83ms +[2025-08-22 13:31:31] [Rank 0] step:2961/10000 train_time:271912ms step_avg:91.83ms +[2025-08-22 13:31:33] [Rank 0] step:2981/10000 train_time:273877ms step_avg:91.87ms +[2025-08-22 13:31:33] [Rank 0] step:2981/10000 train_time:273877ms step_avg:91.87ms +[2025-08-22 13:31:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:31:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:31:48] [Rank 0] PRINT: step:3000/10000 val_loss:3.9598 svd_entropy: attn_qk:H=0.7205,top10E=0.34,eRank=142.1,q75/q25=58.83 attn_vo:H=0.7927,top10E=0.16,eRank=272.2,q75/q25=inf mlp_w1:H=0.9679,top10E=0.04,eRank=621.0,q75/q25=3.07 mlp_w2:H=0.9650,top10E=0.05,eRank=609.0,q75/q25=3.14 vo_prod:H=0.6140,top10E=0.33,eRank=90.5,q75/q25=inf train_time:275761ms step_avg:91.92ms +[2025-08-22 13:31:48] [Rank 0] PRINT: step:3000/10000 val_loss:3.9598 svd_entropy: attn_qk:H=0.7205,top10E=0.34,eRank=142.1,q75/q25=58.83 attn_vo:H=0.7927,top10E=0.16,eRank=272.2,q75/q25=inf mlp_w1:H=0.9679,top10E=0.04,eRank=621.0,q75/q25=3.07 mlp_w2:H=0.9650,top10E=0.05,eRank=609.0,q75/q25=3.14 vo_prod:H=0.6140,top10E=0.33,eRank=90.5,q75/q25=inf train_time:275761ms step_avg:91.92ms +[2025-08-22 13:31:48] [Rank 0] step:3001/10000 train_time:275781ms step_avg:91.90ms +[2025-08-22 13:31:48] [Rank 0] step:3001/10000 train_time:275781ms step_avg:91.90ms +[2025-08-22 13:31:50] [Rank 0] step:3021/10000 train_time:277659ms step_avg:91.91ms +[2025-08-22 13:31:50] [Rank 0] step:3021/10000 train_time:277659ms step_avg:91.91ms +[2025-08-22 13:31:52] [Rank 0] step:3041/10000 train_time:279534ms step_avg:91.92ms +[2025-08-22 13:31:52] 
[Rank 0] step:3041/10000 train_time:279534ms step_avg:91.92ms +[2025-08-22 13:31:54] [Rank 0] step:3061/10000 train_time:281410ms step_avg:91.93ms +[2025-08-22 13:31:54] [Rank 0] step:3061/10000 train_time:281410ms step_avg:91.93ms +[2025-08-22 13:31:56] [Rank 0] step:3081/10000 train_time:283286ms step_avg:91.95ms +[2025-08-22 13:31:56] [Rank 0] step:3081/10000 train_time:283286ms step_avg:91.95ms +[2025-08-22 13:31:58] [Rank 0] step:3101/10000 train_time:285166ms step_avg:91.96ms +[2025-08-22 13:31:58] [Rank 0] step:3101/10000 train_time:285166ms step_avg:91.96ms +[2025-08-22 13:32:00] [Rank 0] step:3121/10000 train_time:287045ms step_avg:91.97ms +[2025-08-22 13:32:00] [Rank 0] step:3121/10000 train_time:287045ms step_avg:91.97ms +[2025-08-22 13:32:02] [Rank 0] step:3141/10000 train_time:288923ms step_avg:91.98ms +[2025-08-22 13:32:02] [Rank 0] step:3141/10000 train_time:288923ms step_avg:91.98ms +[2025-08-22 13:32:03] [Rank 0] step:3161/10000 train_time:290805ms step_avg:92.00ms +[2025-08-22 13:32:03] [Rank 0] step:3161/10000 train_time:290805ms step_avg:92.00ms +[2025-08-22 13:32:05] [Rank 0] step:3181/10000 train_time:292685ms step_avg:92.01ms +[2025-08-22 13:32:05] [Rank 0] step:3181/10000 train_time:292685ms step_avg:92.01ms +[2025-08-22 13:32:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:32:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:32:21] [Rank 0] PRINT: step:3200/10000 val_loss:3.9348 svd_entropy: attn_qk:H=0.7219,top10E=0.34,eRank=143.2,q75/q25=58.68 attn_vo:H=0.7935,top10E=0.16,eRank=273.2,q75/q25=inf mlp_w1:H=0.9680,top10E=0.04,eRank=621.4,q75/q25=3.06 mlp_w2:H=0.9650,top10E=0.05,eRank=609.1,q75/q25=3.13 vo_prod:H=0.6154,top10E=0.33,eRank=91.5,q75/q25=inf train_time:294572ms step_avg:92.05ms +[2025-08-22 13:32:21] [Rank 0] PRINT: step:3200/10000 val_loss:3.9348 svd_entropy: attn_qk:H=0.7219,top10E=0.34,eRank=143.2,q75/q25=58.68 attn_vo:H=0.7935,top10E=0.16,eRank=273.2,q75/q25=inf mlp_w1:H=0.9680,top10E=0.04,eRank=621.4,q75/q25=3.06 mlp_w2:H=0.9650,top10E=0.05,eRank=609.1,q75/q25=3.13 vo_prod:H=0.6154,top10E=0.33,eRank=91.5,q75/q25=inf train_time:294572ms step_avg:92.05ms +[2025-08-22 13:32:21] [Rank 0] step:3201/10000 train_time:294593ms step_avg:92.03ms +[2025-08-22 13:32:21] [Rank 0] step:3201/10000 train_time:294593ms step_avg:92.03ms +[2025-08-22 13:32:23] [Rank 0] step:3221/10000 train_time:296459ms step_avg:92.04ms +[2025-08-22 13:32:23] [Rank 0] step:3221/10000 train_time:296459ms step_avg:92.04ms +[2025-08-22 13:32:25] [Rank 0] step:3241/10000 train_time:298332ms step_avg:92.05ms +[2025-08-22 13:32:25] [Rank 0] step:3241/10000 train_time:298332ms step_avg:92.05ms +[2025-08-22 13:32:27] [Rank 0] step:3261/10000 train_time:300206ms step_avg:92.06ms +[2025-08-22 13:32:27] [Rank 0] step:3261/10000 train_time:300206ms step_avg:92.06ms +[2025-08-22 13:32:29] [Rank 0] step:3281/10000 train_time:302083ms step_avg:92.07ms +[2025-08-22 13:32:29] [Rank 0] step:3281/10000 train_time:302083ms step_avg:92.07ms +[2025-08-22 13:32:31] [Rank 0] step:3301/10000 train_time:303956ms step_avg:92.08ms +[2025-08-22 13:32:31] [Rank 0] step:3301/10000 train_time:303956ms step_avg:92.08ms +[2025-08-22 13:32:32] [Rank 0] step:3321/10000 train_time:305832ms step_avg:92.09ms +[2025-08-22 13:32:32] [Rank 0] step:3321/10000 train_time:305832ms step_avg:92.09ms +[2025-08-22 13:32:34] [Rank 0] 
step:3341/10000 train_time:307772ms step_avg:92.12ms +[2025-08-22 13:32:34] [Rank 0] step:3341/10000 train_time:307772ms step_avg:92.12ms +[2025-08-22 13:32:36] [Rank 0] step:3361/10000 train_time:309714ms step_avg:92.15ms +[2025-08-22 13:32:36] [Rank 0] step:3361/10000 train_time:309714ms step_avg:92.15ms +[2025-08-22 13:32:38] [Rank 0] step:3381/10000 train_time:311592ms step_avg:92.16ms +[2025-08-22 13:32:38] [Rank 0] step:3381/10000 train_time:311592ms step_avg:92.16ms +[2025-08-22 13:32:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:32:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:32:54] [Rank 0] PRINT: step:3400/10000 val_loss:3.9105 svd_entropy: attn_qk:H=0.7234,top10E=0.33,eRank=144.4,q75/q25=59.18 attn_vo:H=0.7941,top10E=0.16,eRank=274.1,q75/q25=inf mlp_w1:H=0.9681,top10E=0.04,eRank=621.8,q75/q25=3.05 mlp_w2:H=0.9650,top10E=0.05,eRank=609.3,q75/q25=3.12 vo_prod:H=0.6161,top10E=0.33,eRank=92.3,q75/q25=inf train_time:313477ms step_avg:92.20ms +[2025-08-22 13:32:54] [Rank 0] PRINT: step:3400/10000 val_loss:3.9105 svd_entropy: attn_qk:H=0.7234,top10E=0.33,eRank=144.4,q75/q25=59.18 attn_vo:H=0.7941,top10E=0.16,eRank=274.1,q75/q25=inf mlp_w1:H=0.9681,top10E=0.04,eRank=621.8,q75/q25=3.05 mlp_w2:H=0.9650,top10E=0.05,eRank=609.3,q75/q25=3.12 vo_prod:H=0.6161,top10E=0.33,eRank=92.3,q75/q25=inf train_time:313477ms step_avg:92.20ms +[2025-08-22 13:32:54] [Rank 0] step:3401/10000 train_time:313498ms step_avg:92.18ms +[2025-08-22 13:32:54] [Rank 0] step:3401/10000 train_time:313498ms step_avg:92.18ms +[2025-08-22 13:32:56] [Rank 0] step:3421/10000 train_time:315375ms step_avg:92.19ms +[2025-08-22 13:32:56] [Rank 0] step:3421/10000 train_time:315375ms step_avg:92.19ms +[2025-08-22 13:32:58] [Rank 0] step:3441/10000 train_time:317248ms step_avg:92.20ms +[2025-08-22 13:32:58] 
[Rank 0] step:3441/10000 train_time:317248ms step_avg:92.20ms +[2025-08-22 13:33:00] [Rank 0] step:3461/10000 train_time:319121ms step_avg:92.20ms +[2025-08-22 13:33:00] [Rank 0] step:3461/10000 train_time:319121ms step_avg:92.20ms +[2025-08-22 13:33:02] [Rank 0] step:3481/10000 train_time:320995ms step_avg:92.21ms +[2025-08-22 13:33:02] [Rank 0] step:3481/10000 train_time:320995ms step_avg:92.21ms +[2025-08-22 13:33:03] [Rank 0] step:3501/10000 train_time:322873ms step_avg:92.22ms +[2025-08-22 13:33:03] [Rank 0] step:3501/10000 train_time:322873ms step_avg:92.22ms +[2025-08-22 13:33:05] [Rank 0] step:3521/10000 train_time:324753ms step_avg:92.23ms +[2025-08-22 13:33:05] [Rank 0] step:3521/10000 train_time:324753ms step_avg:92.23ms +[2025-08-22 13:33:07] [Rank 0] step:3541/10000 train_time:326632ms step_avg:92.24ms +[2025-08-22 13:33:07] [Rank 0] step:3541/10000 train_time:326632ms step_avg:92.24ms +[2025-08-22 13:33:09] [Rank 0] step:3561/10000 train_time:328509ms step_avg:92.25ms +[2025-08-22 13:33:09] [Rank 0] step:3561/10000 train_time:328509ms step_avg:92.25ms +[2025-08-22 13:33:11] [Rank 0] step:3581/10000 train_time:330388ms step_avg:92.26ms +[2025-08-22 13:33:11] [Rank 0] step:3581/10000 train_time:330388ms step_avg:92.26ms +[2025-08-22 13:33:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:33:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:33:27] [Rank 0] PRINT: step:3600/10000 val_loss:3.9022 svd_entropy: attn_qk:H=0.7247,top10E=0.33,eRank=145.4,q75/q25=59.50 attn_vo:H=0.7947,top10E=0.16,eRank=274.8,q75/q25=inf mlp_w1:H=0.9682,top10E=0.04,eRank=622.2,q75/q25=3.04 mlp_w2:H=0.9651,top10E=0.05,eRank=609.5,q75/q25=3.11 vo_prod:H=0.6173,top10E=0.33,eRank=93.1,q75/q25=inf train_time:332273ms step_avg:92.30ms +[2025-08-22 13:33:27] [Rank 0] PRINT: step:3600/10000 val_loss:3.9022 svd_entropy: attn_qk:H=0.7247,top10E=0.33,eRank=145.4,q75/q25=59.50 attn_vo:H=0.7947,top10E=0.16,eRank=274.8,q75/q25=inf mlp_w1:H=0.9682,top10E=0.04,eRank=622.2,q75/q25=3.04 mlp_w2:H=0.9651,top10E=0.05,eRank=609.5,q75/q25=3.11 vo_prod:H=0.6173,top10E=0.33,eRank=93.1,q75/q25=inf train_time:332273ms step_avg:92.30ms +[2025-08-22 13:33:27] [Rank 0] step:3601/10000 train_time:332294ms step_avg:92.28ms +[2025-08-22 13:33:27] [Rank 0] step:3601/10000 train_time:332294ms step_avg:92.28ms +[2025-08-22 13:33:29] [Rank 0] step:3621/10000 train_time:334168ms step_avg:92.29ms +[2025-08-22 13:33:29] [Rank 0] step:3621/10000 train_time:334168ms step_avg:92.29ms +[2025-08-22 13:33:30] [Rank 0] step:3641/10000 train_time:336043ms step_avg:92.29ms +[2025-08-22 13:33:30] [Rank 0] step:3641/10000 train_time:336043ms step_avg:92.29ms +[2025-08-22 13:33:32] [Rank 0] step:3661/10000 train_time:337919ms step_avg:92.30ms +[2025-08-22 13:33:32] [Rank 0] step:3661/10000 train_time:337919ms step_avg:92.30ms +[2025-08-22 13:33:34] [Rank 0] step:3681/10000 train_time:339798ms step_avg:92.31ms +[2025-08-22 13:33:34] [Rank 0] step:3681/10000 train_time:339798ms step_avg:92.31ms +[2025-08-22 13:33:36] [Rank 0] step:3701/10000 train_time:341679ms step_avg:92.32ms +[2025-08-22 13:33:36] [Rank 0] step:3701/10000 train_time:341679ms step_avg:92.32ms +[2025-08-22 13:33:38] [Rank 0] step:3721/10000 train_time:343686ms step_avg:92.36ms +[2025-08-22 13:33:38] [Rank 0] step:3721/10000 train_time:343686ms step_avg:92.36ms +[2025-08-22 13:33:40] [Rank 0] 
step:3741/10000 train_time:345671ms step_avg:92.40ms +[2025-08-22 13:33:40] [Rank 0] step:3741/10000 train_time:345671ms step_avg:92.40ms +[2025-08-22 13:33:42] [Rank 0] step:3761/10000 train_time:347587ms step_avg:92.42ms +[2025-08-22 13:33:42] [Rank 0] step:3761/10000 train_time:347587ms step_avg:92.42ms +[2025-08-22 13:33:44] [Rank 0] step:3781/10000 train_time:349507ms step_avg:92.44ms +[2025-08-22 13:33:44] [Rank 0] step:3781/10000 train_time:349507ms step_avg:92.44ms +[2025-08-22 13:33:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:33:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:34:00] [Rank 0] PRINT: step:3800/10000 val_loss:3.8688 svd_entropy: attn_qk:H=0.7262,top10E=0.33,eRank=146.5,q75/q25=59.92 attn_vo:H=0.7953,top10E=0.16,eRank=275.7,q75/q25=inf mlp_w1:H=0.9683,top10E=0.04,eRank=622.6,q75/q25=3.04 mlp_w2:H=0.9651,top10E=0.05,eRank=609.6,q75/q25=3.10 vo_prod:H=0.6183,top10E=0.33,eRank=93.8,q75/q25=inf train_time:351431ms step_avg:92.48ms +[2025-08-22 13:34:00] [Rank 0] PRINT: step:3800/10000 val_loss:3.8688 svd_entropy: attn_qk:H=0.7262,top10E=0.33,eRank=146.5,q75/q25=59.92 attn_vo:H=0.7953,top10E=0.16,eRank=275.7,q75/q25=inf mlp_w1:H=0.9683,top10E=0.04,eRank=622.6,q75/q25=3.04 mlp_w2:H=0.9651,top10E=0.05,eRank=609.6,q75/q25=3.10 vo_prod:H=0.6183,top10E=0.33,eRank=93.8,q75/q25=inf train_time:351431ms step_avg:92.48ms +[2025-08-22 13:34:00] [Rank 0] step:3801/10000 train_time:351451ms step_avg:92.46ms +[2025-08-22 13:34:00] [Rank 0] step:3801/10000 train_time:351451ms step_avg:92.46ms +[2025-08-22 13:34:02] [Rank 0] step:3821/10000 train_time:353353ms step_avg:92.48ms +[2025-08-22 13:34:02] [Rank 0] step:3821/10000 train_time:353353ms step_avg:92.48ms +[2025-08-22 13:34:04] [Rank 0] step:3841/10000 train_time:355272ms step_avg:92.49ms +[2025-08-22 13:34:04] 
[Rank 0] step:3841/10000 train_time:355272ms step_avg:92.49ms +[2025-08-22 13:34:05] [Rank 0] step:3861/10000 train_time:357189ms step_avg:92.51ms +[2025-08-22 13:34:05] [Rank 0] step:3861/10000 train_time:357189ms step_avg:92.51ms +[2025-08-22 13:34:07] [Rank 0] step:3881/10000 train_time:359105ms step_avg:92.53ms +[2025-08-22 13:34:07] [Rank 0] step:3881/10000 train_time:359105ms step_avg:92.53ms +[2025-08-22 13:34:09] [Rank 0] step:3901/10000 train_time:361022ms step_avg:92.55ms +[2025-08-22 13:34:09] [Rank 0] step:3901/10000 train_time:361022ms step_avg:92.55ms +[2025-08-22 13:34:11] [Rank 0] step:3921/10000 train_time:362940ms step_avg:92.56ms +[2025-08-22 13:34:11] [Rank 0] step:3921/10000 train_time:362940ms step_avg:92.56ms +[2025-08-22 13:34:13] [Rank 0] step:3941/10000 train_time:364857ms step_avg:92.58ms +[2025-08-22 13:34:13] [Rank 0] step:3941/10000 train_time:364857ms step_avg:92.58ms +[2025-08-22 13:34:15] [Rank 0] step:3961/10000 train_time:366775ms step_avg:92.60ms +[2025-08-22 13:34:15] [Rank 0] step:3961/10000 train_time:366775ms step_avg:92.60ms +[2025-08-22 13:34:17] [Rank 0] step:3981/10000 train_time:368696ms step_avg:92.61ms +[2025-08-22 13:34:17] [Rank 0] step:3981/10000 train_time:368696ms step_avg:92.61ms +[2025-08-22 13:34:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:34:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:34:33] [Rank 0] PRINT: step:4000/10000 val_loss:3.8510 svd_entropy: attn_qk:H=0.7275,top10E=0.33,eRank=147.6,q75/q25=59.86 attn_vo:H=0.7961,top10E=0.16,eRank=276.6,q75/q25=inf mlp_w1:H=0.9684,top10E=0.04,eRank=622.9,q75/q25=3.03 mlp_w2:H=0.9651,top10E=0.05,eRank=609.7,q75/q25=3.10 vo_prod:H=0.6201,top10E=0.33,eRank=95.0,q75/q25=inf train_time:370621ms step_avg:92.66ms +[2025-08-22 13:34:33] [Rank 0] PRINT: step:4000/10000 val_loss:3.8510 svd_entropy: attn_qk:H=0.7275,top10E=0.33,eRank=147.6,q75/q25=59.86 attn_vo:H=0.7961,top10E=0.16,eRank=276.6,q75/q25=inf mlp_w1:H=0.9684,top10E=0.04,eRank=622.9,q75/q25=3.03 mlp_w2:H=0.9651,top10E=0.05,eRank=609.7,q75/q25=3.10 vo_prod:H=0.6201,top10E=0.33,eRank=95.0,q75/q25=inf train_time:370621ms step_avg:92.66ms +[2025-08-22 13:34:33] [Rank 0] step:4001/10000 train_time:370642ms step_avg:92.64ms +[2025-08-22 13:34:33] [Rank 0] step:4001/10000 train_time:370642ms step_avg:92.64ms +[2025-08-22 13:34:35] [Rank 0] step:4021/10000 train_time:372562ms step_avg:92.65ms +[2025-08-22 13:34:35] [Rank 0] step:4021/10000 train_time:372562ms step_avg:92.65ms +[2025-08-22 13:34:37] [Rank 0] step:4041/10000 train_time:374474ms step_avg:92.67ms +[2025-08-22 13:34:37] [Rank 0] step:4041/10000 train_time:374474ms step_avg:92.67ms +[2025-08-22 13:34:39] [Rank 0] step:4061/10000 train_time:376387ms step_avg:92.68ms +[2025-08-22 13:34:39] [Rank 0] step:4061/10000 train_time:376387ms step_avg:92.68ms +[2025-08-22 13:34:41] [Rank 0] step:4081/10000 train_time:378348ms step_avg:92.71ms +[2025-08-22 13:34:41] [Rank 0] step:4081/10000 train_time:378348ms step_avg:92.71ms +[2025-08-22 13:34:43] [Rank 0] step:4101/10000 train_time:380352ms step_avg:92.75ms +[2025-08-22 13:34:43] [Rank 0] step:4101/10000 train_time:380352ms step_avg:92.75ms +[2025-08-22 13:34:45] [Rank 0] step:4121/10000 train_time:382265ms step_avg:92.76ms +[2025-08-22 13:34:45] [Rank 0] step:4121/10000 train_time:382265ms step_avg:92.76ms +[2025-08-22 13:34:46] [Rank 0] 
step:4141/10000 train_time:384180ms step_avg:92.77ms +[2025-08-22 13:34:46] [Rank 0] step:4141/10000 train_time:384180ms step_avg:92.77ms +[2025-08-22 13:34:48] [Rank 0] step:4161/10000 train_time:386095ms step_avg:92.79ms +[2025-08-22 13:34:48] [Rank 0] step:4161/10000 train_time:386095ms step_avg:92.79ms +[2025-08-22 13:34:50] [Rank 0] step:4181/10000 train_time:388010ms step_avg:92.80ms +[2025-08-22 13:34:50] [Rank 0] step:4181/10000 train_time:388010ms step_avg:92.80ms +[2025-08-22 13:34:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:34:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:35:06] [Rank 0] PRINT: step:4200/10000 val_loss:3.8442 svd_entropy: attn_qk:H=0.7287,top10E=0.33,eRank=148.5,q75/q25=59.97 attn_vo:H=0.7968,top10E=0.15,eRank=277.5,q75/q25=inf mlp_w1:H=0.9685,top10E=0.04,eRank=623.3,q75/q25=3.02 mlp_w2:H=0.9652,top10E=0.05,eRank=609.9,q75/q25=3.09 vo_prod:H=0.6214,top10E=0.32,eRank=95.8,q75/q25=inf train_time:389930ms step_avg:92.84ms +[2025-08-22 13:35:06] [Rank 0] PRINT: step:4200/10000 val_loss:3.8442 svd_entropy: attn_qk:H=0.7287,top10E=0.33,eRank=148.5,q75/q25=59.97 attn_vo:H=0.7968,top10E=0.15,eRank=277.5,q75/q25=inf mlp_w1:H=0.9685,top10E=0.04,eRank=623.3,q75/q25=3.02 mlp_w2:H=0.9652,top10E=0.05,eRank=609.9,q75/q25=3.09 vo_prod:H=0.6214,top10E=0.32,eRank=95.8,q75/q25=inf train_time:389930ms step_avg:92.84ms +[2025-08-22 13:35:06] [Rank 0] step:4201/10000 train_time:389951ms step_avg:92.82ms +[2025-08-22 13:35:06] [Rank 0] step:4201/10000 train_time:389951ms step_avg:92.82ms +[2025-08-22 13:35:08] [Rank 0] step:4221/10000 train_time:391869ms step_avg:92.84ms +[2025-08-22 13:35:08] [Rank 0] step:4221/10000 train_time:391869ms step_avg:92.84ms +[2025-08-22 13:35:10] [Rank 0] step:4241/10000 train_time:393784ms step_avg:92.85ms +[2025-08-22 13:35:10] 
[Rank 0] step:4241/10000 train_time:393784ms step_avg:92.85ms +[2025-08-22 13:35:12] [Rank 0] step:4261/10000 train_time:395695ms step_avg:92.86ms +[2025-08-22 13:35:12] [Rank 0] step:4261/10000 train_time:395695ms step_avg:92.86ms +[2025-08-22 13:35:14] [Rank 0] step:4281/10000 train_time:397608ms step_avg:92.88ms +[2025-08-22 13:35:14] [Rank 0] step:4281/10000 train_time:397608ms step_avg:92.88ms +[2025-08-22 13:35:16] [Rank 0] step:4301/10000 train_time:399519ms step_avg:92.89ms +[2025-08-22 13:35:16] [Rank 0] step:4301/10000 train_time:399519ms step_avg:92.89ms +[2025-08-22 13:35:17] [Rank 0] step:4321/10000 train_time:401433ms step_avg:92.90ms +[2025-08-22 13:35:17] [Rank 0] step:4321/10000 train_time:401433ms step_avg:92.90ms +[2025-08-22 13:35:19] [Rank 0] step:4341/10000 train_time:403345ms step_avg:92.92ms +[2025-08-22 13:35:19] [Rank 0] step:4341/10000 train_time:403345ms step_avg:92.92ms +[2025-08-22 13:35:21] [Rank 0] step:4361/10000 train_time:405258ms step_avg:92.93ms +[2025-08-22 13:35:21] [Rank 0] step:4361/10000 train_time:405258ms step_avg:92.93ms +[2025-08-22 13:35:23] [Rank 0] step:4381/10000 train_time:407170ms step_avg:92.94ms +[2025-08-22 13:35:23] [Rank 0] step:4381/10000 train_time:407170ms step_avg:92.94ms +[2025-08-22 13:35:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:35:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:35:39] [Rank 0] PRINT: step:4400/10000 val_loss:3.8253 svd_entropy: attn_qk:H=0.7297,top10E=0.32,eRank=149.4,q75/q25=59.73 attn_vo:H=0.7975,top10E=0.15,eRank=278.4,q75/q25=inf mlp_w1:H=0.9685,top10E=0.04,eRank=623.6,q75/q25=3.01 mlp_w2:H=0.9652,top10E=0.05,eRank=610.1,q75/q25=3.08 vo_prod:H=0.6231,top10E=0.32,eRank=96.9,q75/q25=inf train_time:409089ms step_avg:92.97ms +[2025-08-22 13:35:39] [Rank 0] PRINT: step:4400/10000 val_loss:3.8253 svd_entropy: attn_qk:H=0.7297,top10E=0.32,eRank=149.4,q75/q25=59.73 attn_vo:H=0.7975,top10E=0.15,eRank=278.4,q75/q25=inf mlp_w1:H=0.9685,top10E=0.04,eRank=623.6,q75/q25=3.01 mlp_w2:H=0.9652,top10E=0.05,eRank=610.1,q75/q25=3.08 vo_prod:H=0.6231,top10E=0.32,eRank=96.9,q75/q25=inf train_time:409089ms step_avg:92.97ms +[2025-08-22 13:35:39] [Rank 0] step:4401/10000 train_time:409109ms step_avg:92.96ms +[2025-08-22 13:35:39] [Rank 0] step:4401/10000 train_time:409109ms step_avg:92.96ms +[2025-08-22 13:35:41] [Rank 0] step:4421/10000 train_time:411021ms step_avg:92.97ms +[2025-08-22 13:35:41] [Rank 0] step:4421/10000 train_time:411021ms step_avg:92.97ms +[2025-08-22 13:35:43] [Rank 0] step:4441/10000 train_time:413008ms step_avg:93.00ms +[2025-08-22 13:35:43] [Rank 0] step:4441/10000 train_time:413008ms step_avg:93.00ms +[2025-08-22 13:35:45] [Rank 0] step:4461/10000 train_time:414979ms step_avg:93.02ms +[2025-08-22 13:35:45] [Rank 0] step:4461/10000 train_time:414979ms step_avg:93.02ms +[2025-08-22 13:35:47] [Rank 0] step:4481/10000 train_time:416897ms step_avg:93.04ms +[2025-08-22 13:35:47] [Rank 0] step:4481/10000 train_time:416897ms step_avg:93.04ms +[2025-08-22 13:35:49] [Rank 0] step:4501/10000 train_time:418814ms step_avg:93.05ms +[2025-08-22 13:35:49] [Rank 0] step:4501/10000 train_time:418814ms step_avg:93.05ms +[2025-08-22 13:35:51] [Rank 0] step:4521/10000 train_time:420731ms step_avg:93.06ms +[2025-08-22 13:35:51] [Rank 0] step:4521/10000 train_time:420731ms step_avg:93.06ms +[2025-08-22 13:35:53] [Rank 0] 
step:4541/10000 train_time:422651ms step_avg:93.07ms +[2025-08-22 13:35:53] [Rank 0] step:4541/10000 train_time:422651ms step_avg:93.07ms +[2025-08-22 13:35:55] [Rank 0] step:4561/10000 train_time:424572ms step_avg:93.09ms +[2025-08-22 13:35:55] [Rank 0] step:4561/10000 train_time:424572ms step_avg:93.09ms +[2025-08-22 13:35:57] [Rank 0] step:4581/10000 train_time:426495ms step_avg:93.10ms +[2025-08-22 13:35:57] [Rank 0] step:4581/10000 train_time:426495ms step_avg:93.10ms +[2025-08-22 13:35:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:35:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:36:12] [Rank 0] PRINT: step:4600/10000 val_loss:3.8049 svd_entropy: attn_qk:H=0.7309,top10E=0.32,eRank=150.3,q75/q25=60.12 attn_vo:H=0.7981,top10E=0.15,eRank=279.2,q75/q25=inf mlp_w1:H=0.9686,top10E=0.04,eRank=623.8,q75/q25=3.01 mlp_w2:H=0.9652,top10E=0.05,eRank=610.2,q75/q25=3.08 vo_prod:H=0.6245,top10E=0.32,eRank=97.8,q75/q25=inf train_time:428423ms step_avg:93.14ms +[2025-08-22 13:36:12] [Rank 0] PRINT: step:4600/10000 val_loss:3.8049 svd_entropy: attn_qk:H=0.7309,top10E=0.32,eRank=150.3,q75/q25=60.12 attn_vo:H=0.7981,top10E=0.15,eRank=279.2,q75/q25=inf mlp_w1:H=0.9686,top10E=0.04,eRank=623.8,q75/q25=3.01 mlp_w2:H=0.9652,top10E=0.05,eRank=610.2,q75/q25=3.08 vo_prod:H=0.6245,top10E=0.32,eRank=97.8,q75/q25=inf train_time:428423ms step_avg:93.14ms +[2025-08-22 13:36:12] [Rank 0] step:4601/10000 train_time:428444ms step_avg:93.12ms +[2025-08-22 13:36:12] [Rank 0] step:4601/10000 train_time:428444ms step_avg:93.12ms +[2025-08-22 13:36:14] [Rank 0] step:4621/10000 train_time:430366ms step_avg:93.13ms +[2025-08-22 13:36:14] [Rank 0] step:4621/10000 train_time:430366ms step_avg:93.13ms +[2025-08-22 13:36:16] [Rank 0] step:4641/10000 train_time:432286ms step_avg:93.15ms +[2025-08-22 13:36:16] 
[Rank 0] step:4641/10000 train_time:432286ms step_avg:93.15ms +[2025-08-22 13:36:18] [Rank 0] step:4661/10000 train_time:434208ms step_avg:93.16ms +[2025-08-22 13:36:18] [Rank 0] step:4661/10000 train_time:434208ms step_avg:93.16ms +[2025-08-22 13:36:20] [Rank 0] step:4681/10000 train_time:436131ms step_avg:93.17ms +[2025-08-22 13:36:20] [Rank 0] step:4681/10000 train_time:436131ms step_avg:93.17ms +[2025-08-22 13:36:22] [Rank 0] step:4701/10000 train_time:438054ms step_avg:93.18ms +[2025-08-22 13:36:22] [Rank 0] step:4701/10000 train_time:438054ms step_avg:93.18ms +[2025-08-22 13:36:24] [Rank 0] step:4721/10000 train_time:439976ms step_avg:93.20ms +[2025-08-22 13:36:24] [Rank 0] step:4721/10000 train_time:439976ms step_avg:93.20ms +[2025-08-22 13:36:26] [Rank 0] step:4741/10000 train_time:441900ms step_avg:93.21ms +[2025-08-22 13:36:26] [Rank 0] step:4741/10000 train_time:441900ms step_avg:93.21ms +[2025-08-22 13:36:28] [Rank 0] step:4761/10000 train_time:443826ms step_avg:93.22ms +[2025-08-22 13:36:28] [Rank 0] step:4761/10000 train_time:443826ms step_avg:93.22ms +[2025-08-22 13:36:30] [Rank 0] step:4781/10000 train_time:445751ms step_avg:93.23ms +[2025-08-22 13:36:30] [Rank 0] step:4781/10000 train_time:445751ms step_avg:93.23ms +[2025-08-22 13:36:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:36:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:36:46] [Rank 0] PRINT: step:4800/10000 val_loss:3.7978 svd_entropy: attn_qk:H=0.7322,top10E=0.32,eRank=151.3,q75/q25=59.85 attn_vo:H=0.7987,top10E=0.15,eRank=279.9,q75/q25=inf mlp_w1:H=0.9687,top10E=0.04,eRank=624.1,q75/q25=3.00 mlp_w2:H=0.9653,top10E=0.05,eRank=610.3,q75/q25=3.07 vo_prod:H=0.6257,top10E=0.32,eRank=98.5,q75/q25=inf train_time:447683ms step_avg:93.27ms +[2025-08-22 13:36:46] [Rank 0] PRINT: step:4800/10000 val_loss:3.7978 svd_entropy: attn_qk:H=0.7322,top10E=0.32,eRank=151.3,q75/q25=59.85 attn_vo:H=0.7987,top10E=0.15,eRank=279.9,q75/q25=inf mlp_w1:H=0.9687,top10E=0.04,eRank=624.1,q75/q25=3.00 mlp_w2:H=0.9653,top10E=0.05,eRank=610.3,q75/q25=3.07 vo_prod:H=0.6257,top10E=0.32,eRank=98.5,q75/q25=inf train_time:447683ms step_avg:93.27ms +[2025-08-22 13:36:46] [Rank 0] step:4801/10000 train_time:447705ms step_avg:93.25ms +[2025-08-22 13:36:46] [Rank 0] step:4801/10000 train_time:447705ms step_avg:93.25ms +[2025-08-22 13:36:48] [Rank 0] step:4821/10000 train_time:449689ms step_avg:93.28ms +[2025-08-22 13:36:48] [Rank 0] step:4821/10000 train_time:449689ms step_avg:93.28ms +[2025-08-22 13:36:50] [Rank 0] step:4841/10000 train_time:451676ms step_avg:93.30ms +[2025-08-22 13:36:50] [Rank 0] step:4841/10000 train_time:451676ms step_avg:93.30ms +[2025-08-22 13:36:52] [Rank 0] step:4861/10000 train_time:453596ms step_avg:93.31ms +[2025-08-22 13:36:52] [Rank 0] step:4861/10000 train_time:453596ms step_avg:93.31ms +[2025-08-22 13:36:54] [Rank 0] step:4881/10000 train_time:455516ms step_avg:93.32ms +[2025-08-22 13:36:54] [Rank 0] step:4881/10000 train_time:455516ms step_avg:93.32ms +[2025-08-22 13:36:55] [Rank 0] step:4901/10000 train_time:457434ms step_avg:93.33ms +[2025-08-22 13:36:55] [Rank 0] step:4901/10000 train_time:457434ms step_avg:93.33ms +[2025-08-22 13:36:57] [Rank 0] step:4921/10000 train_time:459355ms step_avg:93.35ms +[2025-08-22 13:36:57] [Rank 0] step:4921/10000 train_time:459355ms step_avg:93.35ms +[2025-08-22 13:36:59] [Rank 0] 
step:4941/10000 train_time:461278ms step_avg:93.36ms +[2025-08-22 13:36:59] [Rank 0] step:4941/10000 train_time:461278ms step_avg:93.36ms +[2025-08-22 13:37:01] [Rank 0] step:4961/10000 train_time:463197ms step_avg:93.37ms +[2025-08-22 13:37:01] [Rank 0] step:4961/10000 train_time:463197ms step_avg:93.37ms +[2025-08-22 13:37:03] [Rank 0] step:4981/10000 train_time:465122ms step_avg:93.38ms +[2025-08-22 13:37:03] [Rank 0] step:4981/10000 train_time:465122ms step_avg:93.38ms +[2025-08-22 13:37:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:37:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:37:19] [Rank 0] PRINT: step:5000/10000 val_loss:3.7840 svd_entropy: attn_qk:H=0.7331,top10E=0.32,eRank=152.1,q75/q25=60.08 attn_vo:H=0.7993,top10E=0.15,eRank=280.7,q75/q25=inf mlp_w1:H=0.9687,top10E=0.04,eRank=624.3,q75/q25=3.00 mlp_w2:H=0.9653,top10E=0.05,eRank=610.4,q75/q25=3.06 vo_prod:H=0.6268,top10E=0.31,eRank=99.2,q75/q25=inf train_time:467049ms step_avg:93.41ms +[2025-08-22 13:37:19] [Rank 0] PRINT: step:5000/10000 val_loss:3.7840 svd_entropy: attn_qk:H=0.7331,top10E=0.32,eRank=152.1,q75/q25=60.08 attn_vo:H=0.7993,top10E=0.15,eRank=280.7,q75/q25=inf mlp_w1:H=0.9687,top10E=0.04,eRank=624.3,q75/q25=3.00 mlp_w2:H=0.9653,top10E=0.05,eRank=610.4,q75/q25=3.06 vo_prod:H=0.6268,top10E=0.31,eRank=99.2,q75/q25=inf train_time:467049ms step_avg:93.41ms +[2025-08-22 13:37:19] [Rank 0] step:5001/10000 train_time:467069ms step_avg:93.40ms +[2025-08-22 13:37:19] [Rank 0] step:5001/10000 train_time:467069ms step_avg:93.40ms +[2025-08-22 13:37:21] [Rank 0] step:5021/10000 train_time:468972ms step_avg:93.40ms +[2025-08-22 13:37:21] [Rank 0] step:5021/10000 train_time:468972ms step_avg:93.40ms +[2025-08-22 13:37:23] [Rank 0] step:5041/10000 train_time:470892ms step_avg:93.41ms +[2025-08-22 13:37:23] 
[Rank 0] step:5041/10000 train_time:470892ms step_avg:93.41ms +[2025-08-22 13:37:25] [Rank 0] step:5061/10000 train_time:472808ms step_avg:93.42ms +[2025-08-22 13:37:25] [Rank 0] step:5061/10000 train_time:472808ms step_avg:93.42ms +[2025-08-22 13:37:26] [Rank 0] step:5081/10000 train_time:474727ms step_avg:93.43ms +[2025-08-22 13:37:26] [Rank 0] step:5081/10000 train_time:474727ms step_avg:93.43ms +[2025-08-22 13:37:28] [Rank 0] step:5101/10000 train_time:476647ms step_avg:93.44ms +[2025-08-22 13:37:28] [Rank 0] step:5101/10000 train_time:476647ms step_avg:93.44ms +[2025-08-22 13:37:30] [Rank 0] step:5121/10000 train_time:478569ms step_avg:93.45ms +[2025-08-22 13:37:30] [Rank 0] step:5121/10000 train_time:478569ms step_avg:93.45ms +[2025-08-22 13:37:32] [Rank 0] step:5141/10000 train_time:480492ms step_avg:93.46ms +[2025-08-22 13:37:32] [Rank 0] step:5141/10000 train_time:480492ms step_avg:93.46ms +[2025-08-22 13:37:34] [Rank 0] step:5161/10000 train_time:482411ms step_avg:93.47ms +[2025-08-22 13:37:34] [Rank 0] step:5161/10000 train_time:482411ms step_avg:93.47ms +[2025-08-22 13:37:36] [Rank 0] step:5181/10000 train_time:484333ms step_avg:93.48ms +[2025-08-22 13:37:36] [Rank 0] step:5181/10000 train_time:484333ms step_avg:93.48ms +[2025-08-22 13:37:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:37:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:37:52] [Rank 0] PRINT: step:5200/10000 val_loss:3.7716 svd_entropy: attn_qk:H=0.7341,top10E=0.32,eRank=153.0,q75/q25=59.73 attn_vo:H=0.7998,top10E=0.15,eRank=281.3,q75/q25=inf mlp_w1:H=0.9688,top10E=0.04,eRank=624.6,q75/q25=3.00 mlp_w2:H=0.9653,top10E=0.05,eRank=610.5,q75/q25=3.06 vo_prod:H=0.6280,top10E=0.31,eRank=99.8,q75/q25=inf train_time:486284ms step_avg:93.52ms +[2025-08-22 13:37:52] [Rank 0] PRINT: step:5200/10000 val_loss:3.7716 svd_entropy: attn_qk:H=0.7341,top10E=0.32,eRank=153.0,q75/q25=59.73 attn_vo:H=0.7998,top10E=0.15,eRank=281.3,q75/q25=inf mlp_w1:H=0.9688,top10E=0.04,eRank=624.6,q75/q25=3.00 mlp_w2:H=0.9653,top10E=0.05,eRank=610.5,q75/q25=3.06 vo_prod:H=0.6280,top10E=0.31,eRank=99.8,q75/q25=inf train_time:486284ms step_avg:93.52ms +[2025-08-22 13:37:52] [Rank 0] step:5201/10000 train_time:486304ms step_avg:93.50ms +[2025-08-22 13:37:52] [Rank 0] step:5201/10000 train_time:486304ms step_avg:93.50ms +[2025-08-22 13:37:54] [Rank 0] step:5221/10000 train_time:488254ms step_avg:93.52ms +[2025-08-22 13:37:54] [Rank 0] step:5221/10000 train_time:488254ms step_avg:93.52ms +[2025-08-22 13:37:56] [Rank 0] step:5241/10000 train_time:490203ms step_avg:93.53ms +[2025-08-22 13:37:56] [Rank 0] step:5241/10000 train_time:490203ms step_avg:93.53ms +[2025-08-22 13:37:58] [Rank 0] step:5261/10000 train_time:492153ms step_avg:93.55ms +[2025-08-22 13:37:58] [Rank 0] step:5261/10000 train_time:492153ms step_avg:93.55ms +[2025-08-22 13:38:00] [Rank 0] step:5281/10000 train_time:494102ms step_avg:93.56ms +[2025-08-22 13:38:00] [Rank 0] step:5281/10000 train_time:494102ms step_avg:93.56ms +[2025-08-22 13:38:01] [Rank 0] step:5301/10000 train_time:496061ms step_avg:93.58ms +[2025-08-22 13:38:01] [Rank 0] step:5301/10000 train_time:496061ms step_avg:93.58ms +[2025-08-22 13:38:03] [Rank 0] step:5321/10000 train_time:498012ms step_avg:93.59ms +[2025-08-22 13:38:03] [Rank 0] step:5321/10000 train_time:498012ms step_avg:93.59ms +[2025-08-22 13:38:05] [Rank 0] 
step:5341/10000 train_time:499965ms step_avg:93.61ms +[2025-08-22 13:38:05] [Rank 0] step:5341/10000 train_time:499965ms step_avg:93.61ms +[2025-08-22 13:38:07] [Rank 0] step:5361/10000 train_time:501920ms step_avg:93.62ms +[2025-08-22 13:38:07] [Rank 0] step:5361/10000 train_time:501920ms step_avg:93.62ms +[2025-08-22 13:38:09] [Rank 0] step:5381/10000 train_time:503875ms step_avg:93.64ms +[2025-08-22 13:38:09] [Rank 0] step:5381/10000 train_time:503875ms step_avg:93.64ms +[2025-08-22 13:38:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:38:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:38:25] [Rank 0] PRINT: step:5400/10000 val_loss:3.7611 svd_entropy: attn_qk:H=0.7351,top10E=0.32,eRank=153.8,q75/q25=59.74 attn_vo:H=0.8003,top10E=0.15,eRank=281.9,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=624.8,q75/q25=2.99 mlp_w2:H=0.9654,top10E=0.05,eRank=610.6,q75/q25=3.06 vo_prod:H=0.6292,top10E=0.31,eRank=100.4,q75/q25=inf train_time:505833ms step_avg:93.67ms +[2025-08-22 13:38:25] [Rank 0] PRINT: step:5400/10000 val_loss:3.7611 svd_entropy: attn_qk:H=0.7351,top10E=0.32,eRank=153.8,q75/q25=59.74 attn_vo:H=0.8003,top10E=0.15,eRank=281.9,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=624.8,q75/q25=2.99 mlp_w2:H=0.9654,top10E=0.05,eRank=610.6,q75/q25=3.06 vo_prod:H=0.6292,top10E=0.31,eRank=100.4,q75/q25=inf train_time:505833ms step_avg:93.67ms +[2025-08-22 13:38:25] [Rank 0] step:5401/10000 train_time:505854ms step_avg:93.66ms +[2025-08-22 13:38:25] [Rank 0] step:5401/10000 train_time:505854ms step_avg:93.66ms +[2025-08-22 13:38:27] [Rank 0] step:5421/10000 train_time:507796ms step_avg:93.67ms +[2025-08-22 13:38:27] [Rank 0] step:5421/10000 train_time:507796ms step_avg:93.67ms +[2025-08-22 13:38:29] [Rank 0] step:5441/10000 train_time:509745ms step_avg:93.69ms +[2025-08-22 
13:38:29] [Rank 0] step:5441/10000 train_time:509745ms step_avg:93.69ms +[2025-08-22 13:38:31] [Rank 0] step:5461/10000 train_time:511701ms step_avg:93.70ms +[2025-08-22 13:38:31] [Rank 0] step:5461/10000 train_time:511701ms step_avg:93.70ms +[2025-08-22 13:38:33] [Rank 0] step:5481/10000 train_time:513653ms step_avg:93.72ms +[2025-08-22 13:38:33] [Rank 0] step:5481/10000 train_time:513653ms step_avg:93.72ms +[2025-08-22 13:38:35] [Rank 0] step:5501/10000 train_time:515615ms step_avg:93.73ms +[2025-08-22 13:38:35] [Rank 0] step:5501/10000 train_time:515615ms step_avg:93.73ms +[2025-08-22 13:38:37] [Rank 0] step:5521/10000 train_time:517576ms step_avg:93.75ms +[2025-08-22 13:38:37] [Rank 0] step:5521/10000 train_time:517576ms step_avg:93.75ms +[2025-08-22 13:38:39] [Rank 0] step:5541/10000 train_time:519533ms step_avg:93.76ms +[2025-08-22 13:38:39] [Rank 0] step:5541/10000 train_time:519533ms step_avg:93.76ms +[2025-08-22 13:38:41] [Rank 0] step:5561/10000 train_time:521488ms step_avg:93.78ms +[2025-08-22 13:38:41] [Rank 0] step:5561/10000 train_time:521488ms step_avg:93.78ms +[2025-08-22 13:38:43] [Rank 0] step:5581/10000 train_time:523444ms step_avg:93.79ms +[2025-08-22 13:38:43] [Rank 0] step:5581/10000 train_time:523444ms step_avg:93.79ms +[2025-08-22 13:38:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:38:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:38:58] [Rank 0] PRINT: step:5600/10000 val_loss:3.7508 svd_entropy: attn_qk:H=0.7362,top10E=0.31,eRank=154.7,q75/q25=59.80 attn_vo:H=0.8007,top10E=0.15,eRank=282.5,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=625.1,q75/q25=2.99 mlp_w2:H=0.9654,top10E=0.05,eRank=610.7,q75/q25=3.05 vo_prod:H=0.6302,top10E=0.31,eRank=101.0,q75/q25=inf train_time:525412ms step_avg:93.82ms +[2025-08-22 13:38:58] [Rank 0] PRINT: step:5600/10000 val_loss:3.7508 svd_entropy: attn_qk:H=0.7362,top10E=0.31,eRank=154.7,q75/q25=59.80 attn_vo:H=0.8007,top10E=0.15,eRank=282.5,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=625.1,q75/q25=2.99 mlp_w2:H=0.9654,top10E=0.05,eRank=610.7,q75/q25=3.05 vo_prod:H=0.6302,top10E=0.31,eRank=101.0,q75/q25=inf train_time:525412ms step_avg:93.82ms +[2025-08-22 13:38:58] [Rank 0] step:5601/10000 train_time:525433ms step_avg:93.81ms +[2025-08-22 13:38:58] [Rank 0] step:5601/10000 train_time:525433ms step_avg:93.81ms +[2025-08-22 13:39:00] [Rank 0] step:5621/10000 train_time:527372ms step_avg:93.82ms +[2025-08-22 13:39:00] [Rank 0] step:5621/10000 train_time:527372ms step_avg:93.82ms +[2025-08-22 13:39:02] [Rank 0] step:5641/10000 train_time:529322ms step_avg:93.83ms +[2025-08-22 13:39:02] [Rank 0] step:5641/10000 train_time:529322ms step_avg:93.83ms +[2025-08-22 13:39:04] [Rank 0] step:5661/10000 train_time:531269ms step_avg:93.85ms +[2025-08-22 13:39:04] [Rank 0] step:5661/10000 train_time:531269ms step_avg:93.85ms +[2025-08-22 13:39:06] [Rank 0] step:5681/10000 train_time:533219ms step_avg:93.86ms +[2025-08-22 13:39:06] [Rank 0] step:5681/10000 train_time:533219ms step_avg:93.86ms +[2025-08-22 13:39:08] [Rank 0] step:5701/10000 train_time:535170ms step_avg:93.87ms +[2025-08-22 13:39:08] [Rank 0] step:5701/10000 train_time:535170ms step_avg:93.87ms +[2025-08-22 13:39:10] [Rank 0] step:5721/10000 train_time:537128ms step_avg:93.89ms +[2025-08-22 13:39:10] [Rank 0] step:5721/10000 train_time:537128ms step_avg:93.89ms +[2025-08-22 13:39:12] [Rank 0] 
step:5741/10000 train_time:539079ms step_avg:93.90ms +[2025-08-22 13:39:12] [Rank 0] step:5741/10000 train_time:539079ms step_avg:93.90ms +[2025-08-22 13:39:14] [Rank 0] step:5761/10000 train_time:541033ms step_avg:93.91ms +[2025-08-22 13:39:14] [Rank 0] step:5761/10000 train_time:541033ms step_avg:93.91ms +[2025-08-22 13:39:16] [Rank 0] step:5781/10000 train_time:542988ms step_avg:93.93ms +[2025-08-22 13:39:16] [Rank 0] step:5781/10000 train_time:542988ms step_avg:93.93ms +[2025-08-22 13:39:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:39:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:39:31] [Rank 0] PRINT: step:5800/10000 val_loss:3.7474 svd_entropy: attn_qk:H=0.7372,top10E=0.31,eRank=155.5,q75/q25=59.76 attn_vo:H=0.8011,top10E=0.15,eRank=283.0,q75/q25=inf mlp_w1:H=0.9690,top10E=0.04,eRank=625.3,q75/q25=2.98 mlp_w2:H=0.9654,top10E=0.05,eRank=610.8,q75/q25=3.05 vo_prod:H=0.6310,top10E=0.31,eRank=101.3,q75/q25=inf train_time:544948ms step_avg:93.96ms +[2025-08-22 13:39:31] [Rank 0] PRINT: step:5800/10000 val_loss:3.7474 svd_entropy: attn_qk:H=0.7372,top10E=0.31,eRank=155.5,q75/q25=59.76 attn_vo:H=0.8011,top10E=0.15,eRank=283.0,q75/q25=inf mlp_w1:H=0.9690,top10E=0.04,eRank=625.3,q75/q25=2.98 mlp_w2:H=0.9654,top10E=0.05,eRank=610.8,q75/q25=3.05 vo_prod:H=0.6310,top10E=0.31,eRank=101.3,q75/q25=inf train_time:544948ms step_avg:93.96ms +[2025-08-22 13:39:32] [Rank 0] step:5801/10000 train_time:544967ms step_avg:93.94ms +[2025-08-22 13:39:32] [Rank 0] step:5801/10000 train_time:544967ms step_avg:93.94ms +[2025-08-22 13:39:33] [Rank 0] step:5821/10000 train_time:546914ms step_avg:93.96ms +[2025-08-22 13:39:33] [Rank 0] step:5821/10000 train_time:546914ms step_avg:93.96ms +[2025-08-22 13:39:35] [Rank 0] step:5841/10000 train_time:548863ms step_avg:93.97ms +[2025-08-22 
13:39:35] [Rank 0] step:5841/10000 train_time:548863ms step_avg:93.97ms +[2025-08-22 13:39:37] [Rank 0] step:5861/10000 train_time:550817ms step_avg:93.98ms +[2025-08-22 13:39:37] [Rank 0] step:5861/10000 train_time:550817ms step_avg:93.98ms +[2025-08-22 13:39:39] [Rank 0] step:5881/10000 train_time:552769ms step_avg:93.99ms +[2025-08-22 13:39:39] [Rank 0] step:5881/10000 train_time:552769ms step_avg:93.99ms +[2025-08-22 13:39:41] [Rank 0] step:5901/10000 train_time:554721ms step_avg:94.00ms +[2025-08-22 13:39:41] [Rank 0] step:5901/10000 train_time:554721ms step_avg:94.00ms +[2025-08-22 13:39:43] [Rank 0] step:5921/10000 train_time:556672ms step_avg:94.02ms +[2025-08-22 13:39:43] [Rank 0] step:5921/10000 train_time:556672ms step_avg:94.02ms +[2025-08-22 13:39:45] [Rank 0] step:5941/10000 train_time:558628ms step_avg:94.03ms +[2025-08-22 13:39:45] [Rank 0] step:5941/10000 train_time:558628ms step_avg:94.03ms +[2025-08-22 13:39:47] [Rank 0] step:5961/10000 train_time:560582ms step_avg:94.04ms +[2025-08-22 13:39:47] [Rank 0] step:5961/10000 train_time:560582ms step_avg:94.04ms +[2025-08-22 13:39:49] [Rank 0] step:5981/10000 train_time:562534ms step_avg:94.05ms +[2025-08-22 13:39:49] [Rank 0] step:5981/10000 train_time:562534ms step_avg:94.05ms +[2025-08-22 13:39:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:39:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:40:05] [Rank 0] PRINT: step:6000/10000 val_loss:3.7276 svd_entropy: attn_qk:H=0.7382,top10E=0.31,eRank=156.3,q75/q25=59.78 attn_vo:H=0.8015,top10E=0.15,eRank=283.5,q75/q25=inf mlp_w1:H=0.9690,top10E=0.04,eRank=625.5,q75/q25=2.98 mlp_w2:H=0.9654,top10E=0.05,eRank=610.9,q75/q25=3.04 vo_prod:H=0.6316,top10E=0.31,eRank=101.8,q75/q25=inf train_time:564493ms step_avg:94.08ms +[2025-08-22 13:40:05] [Rank 0] PRINT: step:6000/10000 val_loss:3.7276 svd_entropy: attn_qk:H=0.7382,top10E=0.31,eRank=156.3,q75/q25=59.78 attn_vo:H=0.8015,top10E=0.15,eRank=283.5,q75/q25=inf mlp_w1:H=0.9690,top10E=0.04,eRank=625.5,q75/q25=2.98 mlp_w2:H=0.9654,top10E=0.05,eRank=610.9,q75/q25=3.04 vo_prod:H=0.6316,top10E=0.31,eRank=101.8,q75/q25=inf train_time:564493ms step_avg:94.08ms +[2025-08-22 13:40:05] [Rank 0] step:6001/10000 train_time:564513ms step_avg:94.07ms +[2025-08-22 13:40:05] [Rank 0] step:6001/10000 train_time:564513ms step_avg:94.07ms +[2025-08-22 13:40:07] [Rank 0] step:6021/10000 train_time:566471ms step_avg:94.08ms +[2025-08-22 13:40:07] [Rank 0] step:6021/10000 train_time:566471ms step_avg:94.08ms +[2025-08-22 13:40:09] [Rank 0] step:6041/10000 train_time:568427ms step_avg:94.09ms +[2025-08-22 13:40:09] [Rank 0] step:6041/10000 train_time:568427ms step_avg:94.09ms +[2025-08-22 13:40:11] [Rank 0] step:6061/10000 train_time:570384ms step_avg:94.11ms +[2025-08-22 13:40:11] [Rank 0] step:6061/10000 train_time:570384ms step_avg:94.11ms +[2025-08-22 13:40:13] [Rank 0] step:6081/10000 train_time:572338ms step_avg:94.12ms +[2025-08-22 13:40:13] [Rank 0] step:6081/10000 train_time:572338ms step_avg:94.12ms +[2025-08-22 13:40:15] [Rank 0] step:6101/10000 train_time:574298ms step_avg:94.13ms +[2025-08-22 13:40:15] [Rank 0] step:6101/10000 train_time:574298ms step_avg:94.13ms +[2025-08-22 13:40:17] [Rank 0] step:6121/10000 train_time:576321ms step_avg:94.15ms +[2025-08-22 13:40:17] [Rank 0] step:6121/10000 train_time:576321ms step_avg:94.15ms +[2025-08-22 13:40:19] [Rank 0] 
step:6141/10000 train_time:578287ms step_avg:94.17ms +[2025-08-22 13:40:19] [Rank 0] step:6141/10000 train_time:578287ms step_avg:94.17ms +[2025-08-22 13:40:21] [Rank 0] step:6161/10000 train_time:580244ms step_avg:94.18ms +[2025-08-22 13:40:21] [Rank 0] step:6161/10000 train_time:580244ms step_avg:94.18ms +[2025-08-22 13:40:23] [Rank 0] step:6181/10000 train_time:582204ms step_avg:94.19ms +[2025-08-22 13:40:23] [Rank 0] step:6181/10000 train_time:582204ms step_avg:94.19ms +[2025-08-22 13:40:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:40:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:40:38] [Rank 0] PRINT: step:6200/10000 val_loss:3.7140 svd_entropy: attn_qk:H=0.7392,top10E=0.31,eRank=157.1,q75/q25=60.00 attn_vo:H=0.8019,top10E=0.15,eRank=284.0,q75/q25=inf mlp_w1:H=0.9691,top10E=0.04,eRank=625.6,q75/q25=2.97 mlp_w2:H=0.9654,top10E=0.05,eRank=611.0,q75/q25=3.04 vo_prod:H=0.6322,top10E=0.31,eRank=102.1,q75/q25=inf train_time:584171ms step_avg:94.22ms +[2025-08-22 13:40:38] [Rank 0] PRINT: step:6200/10000 val_loss:3.7140 svd_entropy: attn_qk:H=0.7392,top10E=0.31,eRank=157.1,q75/q25=60.00 attn_vo:H=0.8019,top10E=0.15,eRank=284.0,q75/q25=inf mlp_w1:H=0.9691,top10E=0.04,eRank=625.6,q75/q25=2.97 mlp_w2:H=0.9654,top10E=0.05,eRank=611.0,q75/q25=3.04 vo_prod:H=0.6322,top10E=0.31,eRank=102.1,q75/q25=inf train_time:584171ms step_avg:94.22ms +[2025-08-22 13:40:38] [Rank 0] step:6201/10000 train_time:584191ms step_avg:94.21ms +[2025-08-22 13:40:38] [Rank 0] step:6201/10000 train_time:584191ms step_avg:94.21ms +[2025-08-22 13:40:40] [Rank 0] step:6221/10000 train_time:586144ms step_avg:94.22ms +[2025-08-22 13:40:40] [Rank 0] step:6221/10000 train_time:586144ms step_avg:94.22ms +[2025-08-22 13:40:42] [Rank 0] step:6241/10000 train_time:588098ms step_avg:94.23ms +[2025-08-22 
13:40:42] [Rank 0] step:6241/10000 train_time:588098ms step_avg:94.23ms +[2025-08-22 13:40:44] [Rank 0] step:6261/10000 train_time:590056ms step_avg:94.24ms +[2025-08-22 13:40:44] [Rank 0] step:6261/10000 train_time:590056ms step_avg:94.24ms +[2025-08-22 13:40:46] [Rank 0] step:6281/10000 train_time:592017ms step_avg:94.26ms +[2025-08-22 13:40:46] [Rank 0] step:6281/10000 train_time:592017ms step_avg:94.26ms +[2025-08-22 13:40:48] [Rank 0] step:6301/10000 train_time:593979ms step_avg:94.27ms +[2025-08-22 13:40:48] [Rank 0] step:6301/10000 train_time:593979ms step_avg:94.27ms +[2025-08-22 13:40:50] [Rank 0] step:6321/10000 train_time:595943ms step_avg:94.28ms +[2025-08-22 13:40:50] [Rank 0] step:6321/10000 train_time:595943ms step_avg:94.28ms +[2025-08-22 13:40:52] [Rank 0] step:6341/10000 train_time:597902ms step_avg:94.29ms +[2025-08-22 13:40:52] [Rank 0] step:6341/10000 train_time:597902ms step_avg:94.29ms +[2025-08-22 13:40:54] [Rank 0] step:6361/10000 train_time:599873ms step_avg:94.30ms +[2025-08-22 13:40:54] [Rank 0] step:6361/10000 train_time:599873ms step_avg:94.30ms +[2025-08-22 13:40:56] [Rank 0] step:6381/10000 train_time:601838ms step_avg:94.32ms +[2025-08-22 13:40:56] [Rank 0] step:6381/10000 train_time:601838ms step_avg:94.32ms +[2025-08-22 13:40:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:40:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:41:11] [Rank 0] PRINT: step:6400/10000 val_loss:3.7006 svd_entropy: attn_qk:H=0.7398,top10E=0.31,eRank=157.6,q75/q25=59.83 attn_vo:H=0.8023,top10E=0.15,eRank=284.5,q75/q25=inf mlp_w1:H=0.9691,top10E=0.04,eRank=625.8,q75/q25=2.97 mlp_w2:H=0.9654,top10E=0.05,eRank=611.0,q75/q25=3.04 vo_prod:H=0.6333,top10E=0.31,eRank=102.7,q75/q25=inf train_time:603804ms step_avg:94.34ms +[2025-08-22 13:41:11] [Rank 0] PRINT: step:6400/10000 val_loss:3.7006 svd_entropy: attn_qk:H=0.7398,top10E=0.31,eRank=157.6,q75/q25=59.83 attn_vo:H=0.8023,top10E=0.15,eRank=284.5,q75/q25=inf mlp_w1:H=0.9691,top10E=0.04,eRank=625.8,q75/q25=2.97 mlp_w2:H=0.9654,top10E=0.05,eRank=611.0,q75/q25=3.04 vo_prod:H=0.6333,top10E=0.31,eRank=102.7,q75/q25=inf train_time:603804ms step_avg:94.34ms +[2025-08-22 13:41:12] [Rank 0] step:6401/10000 train_time:603824ms step_avg:94.33ms +[2025-08-22 13:41:12] [Rank 0] step:6401/10000 train_time:603824ms step_avg:94.33ms +[2025-08-22 13:41:14] [Rank 0] step:6421/10000 train_time:605784ms step_avg:94.34ms +[2025-08-22 13:41:14] [Rank 0] step:6421/10000 train_time:605784ms step_avg:94.34ms +[2025-08-22 13:41:16] [Rank 0] step:6441/10000 train_time:607739ms step_avg:94.35ms +[2025-08-22 13:41:16] [Rank 0] step:6441/10000 train_time:607739ms step_avg:94.35ms +[2025-08-22 13:41:17] [Rank 0] step:6461/10000 train_time:609699ms step_avg:94.37ms +[2025-08-22 13:41:17] [Rank 0] step:6461/10000 train_time:609699ms step_avg:94.37ms +[2025-08-22 13:41:19] [Rank 0] step:6481/10000 train_time:611663ms step_avg:94.38ms +[2025-08-22 13:41:19] [Rank 0] step:6481/10000 train_time:611663ms step_avg:94.38ms +[2025-08-22 13:41:21] [Rank 0] step:6501/10000 train_time:613617ms step_avg:94.39ms +[2025-08-22 13:41:21] [Rank 0] step:6501/10000 train_time:613617ms step_avg:94.39ms +[2025-08-22 13:41:23] [Rank 0] step:6521/10000 train_time:615573ms step_avg:94.40ms +[2025-08-22 13:41:23] [Rank 0] step:6521/10000 train_time:615573ms step_avg:94.40ms +[2025-08-22 13:41:25] [Rank 0] 
step:6541/10000 train_time:617531ms step_avg:94.41ms +[2025-08-22 13:41:25] [Rank 0] step:6541/10000 train_time:617531ms step_avg:94.41ms +[2025-08-22 13:41:27] [Rank 0] step:6561/10000 train_time:619489ms step_avg:94.42ms +[2025-08-22 13:41:27] [Rank 0] step:6561/10000 train_time:619489ms step_avg:94.42ms +[2025-08-22 13:41:29] [Rank 0] step:6581/10000 train_time:621444ms step_avg:94.43ms +[2025-08-22 13:41:29] [Rank 0] step:6581/10000 train_time:621444ms step_avg:94.43ms +[2025-08-22 13:41:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:41:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:41:45] [Rank 0] PRINT: step:6600/10000 val_loss:3.6875 svd_entropy: attn_qk:H=0.7406,top10E=0.31,eRank=158.3,q75/q25=59.52 attn_vo:H=0.8027,top10E=0.15,eRank=285.2,q75/q25=inf mlp_w1:H=0.9691,top10E=0.04,eRank=626.0,q75/q25=2.97 mlp_w2:H=0.9654,top10E=0.05,eRank=611.1,q75/q25=3.04 vo_prod:H=0.6341,top10E=0.30,eRank=103.3,q75/q25=inf train_time:623410ms step_avg:94.46ms +[2025-08-22 13:41:45] [Rank 0] PRINT: step:6600/10000 val_loss:3.6875 svd_entropy: attn_qk:H=0.7406,top10E=0.31,eRank=158.3,q75/q25=59.52 attn_vo:H=0.8027,top10E=0.15,eRank=285.2,q75/q25=inf mlp_w1:H=0.9691,top10E=0.04,eRank=626.0,q75/q25=2.97 mlp_w2:H=0.9654,top10E=0.05,eRank=611.1,q75/q25=3.04 vo_prod:H=0.6341,top10E=0.30,eRank=103.3,q75/q25=inf train_time:623410ms step_avg:94.46ms +[2025-08-22 13:41:45] [Rank 0] step:6601/10000 train_time:623430ms step_avg:94.44ms +[2025-08-22 13:41:45] [Rank 0] step:6601/10000 train_time:623430ms step_avg:94.44ms +[2025-08-22 13:41:47] [Rank 0] step:6621/10000 train_time:625368ms step_avg:94.45ms +[2025-08-22 13:41:47] [Rank 0] step:6621/10000 train_time:625368ms step_avg:94.45ms +[2025-08-22 13:41:49] [Rank 0] step:6641/10000 train_time:627332ms step_avg:94.46ms +[2025-08-22 
13:41:49] [Rank 0] step:6641/10000 train_time:627332ms step_avg:94.46ms +[2025-08-22 13:41:51] [Rank 0] step:6661/10000 train_time:629292ms step_avg:94.47ms +[2025-08-22 13:41:51] [Rank 0] step:6661/10000 train_time:629292ms step_avg:94.47ms +[2025-08-22 13:41:53] [Rank 0] step:6681/10000 train_time:631265ms step_avg:94.49ms +[2025-08-22 13:41:53] [Rank 0] step:6681/10000 train_time:631265ms step_avg:94.49ms +[2025-08-22 13:41:55] [Rank 0] step:6701/10000 train_time:633259ms step_avg:94.50ms +[2025-08-22 13:41:55] [Rank 0] step:6701/10000 train_time:633259ms step_avg:94.50ms +[2025-08-22 13:41:57] [Rank 0] step:6721/10000 train_time:635248ms step_avg:94.52ms +[2025-08-22 13:41:57] [Rank 0] step:6721/10000 train_time:635248ms step_avg:94.52ms +[2025-08-22 13:41:59] [Rank 0] step:6741/10000 train_time:637231ms step_avg:94.53ms +[2025-08-22 13:41:59] [Rank 0] step:6741/10000 train_time:637231ms step_avg:94.53ms +[2025-08-22 13:42:01] [Rank 0] step:6761/10000 train_time:639213ms step_avg:94.54ms +[2025-08-22 13:42:01] [Rank 0] step:6761/10000 train_time:639213ms step_avg:94.54ms +[2025-08-22 13:42:03] [Rank 0] step:6781/10000 train_time:641328ms step_avg:94.58ms +[2025-08-22 13:42:03] [Rank 0] step:6781/10000 train_time:641328ms step_avg:94.58ms +[2025-08-22 13:42:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:42:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:42:19] [Rank 0] PRINT: step:6800/10000 val_loss:3.6703 svd_entropy: attn_qk:H=0.7413,top10E=0.31,eRank=158.8,q75/q25=59.44 attn_vo:H=0.8031,top10E=0.15,eRank=285.7,q75/q25=inf mlp_w1:H=0.9692,top10E=0.04,eRank=626.1,q75/q25=2.97 mlp_w2:H=0.9655,top10E=0.05,eRank=611.2,q75/q25=3.03 vo_prod:H=0.6349,top10E=0.30,eRank=103.7,q75/q25=inf train_time:643339ms step_avg:94.61ms +[2025-08-22 13:42:19] [Rank 0] PRINT: step:6800/10000 val_loss:3.6703 svd_entropy: attn_qk:H=0.7413,top10E=0.31,eRank=158.8,q75/q25=59.44 attn_vo:H=0.8031,top10E=0.15,eRank=285.7,q75/q25=inf mlp_w1:H=0.9692,top10E=0.04,eRank=626.1,q75/q25=2.97 mlp_w2:H=0.9655,top10E=0.05,eRank=611.2,q75/q25=3.03 vo_prod:H=0.6349,top10E=0.30,eRank=103.7,q75/q25=inf train_time:643339ms step_avg:94.61ms +[2025-08-22 13:42:19] [Rank 0] step:6801/10000 train_time:643360ms step_avg:94.60ms +[2025-08-22 13:42:19] [Rank 0] step:6801/10000 train_time:643360ms step_avg:94.60ms +[2025-08-22 13:42:21] [Rank 0] step:6821/10000 train_time:645351ms step_avg:94.61ms +[2025-08-22 13:42:21] [Rank 0] step:6821/10000 train_time:645351ms step_avg:94.61ms +[2025-08-22 13:42:23] [Rank 0] step:6841/10000 train_time:647334ms step_avg:94.63ms +[2025-08-22 13:42:23] [Rank 0] step:6841/10000 train_time:647334ms step_avg:94.63ms +[2025-08-22 13:42:25] [Rank 0] step:6861/10000 train_time:649312ms step_avg:94.64ms +[2025-08-22 13:42:25] [Rank 0] step:6861/10000 train_time:649312ms step_avg:94.64ms +[2025-08-22 13:42:27] [Rank 0] step:6881/10000 train_time:651298ms step_avg:94.65ms +[2025-08-22 13:42:27] [Rank 0] step:6881/10000 train_time:651298ms step_avg:94.65ms +[2025-08-22 13:42:29] [Rank 0] step:6901/10000 train_time:653280ms step_avg:94.66ms +[2025-08-22 13:42:29] [Rank 0] step:6901/10000 train_time:653280ms step_avg:94.66ms +[2025-08-22 13:42:31] [Rank 0] step:6921/10000 train_time:655260ms step_avg:94.68ms +[2025-08-22 13:42:31] [Rank 0] step:6921/10000 train_time:655260ms step_avg:94.68ms +[2025-08-22 13:42:33] [Rank 0] 
step:6941/10000 train_time:657253ms step_avg:94.69ms +[2025-08-22 13:42:33] [Rank 0] step:6941/10000 train_time:657253ms step_avg:94.69ms +[2025-08-22 13:42:35] [Rank 0] step:6961/10000 train_time:659251ms step_avg:94.71ms +[2025-08-22 13:42:35] [Rank 0] step:6961/10000 train_time:659251ms step_avg:94.71ms +[2025-08-22 13:42:37] [Rank 0] step:6981/10000 train_time:661240ms step_avg:94.72ms +[2025-08-22 13:42:37] [Rank 0] step:6981/10000 train_time:661240ms step_avg:94.72ms +[2025-08-22 13:42:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:42:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:42:52] [Rank 0] PRINT: step:7000/10000 val_loss:3.6568 svd_entropy: attn_qk:H=0.7421,top10E=0.31,eRank=159.4,q75/q25=59.52 attn_vo:H=0.8034,top10E=0.15,eRank=286.0,q75/q25=inf mlp_w1:H=0.9692,top10E=0.04,eRank=626.3,q75/q25=2.96 mlp_w2:H=0.9655,top10E=0.05,eRank=611.3,q75/q25=3.03 vo_prod:H=0.6356,top10E=0.30,eRank=103.9,q75/q25=inf train_time:663238ms step_avg:94.75ms +[2025-08-22 13:42:52] [Rank 0] PRINT: step:7000/10000 val_loss:3.6568 svd_entropy: attn_qk:H=0.7421,top10E=0.31,eRank=159.4,q75/q25=59.52 attn_vo:H=0.8034,top10E=0.15,eRank=286.0,q75/q25=inf mlp_w1:H=0.9692,top10E=0.04,eRank=626.3,q75/q25=2.96 mlp_w2:H=0.9655,top10E=0.05,eRank=611.3,q75/q25=3.03 vo_prod:H=0.6356,top10E=0.30,eRank=103.9,q75/q25=inf train_time:663238ms step_avg:94.75ms +[2025-08-22 13:42:53] [Rank 0] step:7001/10000 train_time:663260ms step_avg:94.74ms +[2025-08-22 13:42:53] [Rank 0] step:7001/10000 train_time:663260ms step_avg:94.74ms +[2025-08-22 13:42:55] [Rank 0] step:7021/10000 train_time:665243ms step_avg:94.75ms +[2025-08-22 13:42:55] [Rank 0] step:7021/10000 train_time:665243ms step_avg:94.75ms +[2025-08-22 13:42:57] [Rank 0] step:7041/10000 train_time:667228ms step_avg:94.76ms +[2025-08-22 
13:42:57] [Rank 0] step:7041/10000 train_time:667228ms step_avg:94.76ms +[2025-08-22 13:42:59] [Rank 0] step:7061/10000 train_time:669216ms step_avg:94.78ms +[2025-08-22 13:42:59] [Rank 0] step:7061/10000 train_time:669216ms step_avg:94.78ms +[2025-08-22 13:43:01] [Rank 0] step:7081/10000 train_time:671204ms step_avg:94.79ms +[2025-08-22 13:43:01] [Rank 0] step:7081/10000 train_time:671204ms step_avg:94.79ms +[2025-08-22 13:43:02] [Rank 0] step:7101/10000 train_time:673201ms step_avg:94.80ms +[2025-08-22 13:43:02] [Rank 0] step:7101/10000 train_time:673201ms step_avg:94.80ms +[2025-08-22 13:43:04] [Rank 0] step:7121/10000 train_time:675186ms step_avg:94.82ms +[2025-08-22 13:43:04] [Rank 0] step:7121/10000 train_time:675186ms step_avg:94.82ms +[2025-08-22 13:43:07] [Rank 0] step:7141/10000 train_time:677248ms step_avg:94.84ms +[2025-08-22 13:43:07] [Rank 0] step:7141/10000 train_time:677248ms step_avg:94.84ms +[2025-08-22 13:43:09] [Rank 0] step:7161/10000 train_time:679290ms step_avg:94.86ms +[2025-08-22 13:43:09] [Rank 0] step:7161/10000 train_time:679290ms step_avg:94.86ms +[2025-08-22 13:43:11] [Rank 0] step:7181/10000 train_time:681285ms step_avg:94.87ms +[2025-08-22 13:43:11] [Rank 0] step:7181/10000 train_time:681285ms step_avg:94.87ms +[2025-08-22 13:43:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:43:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:43:26] [Rank 0] PRINT: step:7200/10000 val_loss:3.6468 svd_entropy: attn_qk:H=0.7427,top10E=0.31,eRank=160.0,q75/q25=59.34 attn_vo:H=0.8037,top10E=0.15,eRank=286.5,q75/q25=inf mlp_w1:H=0.9692,top10E=0.04,eRank=626.4,q75/q25=2.96 mlp_w2:H=0.9655,top10E=0.05,eRank=611.4,q75/q25=3.02 vo_prod:H=0.6360,top10E=0.30,eRank=104.2,q75/q25=inf train_time:683285ms step_avg:94.90ms +[2025-08-22 13:43:26] [Rank 0] PRINT: step:7200/10000 val_loss:3.6468 svd_entropy: attn_qk:H=0.7427,top10E=0.31,eRank=160.0,q75/q25=59.34 attn_vo:H=0.8037,top10E=0.15,eRank=286.5,q75/q25=inf mlp_w1:H=0.9692,top10E=0.04,eRank=626.4,q75/q25=2.96 mlp_w2:H=0.9655,top10E=0.05,eRank=611.4,q75/q25=3.02 vo_prod:H=0.6360,top10E=0.30,eRank=104.2,q75/q25=inf train_time:683285ms step_avg:94.90ms +[2025-08-22 13:43:27] [Rank 0] step:7201/10000 train_time:683305ms step_avg:94.89ms +[2025-08-22 13:43:27] [Rank 0] step:7201/10000 train_time:683305ms step_avg:94.89ms +[2025-08-22 13:43:29] [Rank 0] step:7221/10000 train_time:685284ms step_avg:94.90ms +[2025-08-22 13:43:29] [Rank 0] step:7221/10000 train_time:685284ms step_avg:94.90ms +[2025-08-22 13:43:31] [Rank 0] step:7241/10000 train_time:687267ms step_avg:94.91ms +[2025-08-22 13:43:31] [Rank 0] step:7241/10000 train_time:687267ms step_avg:94.91ms +[2025-08-22 13:43:33] [Rank 0] step:7261/10000 train_time:689250ms step_avg:94.92ms +[2025-08-22 13:43:33] [Rank 0] step:7261/10000 train_time:689250ms step_avg:94.92ms +[2025-08-22 13:43:35] [Rank 0] step:7281/10000 train_time:691242ms step_avg:94.94ms +[2025-08-22 13:43:35] [Rank 0] step:7281/10000 train_time:691242ms step_avg:94.94ms +[2025-08-22 13:43:37] [Rank 0] step:7301/10000 train_time:693226ms step_avg:94.95ms +[2025-08-22 13:43:37] [Rank 0] step:7301/10000 train_time:693226ms step_avg:94.95ms +[2025-08-22 13:43:39] [Rank 0] step:7321/10000 train_time:695224ms step_avg:94.96ms +[2025-08-22 13:43:39] [Rank 0] step:7321/10000 train_time:695224ms step_avg:94.96ms +[2025-08-22 13:43:41] [Rank 0] 
step:7341/10000 train_time:697209ms step_avg:94.97ms +[2025-08-22 13:43:41] [Rank 0] step:7341/10000 train_time:697209ms step_avg:94.97ms +[2025-08-22 13:43:43] [Rank 0] step:7361/10000 train_time:699205ms step_avg:94.99ms +[2025-08-22 13:43:43] [Rank 0] step:7361/10000 train_time:699205ms step_avg:94.99ms +[2025-08-22 13:43:44] [Rank 0] step:7381/10000 train_time:701198ms step_avg:95.00ms +[2025-08-22 13:43:44] [Rank 0] step:7381/10000 train_time:701198ms step_avg:95.00ms +[2025-08-22 13:43:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:43:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:44:00] [Rank 0] PRINT: step:7400/10000 val_loss:3.6272 svd_entropy: attn_qk:H=0.7432,top10E=0.30,eRank=160.4,q75/q25=59.41 attn_vo:H=0.8041,top10E=0.15,eRank=286.9,q75/q25=inf mlp_w1:H=0.9693,top10E=0.04,eRank=626.5,q75/q25=2.96 mlp_w2:H=0.9655,top10E=0.05,eRank=611.5,q75/q25=3.02 vo_prod:H=0.6370,top10E=0.30,eRank=104.7,q75/q25=inf train_time:703179ms step_avg:95.02ms +[2025-08-22 13:44:00] [Rank 0] PRINT: step:7400/10000 val_loss:3.6272 svd_entropy: attn_qk:H=0.7432,top10E=0.30,eRank=160.4,q75/q25=59.41 attn_vo:H=0.8041,top10E=0.15,eRank=286.9,q75/q25=inf mlp_w1:H=0.9693,top10E=0.04,eRank=626.5,q75/q25=2.96 mlp_w2:H=0.9655,top10E=0.05,eRank=611.5,q75/q25=3.02 vo_prod:H=0.6370,top10E=0.30,eRank=104.7,q75/q25=inf train_time:703179ms step_avg:95.02ms +[2025-08-22 13:44:01] [Rank 0] step:7401/10000 train_time:703200ms step_avg:95.01ms +[2025-08-22 13:44:01] [Rank 0] step:7401/10000 train_time:703200ms step_avg:95.01ms +[2025-08-22 13:44:03] [Rank 0] step:7421/10000 train_time:705176ms step_avg:95.02ms +[2025-08-22 13:44:03] [Rank 0] step:7421/10000 train_time:705176ms step_avg:95.02ms +[2025-08-22 13:44:04] [Rank 0] step:7441/10000 train_time:707159ms step_avg:95.04ms +[2025-08-22 
13:44:04] [Rank 0] step:7441/10000 train_time:707159ms step_avg:95.04ms +[2025-08-22 13:44:06] [Rank 0] step:7461/10000 train_time:709143ms step_avg:95.05ms +[2025-08-22 13:44:06] [Rank 0] step:7461/10000 train_time:709143ms step_avg:95.05ms +[2025-08-22 13:44:08] [Rank 0] step:7481/10000 train_time:711138ms step_avg:95.06ms +[2025-08-22 13:44:08] [Rank 0] step:7481/10000 train_time:711138ms step_avg:95.06ms +[2025-08-22 13:44:11] [Rank 0] step:7501/10000 train_time:713198ms step_avg:95.08ms +[2025-08-22 13:44:11] [Rank 0] step:7501/10000 train_time:713198ms step_avg:95.08ms +[2025-08-22 13:44:13] [Rank 0] step:7521/10000 train_time:715258ms step_avg:95.10ms +[2025-08-22 13:44:13] [Rank 0] step:7521/10000 train_time:715258ms step_avg:95.10ms +[2025-08-22 13:44:15] [Rank 0] step:7541/10000 train_time:717253ms step_avg:95.11ms +[2025-08-22 13:44:15] [Rank 0] step:7541/10000 train_time:717253ms step_avg:95.11ms +[2025-08-22 13:44:17] [Rank 0] step:7561/10000 train_time:719235ms step_avg:95.12ms +[2025-08-22 13:44:17] [Rank 0] step:7561/10000 train_time:719235ms step_avg:95.12ms +[2025-08-22 13:44:19] [Rank 0] step:7581/10000 train_time:721234ms step_avg:95.14ms +[2025-08-22 13:44:19] [Rank 0] step:7581/10000 train_time:721234ms step_avg:95.14ms +[2025-08-22 13:44:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:44:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:44:34] [Rank 0] PRINT: step:7600/10000 val_loss:3.6193 svd_entropy: attn_qk:H=0.7437,top10E=0.30,eRank=160.8,q75/q25=59.23 attn_vo:H=0.8044,top10E=0.14,eRank=287.4,q75/q25=inf mlp_w1:H=0.9693,top10E=0.04,eRank=626.7,q75/q25=2.95 mlp_w2:H=0.9656,top10E=0.05,eRank=611.6,q75/q25=3.02 vo_prod:H=0.6381,top10E=0.30,eRank=105.3,q75/q25=inf train_time:723239ms step_avg:95.16ms +[2025-08-22 13:44:34] [Rank 0] PRINT: step:7600/10000 val_loss:3.6193 svd_entropy: attn_qk:H=0.7437,top10E=0.30,eRank=160.8,q75/q25=59.23 attn_vo:H=0.8044,top10E=0.14,eRank=287.4,q75/q25=inf mlp_w1:H=0.9693,top10E=0.04,eRank=626.7,q75/q25=2.95 mlp_w2:H=0.9656,top10E=0.05,eRank=611.6,q75/q25=3.02 vo_prod:H=0.6381,top10E=0.30,eRank=105.3,q75/q25=inf train_time:723239ms step_avg:95.16ms +[2025-08-22 13:44:35] [Rank 0] step:7601/10000 train_time:723259ms step_avg:95.15ms +[2025-08-22 13:44:35] [Rank 0] step:7601/10000 train_time:723259ms step_avg:95.15ms +[2025-08-22 13:44:37] [Rank 0] step:7621/10000 train_time:725235ms step_avg:95.16ms +[2025-08-22 13:44:37] [Rank 0] step:7621/10000 train_time:725235ms step_avg:95.16ms +[2025-08-22 13:44:39] [Rank 0] step:7641/10000 train_time:727219ms step_avg:95.17ms +[2025-08-22 13:44:39] [Rank 0] step:7641/10000 train_time:727219ms step_avg:95.17ms +[2025-08-22 13:44:41] [Rank 0] step:7661/10000 train_time:729207ms step_avg:95.18ms +[2025-08-22 13:44:41] [Rank 0] step:7661/10000 train_time:729207ms step_avg:95.18ms +[2025-08-22 13:44:43] [Rank 0] step:7681/10000 train_time:731190ms step_avg:95.19ms +[2025-08-22 13:44:43] [Rank 0] step:7681/10000 train_time:731190ms step_avg:95.19ms +[2025-08-22 13:44:45] [Rank 0] step:7701/10000 train_time:733177ms step_avg:95.21ms +[2025-08-22 13:44:45] [Rank 0] step:7701/10000 train_time:733177ms step_avg:95.21ms +[2025-08-22 13:44:47] [Rank 0] step:7721/10000 train_time:735179ms step_avg:95.22ms +[2025-08-22 13:44:47] [Rank 0] step:7721/10000 train_time:735179ms step_avg:95.22ms +[2025-08-22 13:44:49] [Rank 0] 
step:7741/10000 train_time:737168ms step_avg:95.23ms +[2025-08-22 13:44:49] [Rank 0] step:7741/10000 train_time:737168ms step_avg:95.23ms +[2025-08-22 13:44:51] [Rank 0] step:7761/10000 train_time:739179ms step_avg:95.24ms +[2025-08-22 13:44:51] [Rank 0] step:7761/10000 train_time:739179ms step_avg:95.24ms +[2025-08-22 13:44:53] [Rank 0] step:7781/10000 train_time:741171ms step_avg:95.25ms +[2025-08-22 13:44:53] [Rank 0] step:7781/10000 train_time:741171ms step_avg:95.25ms +[2025-08-22 13:44:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:44:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:45:08] [Rank 0] PRINT: step:7800/10000 val_loss:3.6061 svd_entropy: attn_qk:H=0.7441,top10E=0.30,eRank=161.2,q75/q25=58.81 attn_vo:H=0.8047,top10E=0.14,eRank=287.8,q75/q25=inf mlp_w1:H=0.9693,top10E=0.04,eRank=626.7,q75/q25=2.95 mlp_w2:H=0.9656,top10E=0.05,eRank=611.7,q75/q25=3.02 vo_prod:H=0.6388,top10E=0.30,eRank=105.7,q75/q25=inf train_time:743181ms step_avg:95.28ms +[2025-08-22 13:45:08] [Rank 0] PRINT: step:7800/10000 val_loss:3.6061 svd_entropy: attn_qk:H=0.7441,top10E=0.30,eRank=161.2,q75/q25=58.81 attn_vo:H=0.8047,top10E=0.14,eRank=287.8,q75/q25=inf mlp_w1:H=0.9693,top10E=0.04,eRank=626.7,q75/q25=2.95 mlp_w2:H=0.9656,top10E=0.05,eRank=611.7,q75/q25=3.02 vo_prod:H=0.6388,top10E=0.30,eRank=105.7,q75/q25=inf train_time:743181ms step_avg:95.28ms +[2025-08-22 13:45:09] [Rank 0] step:7801/10000 train_time:743202ms step_avg:95.27ms +[2025-08-22 13:45:09] [Rank 0] step:7801/10000 train_time:743202ms step_avg:95.27ms +[2025-08-22 13:45:11] [Rank 0] step:7821/10000 train_time:745185ms step_avg:95.28ms +[2025-08-22 13:45:11] [Rank 0] step:7821/10000 train_time:745185ms step_avg:95.28ms +[2025-08-22 13:45:13] [Rank 0] step:7841/10000 train_time:747263ms step_avg:95.30ms +[2025-08-22 
13:45:13] [Rank 0] step:7841/10000 train_time:747263ms step_avg:95.30ms +[2025-08-22 13:45:15] [Rank 0] step:7861/10000 train_time:749298ms step_avg:95.32ms +[2025-08-22 13:45:15] [Rank 0] step:7861/10000 train_time:749298ms step_avg:95.32ms +[2025-08-22 13:45:17] [Rank 0] step:7881/10000 train_time:751295ms step_avg:95.33ms +[2025-08-22 13:45:17] [Rank 0] step:7881/10000 train_time:751295ms step_avg:95.33ms +[2025-08-22 13:45:19] [Rank 0] step:7901/10000 train_time:753281ms step_avg:95.34ms +[2025-08-22 13:45:19] [Rank 0] step:7901/10000 train_time:753281ms step_avg:95.34ms +[2025-08-22 13:45:21] [Rank 0] step:7921/10000 train_time:755277ms step_avg:95.35ms +[2025-08-22 13:45:21] [Rank 0] step:7921/10000 train_time:755277ms step_avg:95.35ms +[2025-08-22 13:45:23] [Rank 0] step:7941/10000 train_time:757277ms step_avg:95.36ms +[2025-08-22 13:45:23] [Rank 0] step:7941/10000 train_time:757277ms step_avg:95.36ms +[2025-08-22 13:45:25] [Rank 0] step:7961/10000 train_time:759271ms step_avg:95.37ms +[2025-08-22 13:45:25] [Rank 0] step:7961/10000 train_time:759271ms step_avg:95.37ms +[2025-08-22 13:45:27] [Rank 0] step:7981/10000 train_time:761256ms step_avg:95.38ms +[2025-08-22 13:45:27] [Rank 0] step:7981/10000 train_time:761256ms step_avg:95.38ms +[2025-08-22 13:45:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:45:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:45:42] [Rank 0] PRINT: step:8000/10000 val_loss:3.5875 svd_entropy: attn_qk:H=0.7446,top10E=0.30,eRank=161.5,q75/q25=58.38 attn_vo:H=0.8050,top10E=0.14,eRank=288.1,q75/q25=inf mlp_w1:H=0.9693,top10E=0.04,eRank=626.8,q75/q25=2.95 mlp_w2:H=0.9656,top10E=0.05,eRank=611.8,q75/q25=3.02 vo_prod:H=0.6394,top10E=0.30,eRank=106.1,q75/q25=inf train_time:763260ms step_avg:95.41ms +[2025-08-22 13:45:42] [Rank 0] PRINT: step:8000/10000 val_loss:3.5875 svd_entropy: attn_qk:H=0.7446,top10E=0.30,eRank=161.5,q75/q25=58.38 attn_vo:H=0.8050,top10E=0.14,eRank=288.1,q75/q25=inf mlp_w1:H=0.9693,top10E=0.04,eRank=626.8,q75/q25=2.95 mlp_w2:H=0.9656,top10E=0.05,eRank=611.8,q75/q25=3.02 vo_prod:H=0.6394,top10E=0.30,eRank=106.1,q75/q25=inf train_time:763260ms step_avg:95.41ms +[2025-08-22 13:45:42] [Rank 0] step:8001/10000 train_time:763280ms step_avg:95.40ms +[2025-08-22 13:45:42] [Rank 0] step:8001/10000 train_time:763280ms step_avg:95.40ms +[2025-08-22 13:45:44] [Rank 0] step:8021/10000 train_time:765257ms step_avg:95.41ms +[2025-08-22 13:45:44] [Rank 0] step:8021/10000 train_time:765257ms step_avg:95.41ms +[2025-08-22 13:45:46] [Rank 0] step:8041/10000 train_time:767254ms step_avg:95.42ms +[2025-08-22 13:45:46] [Rank 0] step:8041/10000 train_time:767254ms step_avg:95.42ms +[2025-08-22 13:45:48] [Rank 0] step:8061/10000 train_time:769243ms step_avg:95.43ms +[2025-08-22 13:45:48] [Rank 0] step:8061/10000 train_time:769243ms step_avg:95.43ms +[2025-08-22 13:45:50] [Rank 0] step:8081/10000 train_time:771221ms step_avg:95.44ms +[2025-08-22 13:45:50] [Rank 0] step:8081/10000 train_time:771221ms step_avg:95.44ms +[2025-08-22 13:45:52] [Rank 0] step:8101/10000 train_time:773216ms step_avg:95.45ms +[2025-08-22 13:45:52] [Rank 0] step:8101/10000 train_time:773216ms step_avg:95.45ms +[2025-08-22 13:45:54] [Rank 0] step:8121/10000 train_time:775203ms step_avg:95.46ms +[2025-08-22 13:45:54] [Rank 0] step:8121/10000 train_time:775203ms step_avg:95.46ms +[2025-08-22 13:45:57] [Rank 0] 
step:8141/10000 train_time:777838ms step_avg:95.55ms +[2025-08-22 13:45:57] [Rank 0] step:8141/10000 train_time:777838ms step_avg:95.55ms +[2025-08-22 13:45:59] [Rank 0] step:8161/10000 train_time:779846ms step_avg:95.56ms +[2025-08-22 13:45:59] [Rank 0] step:8161/10000 train_time:779846ms step_avg:95.56ms +[2025-08-22 13:46:01] [Rank 0] step:8181/10000 train_time:781866ms step_avg:95.57ms +[2025-08-22 13:46:01] [Rank 0] step:8181/10000 train_time:781866ms step_avg:95.57ms +[2025-08-22 13:46:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:46:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:46:17] [Rank 0] PRINT: step:8200/10000 val_loss:3.5751 svd_entropy: attn_qk:H=0.7450,top10E=0.30,eRank=161.8,q75/q25=58.41 attn_vo:H=0.8053,top10E=0.14,eRank=288.6,q75/q25=inf mlp_w1:H=0.9694,top10E=0.04,eRank=626.9,q75/q25=2.95 mlp_w2:H=0.9656,top10E=0.05,eRank=611.9,q75/q25=3.02 vo_prod:H=0.6402,top10E=0.30,eRank=106.5,q75/q25=inf train_time:783911ms step_avg:95.60ms +[2025-08-22 13:46:17] [Rank 0] PRINT: step:8200/10000 val_loss:3.5751 svd_entropy: attn_qk:H=0.7450,top10E=0.30,eRank=161.8,q75/q25=58.41 attn_vo:H=0.8053,top10E=0.14,eRank=288.6,q75/q25=inf mlp_w1:H=0.9694,top10E=0.04,eRank=626.9,q75/q25=2.95 mlp_w2:H=0.9656,top10E=0.05,eRank=611.9,q75/q25=3.02 vo_prod:H=0.6402,top10E=0.30,eRank=106.5,q75/q25=inf train_time:783911ms step_avg:95.60ms +[2025-08-22 13:46:17] [Rank 0] step:8201/10000 train_time:783931ms step_avg:95.59ms +[2025-08-22 13:46:17] [Rank 0] step:8201/10000 train_time:783931ms step_avg:95.59ms +[2025-08-22 13:46:19] [Rank 0] step:8221/10000 train_time:786005ms step_avg:95.61ms +[2025-08-22 13:46:19] [Rank 0] step:8221/10000 train_time:786005ms step_avg:95.61ms +[2025-08-22 13:46:21] [Rank 0] step:8241/10000 train_time:788024ms step_avg:95.62ms +[2025-08-22 
13:46:21] [Rank 0] step:8241/10000 train_time:788024ms step_avg:95.62ms +[2025-08-22 13:46:23] [Rank 0] step:8261/10000 train_time:790044ms step_avg:95.64ms +[2025-08-22 13:46:23] [Rank 0] step:8261/10000 train_time:790044ms step_avg:95.64ms +[2025-08-22 13:46:25] [Rank 0] step:8281/10000 train_time:792055ms step_avg:95.65ms +[2025-08-22 13:46:25] [Rank 0] step:8281/10000 train_time:792055ms step_avg:95.65ms +[2025-08-22 13:46:27] [Rank 0] step:8301/10000 train_time:794070ms step_avg:95.66ms +[2025-08-22 13:46:27] [Rank 0] step:8301/10000 train_time:794070ms step_avg:95.66ms +[2025-08-22 13:46:29] [Rank 0] step:8321/10000 train_time:796085ms step_avg:95.67ms +[2025-08-22 13:46:29] [Rank 0] step:8321/10000 train_time:796085ms step_avg:95.67ms +[2025-08-22 13:46:31] [Rank 0] step:8341/10000 train_time:798107ms step_avg:95.68ms +[2025-08-22 13:46:31] [Rank 0] step:8341/10000 train_time:798107ms step_avg:95.68ms +[2025-08-22 13:46:33] [Rank 0] step:8361/10000 train_time:800128ms step_avg:95.70ms +[2025-08-22 13:46:33] [Rank 0] step:8361/10000 train_time:800128ms step_avg:95.70ms +[2025-08-22 13:46:35] [Rank 0] step:8381/10000 train_time:802141ms step_avg:95.71ms +[2025-08-22 13:46:35] [Rank 0] step:8381/10000 train_time:802141ms step_avg:95.71ms +[2025-08-22 13:46:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:46:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:46:51] [Rank 0] PRINT: step:8400/10000 val_loss:3.5629 svd_entropy: attn_qk:H=0.7453,top10E=0.30,eRank=162.1,q75/q25=58.67 attn_vo:H=0.8055,top10E=0.14,eRank=288.9,q75/q25=inf mlp_w1:H=0.9694,top10E=0.04,eRank=627.0,q75/q25=2.95 mlp_w2:H=0.9657,top10E=0.05,eRank=612.0,q75/q25=3.02 vo_prod:H=0.6407,top10E=0.29,eRank=106.8,q75/q25=inf train_time:804167ms step_avg:95.73ms +[2025-08-22 13:46:51] [Rank 0] PRINT: step:8400/10000 val_loss:3.5629 svd_entropy: attn_qk:H=0.7453,top10E=0.30,eRank=162.1,q75/q25=58.67 attn_vo:H=0.8055,top10E=0.14,eRank=288.9,q75/q25=inf mlp_w1:H=0.9694,top10E=0.04,eRank=627.0,q75/q25=2.95 mlp_w2:H=0.9657,top10E=0.05,eRank=612.0,q75/q25=3.02 vo_prod:H=0.6407,top10E=0.29,eRank=106.8,q75/q25=inf train_time:804167ms step_avg:95.73ms +[2025-08-22 13:46:51] [Rank 0] step:8401/10000 train_time:804187ms step_avg:95.73ms +[2025-08-22 13:46:51] [Rank 0] step:8401/10000 train_time:804187ms step_avg:95.73ms +[2025-08-22 13:46:53] [Rank 0] step:8421/10000 train_time:806202ms step_avg:95.74ms +[2025-08-22 13:46:53] [Rank 0] step:8421/10000 train_time:806202ms step_avg:95.74ms +[2025-08-22 13:46:55] [Rank 0] step:8441/10000 train_time:808215ms step_avg:95.75ms +[2025-08-22 13:46:55] [Rank 0] step:8441/10000 train_time:808215ms step_avg:95.75ms +[2025-08-22 13:46:57] [Rank 0] step:8461/10000 train_time:810227ms step_avg:95.76ms +[2025-08-22 13:46:57] [Rank 0] step:8461/10000 train_time:810227ms step_avg:95.76ms +[2025-08-22 13:46:59] [Rank 0] step:8481/10000 train_time:812249ms step_avg:95.77ms +[2025-08-22 13:46:59] [Rank 0] step:8481/10000 train_time:812249ms step_avg:95.77ms +[2025-08-22 13:47:01] [Rank 0] step:8501/10000 train_time:814289ms step_avg:95.79ms +[2025-08-22 13:47:01] [Rank 0] step:8501/10000 train_time:814289ms step_avg:95.79ms +[2025-08-22 13:47:03] [Rank 0] step:8521/10000 train_time:816310ms step_avg:95.80ms +[2025-08-22 13:47:03] [Rank 0] step:8521/10000 train_time:816310ms step_avg:95.80ms +[2025-08-22 13:47:05] [Rank 0] 
step:8541/10000 train_time:818344ms step_avg:95.81ms +[2025-08-22 13:47:05] [Rank 0] step:8541/10000 train_time:818344ms step_avg:95.81ms +[2025-08-22 13:47:07] [Rank 0] step:8561/10000 train_time:820371ms step_avg:95.83ms +[2025-08-22 13:47:07] [Rank 0] step:8561/10000 train_time:820371ms step_avg:95.83ms +[2025-08-22 13:47:09] [Rank 0] step:8581/10000 train_time:822398ms step_avg:95.84ms +[2025-08-22 13:47:09] [Rank 0] step:8581/10000 train_time:822398ms step_avg:95.84ms +[2025-08-22 13:47:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:47:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:47:25] [Rank 0] PRINT: step:8600/10000 val_loss:3.5534 svd_entropy: attn_qk:H=0.7456,top10E=0.30,eRank=162.4,q75/q25=58.55 attn_vo:H=0.8058,top10E=0.14,eRank=289.2,q75/q25=inf mlp_w1:H=0.9694,top10E=0.04,eRank=627.1,q75/q25=2.94 mlp_w2:H=0.9657,top10E=0.05,eRank=612.1,q75/q25=3.01 vo_prod:H=0.6413,top10E=0.29,eRank=107.2,q75/q25=inf train_time:824422ms step_avg:95.86ms +[2025-08-22 13:47:25] [Rank 0] PRINT: step:8600/10000 val_loss:3.5534 svd_entropy: attn_qk:H=0.7456,top10E=0.30,eRank=162.4,q75/q25=58.55 attn_vo:H=0.8058,top10E=0.14,eRank=289.2,q75/q25=inf mlp_w1:H=0.9694,top10E=0.04,eRank=627.1,q75/q25=2.94 mlp_w2:H=0.9657,top10E=0.05,eRank=612.1,q75/q25=3.01 vo_prod:H=0.6413,top10E=0.29,eRank=107.2,q75/q25=inf train_time:824422ms step_avg:95.86ms +[2025-08-22 13:47:25] [Rank 0] step:8601/10000 train_time:824443ms step_avg:95.85ms +[2025-08-22 13:47:25] [Rank 0] step:8601/10000 train_time:824443ms step_avg:95.85ms +[2025-08-22 13:47:27] [Rank 0] step:8621/10000 train_time:826460ms step_avg:95.87ms +[2025-08-22 13:47:27] [Rank 0] step:8621/10000 train_time:826460ms step_avg:95.87ms +[2025-08-22 13:47:29] [Rank 0] step:8641/10000 train_time:828474ms step_avg:95.88ms +[2025-08-22 
13:47:29] [Rank 0] step:8641/10000 train_time:828474ms step_avg:95.88ms +[2025-08-22 13:47:31] [Rank 0] step:8661/10000 train_time:830493ms step_avg:95.89ms +[2025-08-22 13:47:31] [Rank 0] step:8661/10000 train_time:830493ms step_avg:95.89ms +[2025-08-22 13:47:33] [Rank 0] step:8681/10000 train_time:832517ms step_avg:95.90ms +[2025-08-22 13:47:33] [Rank 0] step:8681/10000 train_time:832517ms step_avg:95.90ms +[2025-08-22 13:47:35] [Rank 0] step:8701/10000 train_time:834530ms step_avg:95.91ms +[2025-08-22 13:47:35] [Rank 0] step:8701/10000 train_time:834530ms step_avg:95.91ms +[2025-08-22 13:47:37] [Rank 0] step:8721/10000 train_time:836552ms step_avg:95.92ms +[2025-08-22 13:47:37] [Rank 0] step:8721/10000 train_time:836552ms step_avg:95.92ms +[2025-08-22 13:47:39] [Rank 0] step:8741/10000 train_time:838567ms step_avg:95.93ms +[2025-08-22 13:47:39] [Rank 0] step:8741/10000 train_time:838567ms step_avg:95.93ms +[2025-08-22 13:47:41] [Rank 0] step:8761/10000 train_time:840588ms step_avg:95.95ms +[2025-08-22 13:47:41] [Rank 0] step:8761/10000 train_time:840588ms step_avg:95.95ms +[2025-08-22 13:47:43] [Rank 0] step:8781/10000 train_time:842615ms step_avg:95.96ms +[2025-08-22 13:47:43] [Rank 0] step:8781/10000 train_time:842615ms step_avg:95.96ms +[2025-08-22 13:47:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:47:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:47:59] [Rank 0] PRINT: step:8800/10000 val_loss:3.5403 svd_entropy: attn_qk:H=0.7458,top10E=0.30,eRank=162.6,q75/q25=58.64 attn_vo:H=0.8060,top10E=0.14,eRank=289.5,q75/q25=inf mlp_w1:H=0.9694,top10E=0.04,eRank=627.1,q75/q25=2.94 mlp_w2:H=0.9657,top10E=0.05,eRank=612.2,q75/q25=3.01 vo_prod:H=0.6419,top10E=0.29,eRank=107.6,q75/q25=inf train_time:844641ms step_avg:95.98ms +[2025-08-22 13:47:59] [Rank 0] PRINT: step:8800/10000 val_loss:3.5403 svd_entropy: attn_qk:H=0.7458,top10E=0.30,eRank=162.6,q75/q25=58.64 attn_vo:H=0.8060,top10E=0.14,eRank=289.5,q75/q25=inf mlp_w1:H=0.9694,top10E=0.04,eRank=627.1,q75/q25=2.94 mlp_w2:H=0.9657,top10E=0.05,eRank=612.2,q75/q25=3.01 vo_prod:H=0.6419,top10E=0.29,eRank=107.6,q75/q25=inf train_time:844641ms step_avg:95.98ms +[2025-08-22 13:47:59] [Rank 0] step:8801/10000 train_time:844662ms step_avg:95.97ms +[2025-08-22 13:47:59] [Rank 0] step:8801/10000 train_time:844662ms step_avg:95.97ms +[2025-08-22 13:48:01] [Rank 0] step:8821/10000 train_time:846664ms step_avg:95.98ms +[2025-08-22 13:48:01] [Rank 0] step:8821/10000 train_time:846664ms step_avg:95.98ms +[2025-08-22 13:48:03] [Rank 0] step:8841/10000 train_time:848703ms step_avg:96.00ms +[2025-08-22 13:48:03] [Rank 0] step:8841/10000 train_time:848703ms step_avg:96.00ms +[2025-08-22 13:48:05] [Rank 0] step:8861/10000 train_time:850716ms step_avg:96.01ms +[2025-08-22 13:48:05] [Rank 0] step:8861/10000 train_time:850716ms step_avg:96.01ms +[2025-08-22 13:48:07] [Rank 0] step:8881/10000 train_time:852731ms step_avg:96.02ms +[2025-08-22 13:48:07] [Rank 0] step:8881/10000 train_time:852731ms step_avg:96.02ms +[2025-08-22 13:48:09] [Rank 0] step:8901/10000 train_time:854752ms step_avg:96.03ms +[2025-08-22 13:48:09] [Rank 0] step:8901/10000 train_time:854752ms step_avg:96.03ms +[2025-08-22 13:48:11] [Rank 0] step:8921/10000 train_time:856788ms step_avg:96.04ms +[2025-08-22 13:48:11] [Rank 0] step:8921/10000 train_time:856788ms step_avg:96.04ms +[2025-08-22 13:48:13] [Rank 0] 
step:8941/10000 train_time:858817ms step_avg:96.05ms +[2025-08-22 13:48:13] [Rank 0] step:8941/10000 train_time:858817ms step_avg:96.05ms +[2025-08-22 13:48:15] [Rank 0] step:8961/10000 train_time:860840ms step_avg:96.07ms +[2025-08-22 13:48:15] [Rank 0] step:8961/10000 train_time:860840ms step_avg:96.07ms +[2025-08-22 13:48:17] [Rank 0] step:8981/10000 train_time:862861ms step_avg:96.08ms +[2025-08-22 13:48:17] [Rank 0] step:8981/10000 train_time:862861ms step_avg:96.08ms +[2025-08-22 13:48:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:48:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:48:33] [Rank 0] PRINT: step:9000/10000 val_loss:3.5289 svd_entropy: attn_qk:H=0.7460,top10E=0.30,eRank=162.7,q75/q25=58.41 attn_vo:H=0.8062,top10E=0.14,eRank=289.7,q75/q25=inf mlp_w1:H=0.9694,top10E=0.04,eRank=627.2,q75/q25=2.94 mlp_w2:H=0.9657,top10E=0.05,eRank=612.3,q75/q25=3.01 vo_prod:H=0.6424,top10E=0.29,eRank=107.9,q75/q25=inf train_time:864885ms step_avg:96.10ms +[2025-08-22 13:48:33] [Rank 0] PRINT: step:9000/10000 val_loss:3.5289 svd_entropy: attn_qk:H=0.7460,top10E=0.30,eRank=162.7,q75/q25=58.41 attn_vo:H=0.8062,top10E=0.14,eRank=289.7,q75/q25=inf mlp_w1:H=0.9694,top10E=0.04,eRank=627.2,q75/q25=2.94 mlp_w2:H=0.9657,top10E=0.05,eRank=612.3,q75/q25=3.01 vo_prod:H=0.6424,top10E=0.29,eRank=107.9,q75/q25=inf train_time:864885ms step_avg:96.10ms +[2025-08-22 13:48:33] [Rank 0] step:9001/10000 train_time:864904ms step_avg:96.09ms +[2025-08-22 13:48:33] [Rank 0] step:9001/10000 train_time:864904ms step_avg:96.09ms +[2025-08-22 13:48:35] [Rank 0] step:9021/10000 train_time:866931ms step_avg:96.10ms +[2025-08-22 13:48:35] [Rank 0] step:9021/10000 train_time:866931ms step_avg:96.10ms +[2025-08-22 13:48:37] [Rank 0] step:9041/10000 train_time:868950ms step_avg:96.11ms +[2025-08-22 
13:48:37] [Rank 0] step:9041/10000 train_time:868950ms step_avg:96.11ms +[2025-08-22 13:48:39] [Rank 0] step:9061/10000 train_time:870973ms step_avg:96.12ms +[2025-08-22 13:48:39] [Rank 0] step:9061/10000 train_time:870973ms step_avg:96.12ms +[2025-08-22 13:48:41] [Rank 0] step:9081/10000 train_time:872999ms step_avg:96.13ms +[2025-08-22 13:48:41] [Rank 0] step:9081/10000 train_time:872999ms step_avg:96.13ms +[2025-08-22 13:48:43] [Rank 0] step:9101/10000 train_time:875031ms step_avg:96.15ms +[2025-08-22 13:48:43] [Rank 0] step:9101/10000 train_time:875031ms step_avg:96.15ms +[2025-08-22 13:48:45] [Rank 0] step:9121/10000 train_time:877053ms step_avg:96.16ms +[2025-08-22 13:48:45] [Rank 0] step:9121/10000 train_time:877053ms step_avg:96.16ms +[2025-08-22 13:48:47] [Rank 0] step:9141/10000 train_time:879064ms step_avg:96.17ms +[2025-08-22 13:48:47] [Rank 0] step:9141/10000 train_time:879064ms step_avg:96.17ms +[2025-08-22 13:48:49] [Rank 0] step:9161/10000 train_time:881077ms step_avg:96.18ms +[2025-08-22 13:48:49] [Rank 0] step:9161/10000 train_time:881077ms step_avg:96.18ms +[2025-08-22 13:48:51] [Rank 0] step:9181/10000 train_time:883133ms step_avg:96.19ms +[2025-08-22 13:48:51] [Rank 0] step:9181/10000 train_time:883133ms step_avg:96.19ms +[2025-08-22 13:48:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:48:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:49:07] [Rank 0] PRINT: step:9200/10000 val_loss:3.5201 svd_entropy: attn_qk:H=0.7462,top10E=0.30,eRank=162.9,q75/q25=58.29 attn_vo:H=0.8064,top10E=0.14,eRank=290.0,q75/q25=inf mlp_w1:H=0.9694,top10E=0.04,eRank=627.3,q75/q25=2.94 mlp_w2:H=0.9658,top10E=0.05,eRank=612.4,q75/q25=3.01 vo_prod:H=0.6429,top10E=0.29,eRank=108.2,q75/q25=inf train_time:885154ms step_avg:96.21ms +[2025-08-22 13:49:07] [Rank 0] PRINT: step:9200/10000 val_loss:3.5201 svd_entropy: attn_qk:H=0.7462,top10E=0.30,eRank=162.9,q75/q25=58.29 attn_vo:H=0.8064,top10E=0.14,eRank=290.0,q75/q25=inf mlp_w1:H=0.9694,top10E=0.04,eRank=627.3,q75/q25=2.94 mlp_w2:H=0.9658,top10E=0.05,eRank=612.4,q75/q25=3.01 vo_prod:H=0.6429,top10E=0.29,eRank=108.2,q75/q25=inf train_time:885154ms step_avg:96.21ms +[2025-08-22 13:49:07] [Rank 0] step:9201/10000 train_time:885176ms step_avg:96.20ms +[2025-08-22 13:49:07] [Rank 0] step:9201/10000 train_time:885176ms step_avg:96.20ms +[2025-08-22 13:49:09] [Rank 0] step:9221/10000 train_time:887209ms step_avg:96.22ms +[2025-08-22 13:49:09] [Rank 0] step:9221/10000 train_time:887209ms step_avg:96.22ms +[2025-08-22 13:49:11] [Rank 0] step:9241/10000 train_time:889234ms step_avg:96.23ms +[2025-08-22 13:49:11] [Rank 0] step:9241/10000 train_time:889234ms step_avg:96.23ms +[2025-08-22 13:49:13] [Rank 0] step:9261/10000 train_time:891259ms step_avg:96.24ms +[2025-08-22 13:49:13] [Rank 0] step:9261/10000 train_time:891259ms step_avg:96.24ms +[2025-08-22 13:49:15] [Rank 0] step:9281/10000 train_time:893269ms step_avg:96.25ms +[2025-08-22 13:49:15] [Rank 0] step:9281/10000 train_time:893269ms step_avg:96.25ms +[2025-08-22 13:49:17] [Rank 0] step:9301/10000 train_time:895284ms step_avg:96.26ms +[2025-08-22 13:49:17] [Rank 0] step:9301/10000 train_time:895284ms step_avg:96.26ms +[2025-08-22 13:49:19] [Rank 0] step:9321/10000 train_time:897307ms step_avg:96.27ms +[2025-08-22 13:49:19] [Rank 0] step:9321/10000 train_time:897307ms step_avg:96.27ms +[2025-08-22 13:49:21] [Rank 0] 
step:9341/10000 train_time:899329ms step_avg:96.28ms +[2025-08-22 13:49:21] [Rank 0] step:9341/10000 train_time:899329ms step_avg:96.28ms +[2025-08-22 13:49:23] [Rank 0] step:9361/10000 train_time:901357ms step_avg:96.29ms +[2025-08-22 13:49:23] [Rank 0] step:9361/10000 train_time:901357ms step_avg:96.29ms +[2025-08-22 13:49:25] [Rank 0] step:9381/10000 train_time:903546ms step_avg:96.32ms +[2025-08-22 13:49:25] [Rank 0] step:9381/10000 train_time:903546ms step_avg:96.32ms +[2025-08-22 13:49:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:49:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:49:41] [Rank 0] PRINT: step:9400/10000 val_loss:3.5108 svd_entropy: attn_qk:H=0.7464,top10E=0.30,eRank=163.0,q75/q25=58.15 attn_vo:H=0.8065,top10E=0.14,eRank=290.2,q75/q25=inf mlp_w1:H=0.9694,top10E=0.04,eRank=627.3,q75/q25=2.94 mlp_w2:H=0.9658,top10E=0.05,eRank=612.5,q75/q25=3.01 vo_prod:H=0.6433,top10E=0.29,eRank=108.6,q75/q25=inf train_time:905580ms step_avg:96.34ms +[2025-08-22 13:49:41] [Rank 0] PRINT: step:9400/10000 val_loss:3.5108 svd_entropy: attn_qk:H=0.7464,top10E=0.30,eRank=163.0,q75/q25=58.15 attn_vo:H=0.8065,top10E=0.14,eRank=290.2,q75/q25=inf mlp_w1:H=0.9694,top10E=0.04,eRank=627.3,q75/q25=2.94 mlp_w2:H=0.9658,top10E=0.05,eRank=612.5,q75/q25=3.01 vo_prod:H=0.6433,top10E=0.29,eRank=108.6,q75/q25=inf train_time:905580ms step_avg:96.34ms +[2025-08-22 13:49:41] [Rank 0] step:9401/10000 train_time:905602ms step_avg:96.33ms +[2025-08-22 13:49:41] [Rank 0] step:9401/10000 train_time:905602ms step_avg:96.33ms +[2025-08-22 13:49:43] [Rank 0] step:9421/10000 train_time:907618ms step_avg:96.34ms +[2025-08-22 13:49:43] [Rank 0] step:9421/10000 train_time:907618ms step_avg:96.34ms +[2025-08-22 13:49:45] [Rank 0] step:9441/10000 train_time:909636ms step_avg:96.35ms +[2025-08-22 
13:49:45] [Rank 0] step:9441/10000 train_time:909636ms step_avg:96.35ms +[2025-08-22 13:49:47] [Rank 0] step:9461/10000 train_time:911663ms step_avg:96.36ms +[2025-08-22 13:49:47] [Rank 0] step:9461/10000 train_time:911663ms step_avg:96.36ms +[2025-08-22 13:49:49] [Rank 0] step:9481/10000 train_time:913691ms step_avg:96.37ms +[2025-08-22 13:49:49] [Rank 0] step:9481/10000 train_time:913691ms step_avg:96.37ms +[2025-08-22 13:49:51] [Rank 0] step:9501/10000 train_time:915720ms step_avg:96.38ms +[2025-08-22 13:49:51] [Rank 0] step:9501/10000 train_time:915720ms step_avg:96.38ms +[2025-08-22 13:49:53] [Rank 0] step:9521/10000 train_time:917735ms step_avg:96.39ms +[2025-08-22 13:49:53] [Rank 0] step:9521/10000 train_time:917735ms step_avg:96.39ms +[2025-08-22 13:49:55] [Rank 0] step:9541/10000 train_time:919756ms step_avg:96.40ms +[2025-08-22 13:49:55] [Rank 0] step:9541/10000 train_time:919756ms step_avg:96.40ms +[2025-08-22 13:49:57] [Rank 0] step:9561/10000 train_time:921775ms step_avg:96.41ms +[2025-08-22 13:49:57] [Rank 0] step:9561/10000 train_time:921775ms step_avg:96.41ms +[2025-08-22 13:49:59] [Rank 0] step:9581/10000 train_time:923797ms step_avg:96.42ms +[2025-08-22 13:49:59] [Rank 0] step:9581/10000 train_time:923797ms step_avg:96.42ms +[2025-08-22 13:50:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:50:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:50:15] [Rank 0] PRINT: step:9600/10000 val_loss:3.5020 svd_entropy: attn_qk:H=0.7466,top10E=0.30,eRank=163.2,q75/q25=58.26 attn_vo:H=0.8067,top10E=0.14,eRank=290.4,q75/q25=inf mlp_w1:H=0.9695,top10E=0.04,eRank=627.3,q75/q25=2.94 mlp_w2:H=0.9658,top10E=0.05,eRank=612.6,q75/q25=3.01 vo_prod:H=0.6437,top10E=0.29,eRank=108.9,q75/q25=inf train_time:925838ms step_avg:96.44ms +[2025-08-22 13:50:15] [Rank 0] PRINT: step:9600/10000 val_loss:3.5020 svd_entropy: attn_qk:H=0.7466,top10E=0.30,eRank=163.2,q75/q25=58.26 attn_vo:H=0.8067,top10E=0.14,eRank=290.4,q75/q25=inf mlp_w1:H=0.9695,top10E=0.04,eRank=627.3,q75/q25=2.94 mlp_w2:H=0.9658,top10E=0.05,eRank=612.6,q75/q25=3.01 vo_prod:H=0.6437,top10E=0.29,eRank=108.9,q75/q25=inf train_time:925838ms step_avg:96.44ms +[2025-08-22 13:50:15] [Rank 0] step:9601/10000 train_time:925859ms step_avg:96.43ms +[2025-08-22 13:50:15] [Rank 0] step:9601/10000 train_time:925859ms step_avg:96.43ms +[2025-08-22 13:50:17] [Rank 0] step:9621/10000 train_time:927872ms step_avg:96.44ms +[2025-08-22 13:50:17] [Rank 0] step:9621/10000 train_time:927872ms step_avg:96.44ms +[2025-08-22 13:50:19] [Rank 0] step:9641/10000 train_time:929901ms step_avg:96.45ms +[2025-08-22 13:50:19] [Rank 0] step:9641/10000 train_time:929901ms step_avg:96.45ms +[2025-08-22 13:50:21] [Rank 0] step:9661/10000 train_time:931956ms step_avg:96.47ms +[2025-08-22 13:50:21] [Rank 0] step:9661/10000 train_time:931956ms step_avg:96.47ms +[2025-08-22 13:50:23] [Rank 0] step:9681/10000 train_time:934004ms step_avg:96.48ms +[2025-08-22 13:50:23] [Rank 0] step:9681/10000 train_time:934004ms step_avg:96.48ms +[2025-08-22 13:50:25] [Rank 0] step:9701/10000 train_time:936065ms step_avg:96.49ms +[2025-08-22 13:50:25] [Rank 0] step:9701/10000 train_time:936065ms step_avg:96.49ms +[2025-08-22 13:50:27] [Rank 0] step:9721/10000 train_time:938115ms step_avg:96.50ms +[2025-08-22 13:50:27] [Rank 0] step:9721/10000 train_time:938115ms step_avg:96.50ms +[2025-08-22 13:50:30] [Rank 0] 
step:9741/10000 train_time:940251ms step_avg:96.53ms +[2025-08-22 13:50:30] [Rank 0] step:9741/10000 train_time:940251ms step_avg:96.53ms +[2025-08-22 13:50:32] [Rank 0] step:9761/10000 train_time:942367ms step_avg:96.54ms +[2025-08-22 13:50:32] [Rank 0] step:9761/10000 train_time:942367ms step_avg:96.54ms +[2025-08-22 13:50:34] [Rank 0] step:9781/10000 train_time:944429ms step_avg:96.56ms +[2025-08-22 13:50:34] [Rank 0] step:9781/10000 train_time:944429ms step_avg:96.56ms +[2025-08-22 13:50:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:50:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:50:49] [Rank 0] PRINT: step:9800/10000 val_loss:3.4937 svd_entropy: attn_qk:H=0.7467,top10E=0.30,eRank=163.2,q75/q25=58.19 attn_vo:H=0.8068,top10E=0.14,eRank=290.5,q75/q25=inf mlp_w1:H=0.9695,top10E=0.04,eRank=627.4,q75/q25=2.94 mlp_w2:H=0.9658,top10E=0.05,eRank=612.6,q75/q25=3.01 vo_prod:H=0.6440,top10E=0.29,eRank=109.1,q75/q25=inf train_time:946505ms step_avg:96.58ms +[2025-08-22 13:50:49] [Rank 0] PRINT: step:9800/10000 val_loss:3.4937 svd_entropy: attn_qk:H=0.7467,top10E=0.30,eRank=163.2,q75/q25=58.19 attn_vo:H=0.8068,top10E=0.14,eRank=290.5,q75/q25=inf mlp_w1:H=0.9695,top10E=0.04,eRank=627.4,q75/q25=2.94 mlp_w2:H=0.9658,top10E=0.05,eRank=612.6,q75/q25=3.01 vo_prod:H=0.6440,top10E=0.29,eRank=109.1,q75/q25=inf train_time:946505ms step_avg:96.58ms +[2025-08-22 13:50:50] [Rank 0] step:9801/10000 train_time:946526ms step_avg:96.57ms +[2025-08-22 13:50:50] [Rank 0] step:9801/10000 train_time:946526ms step_avg:96.57ms +[2025-08-22 13:50:52] [Rank 0] step:9821/10000 train_time:948576ms step_avg:96.59ms +[2025-08-22 13:50:52] [Rank 0] step:9821/10000 train_time:948576ms step_avg:96.59ms +[2025-08-22 13:50:54] [Rank 0] step:9841/10000 train_time:950631ms step_avg:96.60ms +[2025-08-22 
13:50:54] [Rank 0] step:9841/10000 train_time:950631ms step_avg:96.60ms +[2025-08-22 13:50:56] [Rank 0] step:9861/10000 train_time:952669ms step_avg:96.61ms +[2025-08-22 13:50:56] [Rank 0] step:9861/10000 train_time:952669ms step_avg:96.61ms +[2025-08-22 13:50:58] [Rank 0] step:9881/10000 train_time:954708ms step_avg:96.62ms +[2025-08-22 13:50:58] [Rank 0] step:9881/10000 train_time:954708ms step_avg:96.62ms +[2025-08-22 13:51:00] [Rank 0] step:9901/10000 train_time:956763ms step_avg:96.63ms +[2025-08-22 13:51:00] [Rank 0] step:9901/10000 train_time:956763ms step_avg:96.63ms +[2025-08-22 13:51:02] [Rank 0] step:9921/10000 train_time:958803ms step_avg:96.64ms +[2025-08-22 13:51:02] [Rank 0] step:9921/10000 train_time:958803ms step_avg:96.64ms +[2025-08-22 13:51:04] [Rank 0] step:9941/10000 train_time:960861ms step_avg:96.66ms +[2025-08-22 13:51:04] [Rank 0] step:9941/10000 train_time:960861ms step_avg:96.66ms +[2025-08-22 13:51:06] [Rank 0] step:9961/10000 train_time:962902ms step_avg:96.67ms +[2025-08-22 13:51:06] [Rank 0] step:9961/10000 train_time:962902ms step_avg:96.67ms +[2025-08-22 13:51:08] [Rank 0] step:9981/10000 train_time:964955ms step_avg:96.68ms +[2025-08-22 13:51:08] [Rank 0] step:9981/10000 train_time:964955ms step_avg:96.68ms +[2025-08-22 13:51:10] [Rank 0] step:10000/10000 train_time:966911ms step_avg:96.69ms +[2025-08-22 13:51:10] [Rank 0] step:10000/10000 train_time:966911ms step_avg:96.69ms +[2025-08-22 13:51:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:51:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:51:24] [Rank 0] PRINT: step:10000/10000 val_loss:3.4867 svd_entropy: attn_qk:H=0.7467,top10E=0.30,eRank=163.3,q75/q25=58.16 attn_vo:H=0.8069,top10E=0.14,eRank=290.6,q75/q25=inf mlp_w1:H=0.9695,top10E=0.04,eRank=627.4,q75/q25=2.94 mlp_w2:H=0.9658,top10E=0.05,eRank=612.6,q75/q25=3.01 vo_prod:H=0.6443,top10E=0.29,eRank=109.3,q75/q25=inf train_time:967024ms step_avg:96.70ms +[2025-08-22 13:51:24] [Rank 0] PRINT: step:10000/10000 val_loss:3.4867 svd_entropy: attn_qk:H=0.7467,top10E=0.30,eRank=163.3,q75/q25=58.16 attn_vo:H=0.8069,top10E=0.14,eRank=290.6,q75/q25=inf mlp_w1:H=0.9695,top10E=0.04,eRank=627.4,q75/q25=2.94 mlp_w2:H=0.9658,top10E=0.05,eRank=612.6,q75/q25=3.01 vo_prod:H=0.6443,top10E=0.29,eRank=109.3,q75/q25=inf train_time:967024ms step_avg:96.70ms +[2025-08-22 13:51:24] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 13:51:24 2025 --- +[2025-08-22 13:51:24] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 13:51:24 2025 --- +[2025-08-22 13:51:24] [Rank 0] PRINT: Peak memory allocated: 11166 MiB reserved: 16916 MiB +[2025-08-22 13:51:24] [Rank 0] PRINT: Peak memory allocated: 11166 MiB reserved: 16916 MiB diff --git a/logs_svd_gated/mode_10_param_gated_seed_42/config.json b/logs_svd_gated/mode_10_param_gated_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..2fdbf687dce0da9d9f52b1a7e58435cae1dbe0fe --- /dev/null +++ b/logs_svd_gated/mode_10_param_gated_seed_42/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 10, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "e4e1a1fd-8b73-4e5a-b595-6aa9f71b5732", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_10_param_gated_seed_42/training_log_e4e1a1fd-8b73-4e5a-b595-6aa9f71b5732.txt b/logs_svd_gated/mode_10_param_gated_seed_42/training_log_e4e1a1fd-8b73-4e5a-b595-6aa9f71b5732.txt new file mode 100644 index 0000000000000000000000000000000000000000..8f728791db8bd707ca8ec41eeb40d9cebec08ad7 --- /dev/null +++ b/logs_svd_gated/mode_10_param_gated_seed_42/training_log_e4e1a1fd-8b73-4e5a-b595-6aa9f71b5732.txt @@ -0,0 +1,2926 @@ +[2025-08-22 18:36:11] [Rank 0] PRINT: --- Script Start: Fri Aug 22 18:36:11 2025 --- +[2025-08-22 18:36:11] [Rank 0] PRINT: --- Script Start: Fri Aug 22 18:36:11 2025 --- +[2025-08-22 18:36:11] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=10, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 18:36:11] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=10, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 18:36:11] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 18:36:11] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 18:36:11] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 18:36:11] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 18:36:11] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_10_param_gated_seed_42 +[2025-08-22 18:36:11] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_10_param_gated_seed_42 +[2025-08-22 18:36:11] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib 
import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert 
nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = 
float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 18:36:11] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 18:36:11] [Rank 0] PRINT: Constructing model... +[2025-08-22 18:36:11] [Rank 0] PRINT: Constructing model... +[2025-08-22 18:36:13] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 18:36:13] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 18:36:13] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 18:36:13] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 18:36:13] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 18:36:13] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 18:36:13] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 10 +[2025-08-22 18:36:13] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 10 +[2025-08-22 18:36:13] [Rank 0] PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: 0.05). +[2025-08-22 18:36:13] [Rank 0] PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: 0.05). +[2025-08-22 18:36:13] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 18:36:13] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 18:36:13] [Rank 0] PRINT: Muon optimizer is active with 47 parameters. +[2025-08-22 18:36:13] [Rank 0] PRINT: Muon optimizer is active with 47 parameters. +[2025-08-22 18:36:13] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 18:36:13] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 18:36:13] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 18:36:13] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 18:36:13] [Rank 0] PRINT: Starting warmup... +[2025-08-22 18:36:13] [Rank 0] PRINT: Starting warmup... +[2025-08-22 18:36:57] [Rank 0] PRINT: Warmup complete. +[2025-08-22 18:36:57] [Rank 0] PRINT: Warmup complete. +[2025-08-22 18:36:57] [Rank 0] PRINT: Starting training... +[2025-08-22 18:36:57] [Rank 0] PRINT: Starting training... 
+[2025-08-22 18:36:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:36:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:37:15] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 18:37:15] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 18:37:17] [Rank 0] step:21/10000 train_time:1855ms step_avg:88.34ms +[2025-08-22 18:37:17] [Rank 0] step:21/10000 train_time:1855ms step_avg:88.34ms +[2025-08-22 18:37:18] [Rank 0] step:41/10000 train_time:3649ms step_avg:89.00ms +[2025-08-22 18:37:18] [Rank 0] step:41/10000 train_time:3649ms step_avg:89.00ms +[2025-08-22 18:37:20] [Rank 0] step:61/10000 train_time:5444ms step_avg:89.25ms +[2025-08-22 18:37:20] [Rank 0] step:61/10000 train_time:5444ms step_avg:89.25ms +[2025-08-22 18:37:22] [Rank 0] step:81/10000 train_time:7241ms step_avg:89.40ms +[2025-08-22 18:37:22] [Rank 0] step:81/10000 train_time:7241ms step_avg:89.40ms +[2025-08-22 18:37:24] [Rank 0] step:101/10000 train_time:9040ms step_avg:89.51ms +[2025-08-22 18:37:24] [Rank 0] step:101/10000 train_time:9040ms step_avg:89.51ms +[2025-08-22 18:37:26] [Rank 0] step:121/10000 train_time:10841ms step_avg:89.59ms +[2025-08-22 18:37:26] [Rank 0] step:121/10000 
train_time:10841ms step_avg:89.59ms +[2025-08-22 18:37:27] [Rank 0] step:141/10000 train_time:12641ms step_avg:89.65ms +[2025-08-22 18:37:27] [Rank 0] step:141/10000 train_time:12641ms step_avg:89.65ms +[2025-08-22 18:37:29] [Rank 0] step:161/10000 train_time:14442ms step_avg:89.70ms +[2025-08-22 18:37:29] [Rank 0] step:161/10000 train_time:14442ms step_avg:89.70ms +[2025-08-22 18:37:31] [Rank 0] step:181/10000 train_time:16244ms step_avg:89.75ms +[2025-08-22 18:37:31] [Rank 0] step:181/10000 train_time:16244ms step_avg:89.75ms +[2025-08-22 18:37:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:37:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:37:46] [Rank 0] PRINT: step:200/10000 val_loss:5.4467 svd_entropy: attn_qk:H=0.6350,top10E=0.50,eRank=95.0,q75/q25=42.48 attn_vo:H=0.6781,top10E=0.34,eRank=183.6,q75/q25=inf mlp_w1:H=0.9674,top10E=0.04,eRank=618.7,q75/q25=3.26 mlp_w2:H=0.9638,top10E=0.04,eRank=604.3,q75/q25=3.54 vo_prod:H=0.4319,top10E=0.65,eRank=22.8,q75/q25=inf train_time:18053ms step_avg:90.27ms +[2025-08-22 18:37:46] [Rank 0] PRINT: step:200/10000 val_loss:5.4467 svd_entropy: attn_qk:H=0.6350,top10E=0.50,eRank=95.0,q75/q25=42.48 attn_vo:H=0.6781,top10E=0.34,eRank=183.6,q75/q25=inf mlp_w1:H=0.9674,top10E=0.04,eRank=618.7,q75/q25=3.26 mlp_w2:H=0.9638,top10E=0.04,eRank=604.3,q75/q25=3.54 vo_prod:H=0.4319,top10E=0.65,eRank=22.8,q75/q25=inf train_time:18053ms step_avg:90.27ms +[2025-08-22 18:37:47] [Rank 0] step:201/10000 train_time:18074ms step_avg:89.92ms +[2025-08-22 18:37:47] [Rank 0] step:201/10000 train_time:18074ms step_avg:89.92ms +[2025-08-22 18:37:48] [Rank 0] step:221/10000 train_time:19876ms step_avg:89.94ms +[2025-08-22 18:37:48] [Rank 0] step:221/10000 train_time:19876ms step_avg:89.94ms +[2025-08-22 18:37:50] [Rank 0] step:241/10000 train_time:21675ms 
step_avg:89.94ms +[2025-08-22 18:37:50] [Rank 0] step:241/10000 train_time:21675ms step_avg:89.94ms +[2025-08-22 18:37:52] [Rank 0] step:261/10000 train_time:23475ms step_avg:89.94ms +[2025-08-22 18:37:52] [Rank 0] step:261/10000 train_time:23475ms step_avg:89.94ms +[2025-08-22 18:37:54] [Rank 0] step:281/10000 train_time:25276ms step_avg:89.95ms +[2025-08-22 18:37:54] [Rank 0] step:281/10000 train_time:25276ms step_avg:89.95ms +[2025-08-22 18:37:56] [Rank 0] step:301/10000 train_time:27077ms step_avg:89.96ms +[2025-08-22 18:37:56] [Rank 0] step:301/10000 train_time:27077ms step_avg:89.96ms +[2025-08-22 18:37:57] [Rank 0] step:321/10000 train_time:28879ms step_avg:89.96ms +[2025-08-22 18:37:57] [Rank 0] step:321/10000 train_time:28879ms step_avg:89.96ms +[2025-08-22 18:37:59] [Rank 0] step:341/10000 train_time:30682ms step_avg:89.98ms +[2025-08-22 18:37:59] [Rank 0] step:341/10000 train_time:30682ms step_avg:89.98ms +[2025-08-22 18:38:01] [Rank 0] step:361/10000 train_time:32485ms step_avg:89.98ms +[2025-08-22 18:38:01] [Rank 0] step:361/10000 train_time:32485ms step_avg:89.98ms +[2025-08-22 18:38:03] [Rank 0] step:381/10000 train_time:34289ms step_avg:90.00ms +[2025-08-22 18:38:03] [Rank 0] step:381/10000 train_time:34289ms step_avg:90.00ms +[2025-08-22 18:38:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:38:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:38:18] [Rank 0] PRINT: step:400/10000 val_loss:5.0572 svd_entropy: attn_qk:H=0.6776,top10E=0.41,eRank=114.2,q75/q25=54.66 attn_vo:H=0.7494,top10E=0.24,eRank=235.7,q75/q25=inf mlp_w1:H=0.9668,top10E=0.04,eRank=616.2,q75/q25=3.28 mlp_w2:H=0.9654,top10E=0.04,eRank=610.5,q75/q25=3.36 vo_prod:H=0.5301,top10E=0.50,eRank=45.7,q75/q25=inf train_time:36098ms step_avg:90.25ms +[2025-08-22 18:38:18] [Rank 0] PRINT: step:400/10000 val_loss:5.0572 svd_entropy: attn_qk:H=0.6776,top10E=0.41,eRank=114.2,q75/q25=54.66 attn_vo:H=0.7494,top10E=0.24,eRank=235.7,q75/q25=inf mlp_w1:H=0.9668,top10E=0.04,eRank=616.2,q75/q25=3.28 mlp_w2:H=0.9654,top10E=0.04,eRank=610.5,q75/q25=3.36 vo_prod:H=0.5301,top10E=0.50,eRank=45.7,q75/q25=inf train_time:36098ms step_avg:90.25ms +[2025-08-22 18:38:18] [Rank 0] step:401/10000 train_time:36118ms step_avg:90.07ms +[2025-08-22 18:38:18] [Rank 0] step:401/10000 train_time:36118ms step_avg:90.07ms +[2025-08-22 18:38:20] [Rank 0] step:421/10000 train_time:37919ms step_avg:90.07ms +[2025-08-22 18:38:20] [Rank 0] step:421/10000 train_time:37919ms step_avg:90.07ms +[2025-08-22 18:38:22] [Rank 0] step:441/10000 train_time:39715ms step_avg:90.06ms +[2025-08-22 18:38:22] [Rank 0] step:441/10000 train_time:39715ms step_avg:90.06ms +[2025-08-22 18:38:24] [Rank 0] step:461/10000 train_time:41513ms step_avg:90.05ms +[2025-08-22 18:38:24] [Rank 0] step:461/10000 train_time:41513ms step_avg:90.05ms +[2025-08-22 18:38:25] [Rank 0] step:481/10000 train_time:43312ms step_avg:90.05ms +[2025-08-22 18:38:25] [Rank 0] step:481/10000 train_time:43312ms step_avg:90.05ms +[2025-08-22 18:38:27] [Rank 0] step:501/10000 train_time:45111ms step_avg:90.04ms +[2025-08-22 18:38:27] [Rank 0] step:501/10000 train_time:45111ms step_avg:90.04ms +[2025-08-22 18:38:29] [Rank 0] step:521/10000 train_time:46911ms step_avg:90.04ms +[2025-08-22 18:38:29] [Rank 0] step:521/10000 train_time:46911ms step_avg:90.04ms +[2025-08-22 18:38:31] [Rank 0] step:541/10000 train_time:48714ms 
step_avg:90.04ms +[2025-08-22 18:38:31] [Rank 0] step:541/10000 train_time:48714ms step_avg:90.04ms +[2025-08-22 18:38:33] [Rank 0] step:561/10000 train_time:50518ms step_avg:90.05ms +[2025-08-22 18:38:33] [Rank 0] step:561/10000 train_time:50518ms step_avg:90.05ms +[2025-08-22 18:38:34] [Rank 0] step:581/10000 train_time:52324ms step_avg:90.06ms +[2025-08-22 18:38:34] [Rank 0] step:581/10000 train_time:52324ms step_avg:90.06ms +[2025-08-22 18:38:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:38:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:38:50] [Rank 0] PRINT: step:600/10000 val_loss:4.8372 svd_entropy: attn_qk:H=0.6851,top10E=0.39,eRank=119.0,q75/q25=60.11 attn_vo:H=0.7660,top10E=0.21,eRank=248.3,q75/q25=inf mlp_w1:H=0.9662,top10E=0.04,eRank=613.9,q75/q25=3.28 mlp_w2:H=0.9645,top10E=0.04,eRank=607.1,q75/q25=3.36 vo_prod:H=0.5612,top10E=0.44,eRank=57.4,q75/q25=inf train_time:54135ms step_avg:90.23ms +[2025-08-22 18:38:50] [Rank 0] PRINT: step:600/10000 val_loss:4.8372 svd_entropy: attn_qk:H=0.6851,top10E=0.39,eRank=119.0,q75/q25=60.11 attn_vo:H=0.7660,top10E=0.21,eRank=248.3,q75/q25=inf mlp_w1:H=0.9662,top10E=0.04,eRank=613.9,q75/q25=3.28 mlp_w2:H=0.9645,top10E=0.04,eRank=607.1,q75/q25=3.36 vo_prod:H=0.5612,top10E=0.44,eRank=57.4,q75/q25=inf train_time:54135ms step_avg:90.23ms +[2025-08-22 18:38:50] [Rank 0] step:601/10000 train_time:54156ms step_avg:90.11ms +[2025-08-22 18:38:50] [Rank 0] step:601/10000 train_time:54156ms step_avg:90.11ms +[2025-08-22 18:38:52] [Rank 0] step:621/10000 train_time:55969ms step_avg:90.13ms +[2025-08-22 18:38:52] [Rank 0] step:621/10000 train_time:55969ms step_avg:90.13ms +[2025-08-22 18:38:53] [Rank 0] step:641/10000 train_time:57771ms step_avg:90.13ms +[2025-08-22 18:38:53] [Rank 0] step:641/10000 train_time:57771ms step_avg:90.13ms 
+[2025-08-22 18:38:55] [Rank 0] step:661/10000 train_time:59576ms step_avg:90.13ms +[2025-08-22 18:38:55] [Rank 0] step:661/10000 train_time:59576ms step_avg:90.13ms +[2025-08-22 18:38:57] [Rank 0] step:681/10000 train_time:61382ms step_avg:90.14ms +[2025-08-22 18:38:57] [Rank 0] step:681/10000 train_time:61382ms step_avg:90.14ms +[2025-08-22 18:38:59] [Rank 0] step:701/10000 train_time:63189ms step_avg:90.14ms +[2025-08-22 18:38:59] [Rank 0] step:701/10000 train_time:63189ms step_avg:90.14ms +[2025-08-22 18:39:01] [Rank 0] step:721/10000 train_time:64998ms step_avg:90.15ms +[2025-08-22 18:39:01] [Rank 0] step:721/10000 train_time:64998ms step_avg:90.15ms +[2025-08-22 18:39:02] [Rank 0] step:741/10000 train_time:66806ms step_avg:90.16ms +[2025-08-22 18:39:02] [Rank 0] step:741/10000 train_time:66806ms step_avg:90.16ms +[2025-08-22 18:39:04] [Rank 0] step:761/10000 train_time:68629ms step_avg:90.18ms +[2025-08-22 18:39:04] [Rank 0] step:761/10000 train_time:68629ms step_avg:90.18ms +[2025-08-22 18:39:06] [Rank 0] step:781/10000 train_time:70452ms step_avg:90.21ms +[2025-08-22 18:39:06] [Rank 0] step:781/10000 train_time:70452ms step_avg:90.21ms +[2025-08-22 18:39:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:39:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:39:22] [Rank 0] PRINT: step:800/10000 val_loss:4.6628 svd_entropy: attn_qk:H=0.6905,top10E=0.38,eRank=122.8,q75/q25=62.74 attn_vo:H=0.7742,top10E=0.19,eRank=254.3,q75/q25=inf mlp_w1:H=0.9662,top10E=0.04,eRank=613.8,q75/q25=3.26 mlp_w2:H=0.9640,top10E=0.05,eRank=605.1,q75/q25=3.35 vo_prod:H=0.5821,top10E=0.40,eRank=66.3,q75/q25=inf train_time:72279ms step_avg:90.35ms +[2025-08-22 18:39:22] [Rank 0] PRINT: step:800/10000 val_loss:4.6628 svd_entropy: attn_qk:H=0.6905,top10E=0.38,eRank=122.8,q75/q25=62.74 attn_vo:H=0.7742,top10E=0.19,eRank=254.3,q75/q25=inf mlp_w1:H=0.9662,top10E=0.04,eRank=613.8,q75/q25=3.26 mlp_w2:H=0.9640,top10E=0.05,eRank=605.1,q75/q25=3.35 vo_prod:H=0.5821,top10E=0.40,eRank=66.3,q75/q25=inf train_time:72279ms step_avg:90.35ms +[2025-08-22 18:39:22] [Rank 0] step:801/10000 train_time:72300ms step_avg:90.26ms +[2025-08-22 18:39:22] [Rank 0] step:801/10000 train_time:72300ms step_avg:90.26ms +[2025-08-22 18:39:23] [Rank 0] step:821/10000 train_time:74119ms step_avg:90.28ms +[2025-08-22 18:39:23] [Rank 0] step:821/10000 train_time:74119ms step_avg:90.28ms +[2025-08-22 18:39:25] [Rank 0] step:841/10000 train_time:75929ms step_avg:90.28ms +[2025-08-22 18:39:25] [Rank 0] step:841/10000 train_time:75929ms step_avg:90.28ms +[2025-08-22 18:39:27] [Rank 0] step:861/10000 train_time:77742ms step_avg:90.29ms +[2025-08-22 18:39:27] [Rank 0] step:861/10000 train_time:77742ms step_avg:90.29ms +[2025-08-22 18:39:29] [Rank 0] step:881/10000 train_time:79556ms step_avg:90.30ms +[2025-08-22 18:39:29] [Rank 0] step:881/10000 train_time:79556ms step_avg:90.30ms +[2025-08-22 18:39:31] [Rank 0] step:901/10000 train_time:81372ms step_avg:90.31ms +[2025-08-22 18:39:31] [Rank 0] step:901/10000 train_time:81372ms step_avg:90.31ms +[2025-08-22 18:39:33] [Rank 0] step:921/10000 train_time:83189ms step_avg:90.32ms +[2025-08-22 18:39:33] [Rank 0] step:921/10000 train_time:83189ms step_avg:90.32ms +[2025-08-22 18:39:34] [Rank 0] step:941/10000 train_time:85005ms 
step_avg:90.33ms +[2025-08-22 18:39:34] [Rank 0] step:941/10000 train_time:85005ms step_avg:90.33ms +[2025-08-22 18:39:36] [Rank 0] step:961/10000 train_time:86822ms step_avg:90.35ms +[2025-08-22 18:39:36] [Rank 0] step:961/10000 train_time:86822ms step_avg:90.35ms +[2025-08-22 18:39:38] [Rank 0] step:981/10000 train_time:88641ms step_avg:90.36ms +[2025-08-22 18:39:38] [Rank 0] step:981/10000 train_time:88641ms step_avg:90.36ms +[2025-08-22 18:39:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:39:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:39:53] [Rank 0] PRINT: step:1000/10000 val_loss:4.5309 svd_entropy: attn_qk:H=0.6966,top10E=0.37,eRank=126.8,q75/q25=63.34 attn_vo:H=0.7813,top10E=0.18,eRank=260.4,q75/q25=inf mlp_w1:H=0.9663,top10E=0.04,eRank=614.5,q75/q25=3.24 mlp_w2:H=0.9638,top10E=0.05,eRank=604.2,q75/q25=3.33 vo_prod:H=0.6001,top10E=0.36,eRank=76.2,q75/q25=inf train_time:90464ms step_avg:90.46ms +[2025-08-22 18:39:53] [Rank 0] PRINT: step:1000/10000 val_loss:4.5309 svd_entropy: attn_qk:H=0.6966,top10E=0.37,eRank=126.8,q75/q25=63.34 attn_vo:H=0.7813,top10E=0.18,eRank=260.4,q75/q25=inf mlp_w1:H=0.9663,top10E=0.04,eRank=614.5,q75/q25=3.24 mlp_w2:H=0.9638,top10E=0.05,eRank=604.2,q75/q25=3.33 vo_prod:H=0.6001,top10E=0.36,eRank=76.2,q75/q25=inf train_time:90464ms step_avg:90.46ms +[2025-08-22 18:39:54] [Rank 0] step:1001/10000 train_time:90484ms step_avg:90.39ms +[2025-08-22 18:39:54] [Rank 0] step:1001/10000 train_time:90484ms step_avg:90.39ms +[2025-08-22 18:39:55] [Rank 0] step:1021/10000 train_time:92300ms step_avg:90.40ms +[2025-08-22 18:39:55] [Rank 0] step:1021/10000 train_time:92300ms step_avg:90.40ms +[2025-08-22 18:39:57] [Rank 0] step:1041/10000 train_time:94113ms step_avg:90.41ms +[2025-08-22 18:39:57] [Rank 0] step:1041/10000 train_time:94113ms 
step_avg:90.41ms +[2025-08-22 18:39:59] [Rank 0] step:1061/10000 train_time:95929ms step_avg:90.41ms +[2025-08-22 18:39:59] [Rank 0] step:1061/10000 train_time:95929ms step_avg:90.41ms +[2025-08-22 18:40:01] [Rank 0] step:1081/10000 train_time:97744ms step_avg:90.42ms +[2025-08-22 18:40:01] [Rank 0] step:1081/10000 train_time:97744ms step_avg:90.42ms +[2025-08-22 18:40:03] [Rank 0] step:1101/10000 train_time:99560ms step_avg:90.43ms +[2025-08-22 18:40:03] [Rank 0] step:1101/10000 train_time:99560ms step_avg:90.43ms +[2025-08-22 18:40:04] [Rank 0] step:1121/10000 train_time:101377ms step_avg:90.43ms +[2025-08-22 18:40:04] [Rank 0] step:1121/10000 train_time:101377ms step_avg:90.43ms +[2025-08-22 18:40:06] [Rank 0] step:1141/10000 train_time:103193ms step_avg:90.44ms +[2025-08-22 18:40:06] [Rank 0] step:1141/10000 train_time:103193ms step_avg:90.44ms +[2025-08-22 18:40:08] [Rank 0] step:1161/10000 train_time:105010ms step_avg:90.45ms +[2025-08-22 18:40:08] [Rank 0] step:1161/10000 train_time:105010ms step_avg:90.45ms +[2025-08-22 18:40:10] [Rank 0] step:1181/10000 train_time:106828ms step_avg:90.46ms +[2025-08-22 18:40:10] [Rank 0] step:1181/10000 train_time:106828ms step_avg:90.46ms +[2025-08-22 18:40:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:40:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:40:25] [Rank 0] PRINT: step:1200/10000 val_loss:4.4013 svd_entropy: attn_qk:H=0.7026,top10E=0.36,eRank=130.4,q75/q25=65.03 attn_vo:H=0.7865,top10E=0.17,eRank=265.6,q75/q25=inf mlp_w1:H=0.9665,top10E=0.04,eRank=615.2,q75/q25=3.22 mlp_w2:H=0.9636,top10E=0.05,eRank=603.4,q75/q25=3.31 vo_prod:H=0.6122,top10E=0.34,eRank=83.5,q75/q25=inf train_time:108651ms step_avg:90.54ms +[2025-08-22 18:40:25] [Rank 0] PRINT: step:1200/10000 val_loss:4.4013 svd_entropy: attn_qk:H=0.7026,top10E=0.36,eRank=130.4,q75/q25=65.03 attn_vo:H=0.7865,top10E=0.17,eRank=265.6,q75/q25=inf mlp_w1:H=0.9665,top10E=0.04,eRank=615.2,q75/q25=3.22 mlp_w2:H=0.9636,top10E=0.05,eRank=603.4,q75/q25=3.31 vo_prod:H=0.6122,top10E=0.34,eRank=83.5,q75/q25=inf train_time:108651ms step_avg:90.54ms +[2025-08-22 18:40:26] [Rank 0] step:1201/10000 train_time:108673ms step_avg:90.49ms +[2025-08-22 18:40:26] [Rank 0] step:1201/10000 train_time:108673ms step_avg:90.49ms +[2025-08-22 18:40:27] [Rank 0] step:1221/10000 train_time:110471ms step_avg:90.48ms +[2025-08-22 18:40:27] [Rank 0] step:1221/10000 train_time:110471ms step_avg:90.48ms +[2025-08-22 18:40:29] [Rank 0] step:1241/10000 train_time:112282ms step_avg:90.48ms +[2025-08-22 18:40:29] [Rank 0] step:1241/10000 train_time:112282ms step_avg:90.48ms +[2025-08-22 18:40:31] [Rank 0] step:1261/10000 train_time:114097ms step_avg:90.48ms +[2025-08-22 18:40:31] [Rank 0] step:1261/10000 train_time:114097ms step_avg:90.48ms +[2025-08-22 18:40:33] [Rank 0] step:1281/10000 train_time:115912ms step_avg:90.49ms +[2025-08-22 18:40:33] [Rank 0] step:1281/10000 train_time:115912ms step_avg:90.49ms +[2025-08-22 18:40:35] [Rank 0] step:1301/10000 train_time:117727ms step_avg:90.49ms +[2025-08-22 18:40:35] [Rank 0] step:1301/10000 train_time:117727ms step_avg:90.49ms +[2025-08-22 18:40:36] [Rank 0] step:1321/10000 train_time:119541ms step_avg:90.49ms +[2025-08-22 18:40:36] [Rank 0] step:1321/10000 train_time:119541ms step_avg:90.49ms +[2025-08-22 18:40:38] [Rank 0] 
step:1341/10000 train_time:121359ms step_avg:90.50ms +[2025-08-22 18:40:38] [Rank 0] step:1341/10000 train_time:121359ms step_avg:90.50ms +[2025-08-22 18:40:40] [Rank 0] step:1361/10000 train_time:123175ms step_avg:90.50ms +[2025-08-22 18:40:40] [Rank 0] step:1361/10000 train_time:123175ms step_avg:90.50ms +[2025-08-22 18:40:42] [Rank 0] step:1381/10000 train_time:124996ms step_avg:90.51ms +[2025-08-22 18:40:42] [Rank 0] step:1381/10000 train_time:124996ms step_avg:90.51ms +[2025-08-22 18:40:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:40:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:40:58] [Rank 0] PRINT: step:1400/10000 val_loss:4.3312 svd_entropy: attn_qk:H=0.7067,top10E=0.35,eRank=133.2,q75/q25=65.81 attn_vo:H=0.7899,top10E=0.16,eRank=269.2,q75/q25=inf mlp_w1:H=0.9667,top10E=0.04,eRank=616.0,q75/q25=3.20 mlp_w2:H=0.9635,top10E=0.05,eRank=602.8,q75/q25=3.29 vo_prod:H=0.6196,top10E=0.32,eRank=88.0,q75/q25=inf train_time:126822ms step_avg:90.59ms +[2025-08-22 18:40:58] [Rank 0] PRINT: step:1400/10000 val_loss:4.3312 svd_entropy: attn_qk:H=0.7067,top10E=0.35,eRank=133.2,q75/q25=65.81 attn_vo:H=0.7899,top10E=0.16,eRank=269.2,q75/q25=inf mlp_w1:H=0.9667,top10E=0.04,eRank=616.0,q75/q25=3.20 mlp_w2:H=0.9635,top10E=0.05,eRank=602.8,q75/q25=3.29 vo_prod:H=0.6196,top10E=0.32,eRank=88.0,q75/q25=inf train_time:126822ms step_avg:90.59ms +[2025-08-22 18:40:58] [Rank 0] step:1401/10000 train_time:126844ms step_avg:90.54ms +[2025-08-22 18:40:58] [Rank 0] step:1401/10000 train_time:126844ms step_avg:90.54ms +[2025-08-22 18:40:59] [Rank 0] step:1421/10000 train_time:128645ms step_avg:90.53ms +[2025-08-22 18:40:59] [Rank 0] step:1421/10000 train_time:128645ms step_avg:90.53ms +[2025-08-22 18:41:01] [Rank 0] step:1441/10000 train_time:130463ms step_avg:90.54ms +[2025-08-22 18:41:01] 
[Rank 0] step:1441/10000 train_time:130463ms step_avg:90.54ms +[2025-08-22 18:41:03] [Rank 0] step:1461/10000 train_time:132281ms step_avg:90.54ms +[2025-08-22 18:41:03] [Rank 0] step:1461/10000 train_time:132281ms step_avg:90.54ms +[2025-08-22 18:41:05] [Rank 0] step:1481/10000 train_time:134100ms step_avg:90.55ms +[2025-08-22 18:41:05] [Rank 0] step:1481/10000 train_time:134100ms step_avg:90.55ms +[2025-08-22 18:41:07] [Rank 0] step:1501/10000 train_time:135930ms step_avg:90.56ms +[2025-08-22 18:41:07] [Rank 0] step:1501/10000 train_time:135930ms step_avg:90.56ms +[2025-08-22 18:41:09] [Rank 0] step:1521/10000 train_time:137762ms step_avg:90.57ms +[2025-08-22 18:41:09] [Rank 0] step:1521/10000 train_time:137762ms step_avg:90.57ms +[2025-08-22 18:41:10] [Rank 0] step:1541/10000 train_time:139597ms step_avg:90.59ms +[2025-08-22 18:41:10] [Rank 0] step:1541/10000 train_time:139597ms step_avg:90.59ms +[2025-08-22 18:41:12] [Rank 0] step:1561/10000 train_time:141429ms step_avg:90.60ms +[2025-08-22 18:41:12] [Rank 0] step:1561/10000 train_time:141429ms step_avg:90.60ms +[2025-08-22 18:41:14] [Rank 0] step:1581/10000 train_time:143262ms step_avg:90.61ms +[2025-08-22 18:41:14] [Rank 0] step:1581/10000 train_time:143262ms step_avg:90.61ms +[2025-08-22 18:41:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:41:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:41:30] [Rank 0] PRINT: step:1600/10000 val_loss:4.2253 svd_entropy: attn_qk:H=0.7103,top10E=0.35,eRank=135.5,q75/q25=65.81 attn_vo:H=0.7924,top10E=0.16,eRank=272.2,q75/q25=inf mlp_w1:H=0.9669,top10E=0.04,eRank=616.6,q75/q25=3.18 mlp_w2:H=0.9633,top10E=0.05,eRank=602.3,q75/q25=3.28 vo_prod:H=0.6242,top10E=0.31,eRank=90.8,q75/q25=inf train_time:145099ms step_avg:90.69ms +[2025-08-22 18:41:30] [Rank 0] PRINT: step:1600/10000 val_loss:4.2253 svd_entropy: attn_qk:H=0.7103,top10E=0.35,eRank=135.5,q75/q25=65.81 attn_vo:H=0.7924,top10E=0.16,eRank=272.2,q75/q25=inf mlp_w1:H=0.9669,top10E=0.04,eRank=616.6,q75/q25=3.18 mlp_w2:H=0.9633,top10E=0.05,eRank=602.3,q75/q25=3.28 vo_prod:H=0.6242,top10E=0.31,eRank=90.8,q75/q25=inf train_time:145099ms step_avg:90.69ms +[2025-08-22 18:41:30] [Rank 0] step:1601/10000 train_time:145120ms step_avg:90.64ms +[2025-08-22 18:41:30] [Rank 0] step:1601/10000 train_time:145120ms step_avg:90.64ms +[2025-08-22 18:41:32] [Rank 0] step:1621/10000 train_time:146945ms step_avg:90.65ms +[2025-08-22 18:41:32] [Rank 0] step:1621/10000 train_time:146945ms step_avg:90.65ms +[2025-08-22 18:41:34] [Rank 0] step:1641/10000 train_time:148768ms step_avg:90.66ms +[2025-08-22 18:41:34] [Rank 0] step:1641/10000 train_time:148768ms step_avg:90.66ms +[2025-08-22 18:41:35] [Rank 0] step:1661/10000 train_time:150593ms step_avg:90.66ms +[2025-08-22 18:41:35] [Rank 0] step:1661/10000 train_time:150593ms step_avg:90.66ms +[2025-08-22 18:41:37] [Rank 0] step:1681/10000 train_time:152417ms step_avg:90.67ms +[2025-08-22 18:41:37] [Rank 0] step:1681/10000 train_time:152417ms step_avg:90.67ms +[2025-08-22 18:41:39] [Rank 0] step:1701/10000 train_time:154240ms step_avg:90.68ms +[2025-08-22 18:41:39] [Rank 0] step:1701/10000 train_time:154240ms step_avg:90.68ms +[2025-08-22 18:41:41] [Rank 0] step:1721/10000 train_time:156065ms step_avg:90.68ms +[2025-08-22 18:41:41] [Rank 0] step:1721/10000 train_time:156065ms step_avg:90.68ms +[2025-08-22 18:41:43] [Rank 0] 
step:1741/10000 train_time:157891ms step_avg:90.69ms +[2025-08-22 18:41:43] [Rank 0] step:1741/10000 train_time:157891ms step_avg:90.69ms +[2025-08-22 18:41:44] [Rank 0] step:1761/10000 train_time:159720ms step_avg:90.70ms +[2025-08-22 18:41:44] [Rank 0] step:1761/10000 train_time:159720ms step_avg:90.70ms +[2025-08-22 18:41:46] [Rank 0] step:1781/10000 train_time:161547ms step_avg:90.71ms +[2025-08-22 18:41:46] [Rank 0] step:1781/10000 train_time:161547ms step_avg:90.71ms +[2025-08-22 18:41:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:41:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:42:02] [Rank 0] PRINT: step:1800/10000 val_loss:4.1605 svd_entropy: attn_qk:H=0.7130,top10E=0.35,eRank=137.4,q75/q25=66.17 attn_vo:H=0.7939,top10E=0.16,eRank=274.2,q75/q25=inf mlp_w1:H=0.9670,top10E=0.04,eRank=617.1,q75/q25=3.16 mlp_w2:H=0.9633,top10E=0.05,eRank=602.0,q75/q25=3.27 vo_prod:H=0.6269,top10E=0.31,eRank=92.6,q75/q25=inf train_time:163382ms step_avg:90.77ms +[2025-08-22 18:42:02] [Rank 0] PRINT: step:1800/10000 val_loss:4.1605 svd_entropy: attn_qk:H=0.7130,top10E=0.35,eRank=137.4,q75/q25=66.17 attn_vo:H=0.7939,top10E=0.16,eRank=274.2,q75/q25=inf mlp_w1:H=0.9670,top10E=0.04,eRank=617.1,q75/q25=3.16 mlp_w2:H=0.9633,top10E=0.05,eRank=602.0,q75/q25=3.27 vo_prod:H=0.6269,top10E=0.31,eRank=92.6,q75/q25=inf train_time:163382ms step_avg:90.77ms +[2025-08-22 18:42:02] [Rank 0] step:1801/10000 train_time:163404ms step_avg:90.73ms +[2025-08-22 18:42:02] [Rank 0] step:1801/10000 train_time:163404ms step_avg:90.73ms +[2025-08-22 18:42:04] [Rank 0] step:1821/10000 train_time:165213ms step_avg:90.73ms +[2025-08-22 18:42:04] [Rank 0] step:1821/10000 train_time:165213ms step_avg:90.73ms +[2025-08-22 18:42:06] [Rank 0] step:1841/10000 train_time:167036ms step_avg:90.73ms +[2025-08-22 18:42:06] 
[Rank 0] step:1841/10000 train_time:167036ms step_avg:90.73ms +[2025-08-22 18:42:07] [Rank 0] step:1861/10000 train_time:168859ms step_avg:90.74ms +[2025-08-22 18:42:07] [Rank 0] step:1861/10000 train_time:168859ms step_avg:90.74ms +[2025-08-22 18:42:09] [Rank 0] step:1881/10000 train_time:170684ms step_avg:90.74ms +[2025-08-22 18:42:09] [Rank 0] step:1881/10000 train_time:170684ms step_avg:90.74ms +[2025-08-22 18:42:11] [Rank 0] step:1901/10000 train_time:172521ms step_avg:90.75ms +[2025-08-22 18:42:11] [Rank 0] step:1901/10000 train_time:172521ms step_avg:90.75ms +[2025-08-22 18:42:13] [Rank 0] step:1921/10000 train_time:174348ms step_avg:90.76ms +[2025-08-22 18:42:13] [Rank 0] step:1921/10000 train_time:174348ms step_avg:90.76ms +[2025-08-22 18:42:15] [Rank 0] step:1941/10000 train_time:176176ms step_avg:90.77ms +[2025-08-22 18:42:15] [Rank 0] step:1941/10000 train_time:176176ms step_avg:90.77ms +[2025-08-22 18:42:16] [Rank 0] step:1961/10000 train_time:178004ms step_avg:90.77ms +[2025-08-22 18:42:16] [Rank 0] step:1961/10000 train_time:178004ms step_avg:90.77ms +[2025-08-22 18:42:18] [Rank 0] step:1981/10000 train_time:179833ms step_avg:90.78ms +[2025-08-22 18:42:18] [Rank 0] step:1981/10000 train_time:179833ms step_avg:90.78ms +[2025-08-22 18:42:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:42:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:42:34] [Rank 0] PRINT: step:2000/10000 val_loss:4.1244 svd_entropy: attn_qk:H=0.7156,top10E=0.34,eRank=139.3,q75/q25=66.62 attn_vo:H=0.7953,top10E=0.15,eRank=276.0,q75/q25=inf mlp_w1:H=0.9672,top10E=0.04,eRank=617.7,q75/q25=3.15 mlp_w2:H=0.9632,top10E=0.05,eRank=601.9,q75/q25=3.25 vo_prod:H=0.6295,top10E=0.31,eRank=94.8,q75/q25=inf train_time:181668ms step_avg:90.83ms +[2025-08-22 18:42:34] [Rank 0] PRINT: step:2000/10000 val_loss:4.1244 svd_entropy: attn_qk:H=0.7156,top10E=0.34,eRank=139.3,q75/q25=66.62 attn_vo:H=0.7953,top10E=0.15,eRank=276.0,q75/q25=inf mlp_w1:H=0.9672,top10E=0.04,eRank=617.7,q75/q25=3.15 mlp_w2:H=0.9632,top10E=0.05,eRank=601.9,q75/q25=3.25 vo_prod:H=0.6295,top10E=0.31,eRank=94.8,q75/q25=inf train_time:181668ms step_avg:90.83ms +[2025-08-22 18:42:34] [Rank 0] step:2001/10000 train_time:181689ms step_avg:90.80ms +[2025-08-22 18:42:34] [Rank 0] step:2001/10000 train_time:181689ms step_avg:90.80ms +[2025-08-22 18:42:36] [Rank 0] step:2021/10000 train_time:183521ms step_avg:90.81ms +[2025-08-22 18:42:36] [Rank 0] step:2021/10000 train_time:183521ms step_avg:90.81ms +[2025-08-22 18:42:38] [Rank 0] step:2041/10000 train_time:185999ms step_avg:91.13ms +[2025-08-22 18:42:38] [Rank 0] step:2041/10000 train_time:185999ms step_avg:91.13ms +[2025-08-22 18:42:40] [Rank 0] step:2061/10000 train_time:187827ms step_avg:91.13ms +[2025-08-22 18:42:40] [Rank 0] step:2061/10000 train_time:187827ms step_avg:91.13ms +[2025-08-22 18:42:42] [Rank 0] step:2081/10000 train_time:189659ms step_avg:91.14ms +[2025-08-22 18:42:42] [Rank 0] step:2081/10000 train_time:189659ms step_avg:91.14ms +[2025-08-22 18:42:44] [Rank 0] step:2101/10000 train_time:191490ms step_avg:91.14ms +[2025-08-22 18:42:44] [Rank 0] step:2101/10000 train_time:191490ms step_avg:91.14ms +[2025-08-22 18:42:46] [Rank 0] step:2121/10000 train_time:193324ms step_avg:91.15ms +[2025-08-22 18:42:46] [Rank 0] step:2121/10000 train_time:193324ms step_avg:91.15ms +[2025-08-22 18:42:47] [Rank 0] 
step:2141/10000 train_time:195156ms step_avg:91.15ms +[2025-08-22 18:42:47] [Rank 0] step:2141/10000 train_time:195156ms step_avg:91.15ms +[2025-08-22 18:42:49] [Rank 0] step:2161/10000 train_time:196988ms step_avg:91.16ms +[2025-08-22 18:42:49] [Rank 0] step:2161/10000 train_time:196988ms step_avg:91.16ms +[2025-08-22 18:42:51] [Rank 0] step:2181/10000 train_time:198821ms step_avg:91.16ms +[2025-08-22 18:42:51] [Rank 0] step:2181/10000 train_time:198821ms step_avg:91.16ms +[2025-08-22 18:42:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:42:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:43:06] [Rank 0] PRINT: step:2200/10000 val_loss:4.0778 svd_entropy: attn_qk:H=0.7180,top10E=0.34,eRank=140.9,q75/q25=66.24 attn_vo:H=0.7970,top10E=0.15,eRank=278.4,q75/q25=inf mlp_w1:H=0.9673,top10E=0.04,eRank=618.3,q75/q25=3.14 mlp_w2:H=0.9632,top10E=0.05,eRank=601.7,q75/q25=3.24 vo_prod:H=0.6334,top10E=0.30,eRank=97.9,q75/q25=inf train_time:200659ms step_avg:91.21ms +[2025-08-22 18:43:06] [Rank 0] PRINT: step:2200/10000 val_loss:4.0778 svd_entropy: attn_qk:H=0.7180,top10E=0.34,eRank=140.9,q75/q25=66.24 attn_vo:H=0.7970,top10E=0.15,eRank=278.4,q75/q25=inf mlp_w1:H=0.9673,top10E=0.04,eRank=618.3,q75/q25=3.14 mlp_w2:H=0.9632,top10E=0.05,eRank=601.7,q75/q25=3.24 vo_prod:H=0.6334,top10E=0.30,eRank=97.9,q75/q25=inf train_time:200659ms step_avg:91.21ms +[2025-08-22 18:43:07] [Rank 0] step:2201/10000 train_time:200678ms step_avg:91.18ms +[2025-08-22 18:43:07] [Rank 0] step:2201/10000 train_time:200678ms step_avg:91.18ms +[2025-08-22 18:43:08] [Rank 0] step:2221/10000 train_time:202507ms step_avg:91.18ms +[2025-08-22 18:43:08] [Rank 0] step:2221/10000 train_time:202507ms step_avg:91.18ms +[2025-08-22 18:43:10] [Rank 0] step:2241/10000 train_time:204368ms step_avg:91.20ms +[2025-08-22 18:43:10] 
[Rank 0] step:2241/10000 train_time:204368ms step_avg:91.20ms +[2025-08-22 18:43:12] [Rank 0] step:2261/10000 train_time:206235ms step_avg:91.21ms +[2025-08-22 18:43:12] [Rank 0] step:2261/10000 train_time:206235ms step_avg:91.21ms +[2025-08-22 18:43:14] [Rank 0] step:2281/10000 train_time:208105ms step_avg:91.23ms +[2025-08-22 18:43:14] [Rank 0] step:2281/10000 train_time:208105ms step_avg:91.23ms +[2025-08-22 18:43:16] [Rank 0] step:2301/10000 train_time:209974ms step_avg:91.25ms +[2025-08-22 18:43:16] [Rank 0] step:2301/10000 train_time:209974ms step_avg:91.25ms +[2025-08-22 18:43:18] [Rank 0] step:2321/10000 train_time:211846ms step_avg:91.27ms +[2025-08-22 18:43:18] [Rank 0] step:2321/10000 train_time:211846ms step_avg:91.27ms +[2025-08-22 18:43:20] [Rank 0] step:2341/10000 train_time:213717ms step_avg:91.29ms +[2025-08-22 18:43:20] [Rank 0] step:2341/10000 train_time:213717ms step_avg:91.29ms +[2025-08-22 18:43:21] [Rank 0] step:2361/10000 train_time:215588ms step_avg:91.31ms +[2025-08-22 18:43:21] [Rank 0] step:2361/10000 train_time:215588ms step_avg:91.31ms +[2025-08-22 18:43:23] [Rank 0] step:2381/10000 train_time:217461ms step_avg:91.33ms +[2025-08-22 18:43:23] [Rank 0] step:2381/10000 train_time:217461ms step_avg:91.33ms +[2025-08-22 18:43:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:43:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:43:39] [Rank 0] PRINT: step:2400/10000 val_loss:4.0041 svd_entropy: attn_qk:H=0.7200,top10E=0.33,eRank=142.3,q75/q25=65.93 attn_vo:H=0.7979,top10E=0.15,eRank=279.8,q75/q25=inf mlp_w1:H=0.9674,top10E=0.04,eRank=618.8,q75/q25=3.13 mlp_w2:H=0.9631,top10E=0.05,eRank=601.6,q75/q25=3.24 vo_prod:H=0.6360,top10E=0.29,eRank=99.7,q75/q25=inf train_time:219338ms step_avg:91.39ms +[2025-08-22 18:43:39] [Rank 0] PRINT: step:2400/10000 val_loss:4.0041 svd_entropy: attn_qk:H=0.7200,top10E=0.33,eRank=142.3,q75/q25=65.93 attn_vo:H=0.7979,top10E=0.15,eRank=279.8,q75/q25=inf mlp_w1:H=0.9674,top10E=0.04,eRank=618.8,q75/q25=3.13 mlp_w2:H=0.9631,top10E=0.05,eRank=601.6,q75/q25=3.24 vo_prod:H=0.6360,top10E=0.29,eRank=99.7,q75/q25=inf train_time:219338ms step_avg:91.39ms +[2025-08-22 18:43:39] [Rank 0] step:2401/10000 train_time:219359ms step_avg:91.36ms +[2025-08-22 18:43:39] [Rank 0] step:2401/10000 train_time:219359ms step_avg:91.36ms +[2025-08-22 18:43:41] [Rank 0] step:2421/10000 train_time:221220ms step_avg:91.38ms +[2025-08-22 18:43:41] [Rank 0] step:2421/10000 train_time:221220ms step_avg:91.38ms +[2025-08-22 18:43:43] [Rank 0] step:2441/10000 train_time:223086ms step_avg:91.39ms +[2025-08-22 18:43:43] [Rank 0] step:2441/10000 train_time:223086ms step_avg:91.39ms +[2025-08-22 18:43:45] [Rank 0] step:2461/10000 train_time:224953ms step_avg:91.41ms +[2025-08-22 18:43:45] [Rank 0] step:2461/10000 train_time:224953ms step_avg:91.41ms +[2025-08-22 18:43:46] [Rank 0] step:2481/10000 train_time:226820ms step_avg:91.42ms +[2025-08-22 18:43:46] [Rank 0] step:2481/10000 train_time:226820ms step_avg:91.42ms +[2025-08-22 18:43:48] [Rank 0] step:2501/10000 train_time:228688ms step_avg:91.44ms +[2025-08-22 18:43:48] [Rank 0] step:2501/10000 train_time:228688ms step_avg:91.44ms +[2025-08-22 18:43:50] [Rank 0] step:2521/10000 train_time:230557ms step_avg:91.45ms +[2025-08-22 18:43:50] [Rank 0] step:2521/10000 train_time:230557ms step_avg:91.45ms +[2025-08-22 18:43:52] [Rank 0] 
step:2541/10000 train_time:232425ms step_avg:91.47ms +[2025-08-22 18:43:52] [Rank 0] step:2541/10000 train_time:232425ms step_avg:91.47ms +[2025-08-22 18:43:54] [Rank 0] step:2561/10000 train_time:234294ms step_avg:91.49ms +[2025-08-22 18:43:54] [Rank 0] step:2561/10000 train_time:234294ms step_avg:91.49ms +[2025-08-22 18:43:56] [Rank 0] step:2581/10000 train_time:236166ms step_avg:91.50ms +[2025-08-22 18:43:56] [Rank 0] step:2581/10000 train_time:236166ms step_avg:91.50ms +[2025-08-22 18:43:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:43:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:44:11] [Rank 0] PRINT: step:2600/10000 val_loss:3.9745 svd_entropy: attn_qk:H=0.7222,top10E=0.33,eRank=143.9,q75/q25=65.59 attn_vo:H=0.7992,top10E=0.15,eRank=281.7,q75/q25=inf mlp_w1:H=0.9675,top10E=0.04,eRank=619.3,q75/q25=3.11 mlp_w2:H=0.9631,top10E=0.05,eRank=601.5,q75/q25=3.23 vo_prod:H=0.6394,top10E=0.29,eRank=102.2,q75/q25=inf train_time:238043ms step_avg:91.55ms +[2025-08-22 18:44:11] [Rank 0] PRINT: step:2600/10000 val_loss:3.9745 svd_entropy: attn_qk:H=0.7222,top10E=0.33,eRank=143.9,q75/q25=65.59 attn_vo:H=0.7992,top10E=0.15,eRank=281.7,q75/q25=inf mlp_w1:H=0.9675,top10E=0.04,eRank=619.3,q75/q25=3.11 mlp_w2:H=0.9631,top10E=0.05,eRank=601.5,q75/q25=3.23 vo_prod:H=0.6394,top10E=0.29,eRank=102.2,q75/q25=inf train_time:238043ms step_avg:91.55ms +[2025-08-22 18:44:11] [Rank 0] step:2601/10000 train_time:238062ms step_avg:91.53ms +[2025-08-22 18:44:11] [Rank 0] step:2601/10000 train_time:238062ms step_avg:91.53ms +[2025-08-22 18:44:13] [Rank 0] step:2621/10000 train_time:239917ms step_avg:91.54ms +[2025-08-22 18:44:13] [Rank 0] step:2621/10000 train_time:239917ms step_avg:91.54ms +[2025-08-22 18:44:15] [Rank 0] step:2641/10000 train_time:241782ms step_avg:91.55ms +[2025-08-22 
18:44:15] [Rank 0] step:2641/10000 train_time:241782ms step_avg:91.55ms +[2025-08-22 18:44:17] [Rank 0] step:2661/10000 train_time:243650ms step_avg:91.56ms +[2025-08-22 18:44:17] [Rank 0] step:2661/10000 train_time:243650ms step_avg:91.56ms +[2025-08-22 18:44:19] [Rank 0] step:2681/10000 train_time:245518ms step_avg:91.58ms +[2025-08-22 18:44:19] [Rank 0] step:2681/10000 train_time:245518ms step_avg:91.58ms +[2025-08-22 18:44:21] [Rank 0] step:2701/10000 train_time:247386ms step_avg:91.59ms +[2025-08-22 18:44:21] [Rank 0] step:2701/10000 train_time:247386ms step_avg:91.59ms +[2025-08-22 18:44:22] [Rank 0] step:2721/10000 train_time:249256ms step_avg:91.60ms +[2025-08-22 18:44:22] [Rank 0] step:2721/10000 train_time:249256ms step_avg:91.60ms +[2025-08-22 18:44:24] [Rank 0] step:2741/10000 train_time:251127ms step_avg:91.62ms +[2025-08-22 18:44:24] [Rank 0] step:2741/10000 train_time:251127ms step_avg:91.62ms +[2025-08-22 18:44:26] [Rank 0] step:2761/10000 train_time:252999ms step_avg:91.63ms +[2025-08-22 18:44:26] [Rank 0] step:2761/10000 train_time:252999ms step_avg:91.63ms +[2025-08-22 18:44:28] [Rank 0] step:2781/10000 train_time:254873ms step_avg:91.65ms +[2025-08-22 18:44:28] [Rank 0] step:2781/10000 train_time:254873ms step_avg:91.65ms +[2025-08-22 18:44:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:44:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:44:44] [Rank 0] PRINT: step:2800/10000 val_loss:3.9542 svd_entropy: attn_qk:H=0.7241,top10E=0.33,eRank=145.4,q75/q25=65.54 attn_vo:H=0.8003,top10E=0.15,eRank=283.2,q75/q25=inf mlp_w1:H=0.9676,top10E=0.04,eRank=619.7,q75/q25=3.11 mlp_w2:H=0.9631,top10E=0.05,eRank=601.5,q75/q25=3.22 vo_prod:H=0.6420,top10E=0.29,eRank=104.0,q75/q25=inf train_time:256753ms step_avg:91.70ms +[2025-08-22 18:44:44] [Rank 0] PRINT: step:2800/10000 val_loss:3.9542 svd_entropy: attn_qk:H=0.7241,top10E=0.33,eRank=145.4,q75/q25=65.54 attn_vo:H=0.8003,top10E=0.15,eRank=283.2,q75/q25=inf mlp_w1:H=0.9676,top10E=0.04,eRank=619.7,q75/q25=3.11 mlp_w2:H=0.9631,top10E=0.05,eRank=601.5,q75/q25=3.22 vo_prod:H=0.6420,top10E=0.29,eRank=104.0,q75/q25=inf train_time:256753ms step_avg:91.70ms +[2025-08-22 18:44:44] [Rank 0] step:2801/10000 train_time:256773ms step_avg:91.67ms +[2025-08-22 18:44:44] [Rank 0] step:2801/10000 train_time:256773ms step_avg:91.67ms +[2025-08-22 18:44:46] [Rank 0] step:2821/10000 train_time:258643ms step_avg:91.69ms +[2025-08-22 18:44:46] [Rank 0] step:2821/10000 train_time:258643ms step_avg:91.69ms +[2025-08-22 18:44:47] [Rank 0] step:2841/10000 train_time:260516ms step_avg:91.70ms +[2025-08-22 18:44:47] [Rank 0] step:2841/10000 train_time:260516ms step_avg:91.70ms +[2025-08-22 18:44:49] [Rank 0] step:2861/10000 train_time:262388ms step_avg:91.71ms +[2025-08-22 18:44:49] [Rank 0] step:2861/10000 train_time:262388ms step_avg:91.71ms +[2025-08-22 18:44:51] [Rank 0] step:2881/10000 train_time:264263ms step_avg:91.73ms +[2025-08-22 18:44:51] [Rank 0] step:2881/10000 train_time:264263ms step_avg:91.73ms +[2025-08-22 18:44:53] [Rank 0] step:2901/10000 train_time:266137ms step_avg:91.74ms +[2025-08-22 18:44:53] [Rank 0] step:2901/10000 train_time:266137ms step_avg:91.74ms +[2025-08-22 18:44:55] [Rank 0] step:2921/10000 train_time:268013ms step_avg:91.75ms +[2025-08-22 18:44:55] [Rank 0] step:2921/10000 train_time:268013ms step_avg:91.75ms +[2025-08-22 18:44:57] [Rank 0] 
step:2941/10000 train_time:269891ms step_avg:91.77ms +[2025-08-22 18:44:57] [Rank 0] step:2941/10000 train_time:269891ms step_avg:91.77ms +[2025-08-22 18:44:59] [Rank 0] step:2961/10000 train_time:271768ms step_avg:91.78ms +[2025-08-22 18:44:59] [Rank 0] step:2961/10000 train_time:271768ms step_avg:91.78ms +[2025-08-22 18:45:01] [Rank 0] step:2981/10000 train_time:273652ms step_avg:91.80ms +[2025-08-22 18:45:01] [Rank 0] step:2981/10000 train_time:273652ms step_avg:91.80ms +[2025-08-22 18:45:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:45:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:45:16] [Rank 0] PRINT: step:3000/10000 val_loss:3.9201 svd_entropy: attn_qk:H=0.7258,top10E=0.33,eRank=146.7,q75/q25=64.89 attn_vo:H=0.8015,top10E=0.14,eRank=285.0,q75/q25=inf mlp_w1:H=0.9677,top10E=0.04,eRank=620.1,q75/q25=3.10 mlp_w2:H=0.9631,top10E=0.05,eRank=601.5,q75/q25=3.22 vo_prod:H=0.6448,top10E=0.28,eRank=106.1,q75/q25=inf train_time:275543ms step_avg:91.85ms +[2025-08-22 18:45:16] [Rank 0] PRINT: step:3000/10000 val_loss:3.9201 svd_entropy: attn_qk:H=0.7258,top10E=0.33,eRank=146.7,q75/q25=64.89 attn_vo:H=0.8015,top10E=0.14,eRank=285.0,q75/q25=inf mlp_w1:H=0.9677,top10E=0.04,eRank=620.1,q75/q25=3.10 mlp_w2:H=0.9631,top10E=0.05,eRank=601.5,q75/q25=3.22 vo_prod:H=0.6448,top10E=0.28,eRank=106.1,q75/q25=inf train_time:275543ms step_avg:91.85ms +[2025-08-22 18:45:16] [Rank 0] step:3001/10000 train_time:275564ms step_avg:91.82ms +[2025-08-22 18:45:16] [Rank 0] step:3001/10000 train_time:275564ms step_avg:91.82ms +[2025-08-22 18:45:18] [Rank 0] step:3021/10000 train_time:277441ms step_avg:91.84ms +[2025-08-22 18:45:18] [Rank 0] step:3021/10000 train_time:277441ms step_avg:91.84ms +[2025-08-22 18:45:20] [Rank 0] step:3041/10000 train_time:279317ms step_avg:91.85ms +[2025-08-22 
18:45:20] [Rank 0] step:3041/10000 train_time:279317ms step_avg:91.85ms +[2025-08-22 18:45:22] [Rank 0] step:3061/10000 train_time:281192ms step_avg:91.86ms +[2025-08-22 18:45:22] [Rank 0] step:3061/10000 train_time:281192ms step_avg:91.86ms +[2025-08-22 18:45:24] [Rank 0] step:3081/10000 train_time:283067ms step_avg:91.88ms +[2025-08-22 18:45:24] [Rank 0] step:3081/10000 train_time:283067ms step_avg:91.88ms +[2025-08-22 18:45:26] [Rank 0] step:3101/10000 train_time:284947ms step_avg:91.89ms +[2025-08-22 18:45:26] [Rank 0] step:3101/10000 train_time:284947ms step_avg:91.89ms +[2025-08-22 18:45:27] [Rank 0] step:3121/10000 train_time:286825ms step_avg:91.90ms +[2025-08-22 18:45:27] [Rank 0] step:3121/10000 train_time:286825ms step_avg:91.90ms +[2025-08-22 18:45:29] [Rank 0] step:3141/10000 train_time:288705ms step_avg:91.92ms +[2025-08-22 18:45:29] [Rank 0] step:3141/10000 train_time:288705ms step_avg:91.92ms +[2025-08-22 18:45:31] [Rank 0] step:3161/10000 train_time:290585ms step_avg:91.93ms +[2025-08-22 18:45:31] [Rank 0] step:3161/10000 train_time:290585ms step_avg:91.93ms +[2025-08-22 18:45:33] [Rank 0] step:3181/10000 train_time:292467ms step_avg:91.94ms +[2025-08-22 18:45:33] [Rank 0] step:3181/10000 train_time:292467ms step_avg:91.94ms +[2025-08-22 18:45:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:45:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:45:49] [Rank 0] PRINT: step:3200/10000 val_loss:3.8970 svd_entropy: attn_qk:H=0.7277,top10E=0.32,eRank=148.1,q75/q25=64.92 attn_vo:H=0.8025,top10E=0.14,eRank=286.4,q75/q25=inf mlp_w1:H=0.9678,top10E=0.04,eRank=620.5,q75/q25=3.09 mlp_w2:H=0.9631,top10E=0.05,eRank=601.5,q75/q25=3.21 vo_prod:H=0.6472,top10E=0.28,eRank=107.7,q75/q25=inf train_time:294354ms step_avg:91.99ms +[2025-08-22 18:45:49] [Rank 0] PRINT: step:3200/10000 val_loss:3.8970 svd_entropy: attn_qk:H=0.7277,top10E=0.32,eRank=148.1,q75/q25=64.92 attn_vo:H=0.8025,top10E=0.14,eRank=286.4,q75/q25=inf mlp_w1:H=0.9678,top10E=0.04,eRank=620.5,q75/q25=3.09 mlp_w2:H=0.9631,top10E=0.05,eRank=601.5,q75/q25=3.21 vo_prod:H=0.6472,top10E=0.28,eRank=107.7,q75/q25=inf train_time:294354ms step_avg:91.99ms +[2025-08-22 18:45:49] [Rank 0] step:3201/10000 train_time:294374ms step_avg:91.96ms +[2025-08-22 18:45:49] [Rank 0] step:3201/10000 train_time:294374ms step_avg:91.96ms +[2025-08-22 18:45:51] [Rank 0] step:3221/10000 train_time:296248ms step_avg:91.97ms +[2025-08-22 18:45:51] [Rank 0] step:3221/10000 train_time:296248ms step_avg:91.97ms +[2025-08-22 18:45:52] [Rank 0] step:3241/10000 train_time:298122ms step_avg:91.98ms +[2025-08-22 18:45:52] [Rank 0] step:3241/10000 train_time:298122ms step_avg:91.98ms +[2025-08-22 18:45:54] [Rank 0] step:3261/10000 train_time:299997ms step_avg:92.00ms +[2025-08-22 18:45:54] [Rank 0] step:3261/10000 train_time:299997ms step_avg:92.00ms +[2025-08-22 18:45:56] [Rank 0] step:3281/10000 train_time:301873ms step_avg:92.01ms +[2025-08-22 18:45:56] [Rank 0] step:3281/10000 train_time:301873ms step_avg:92.01ms +[2025-08-22 18:45:58] [Rank 0] step:3301/10000 train_time:303750ms step_avg:92.02ms +[2025-08-22 18:45:58] [Rank 0] step:3301/10000 train_time:303750ms step_avg:92.02ms +[2025-08-22 18:46:00] [Rank 0] step:3321/10000 train_time:305630ms step_avg:92.03ms +[2025-08-22 18:46:00] [Rank 0] step:3321/10000 train_time:305630ms step_avg:92.03ms +[2025-08-22 18:46:02] [Rank 0] 
step:3341/10000 train_time:307509ms step_avg:92.04ms +[2025-08-22 18:46:02] [Rank 0] step:3341/10000 train_time:307509ms step_avg:92.04ms +[2025-08-22 18:46:04] [Rank 0] step:3361/10000 train_time:309389ms step_avg:92.05ms +[2025-08-22 18:46:04] [Rank 0] step:3361/10000 train_time:309389ms step_avg:92.05ms +[2025-08-22 18:46:06] [Rank 0] step:3381/10000 train_time:311269ms step_avg:92.06ms +[2025-08-22 18:46:06] [Rank 0] step:3381/10000 train_time:311269ms step_avg:92.06ms +[2025-08-22 18:46:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:46:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:46:21] [Rank 0] PRINT: step:3400/10000 val_loss:3.8731 svd_entropy: attn_qk:H=0.7294,top10E=0.32,eRank=149.4,q75/q25=64.68 attn_vo:H=0.8038,top10E=0.14,eRank=288.2,q75/q25=inf mlp_w1:H=0.9679,top10E=0.04,eRank=620.7,q75/q25=3.08 mlp_w2:H=0.9631,top10E=0.05,eRank=601.6,q75/q25=3.21 vo_prod:H=0.6500,top10E=0.27,eRank=109.7,q75/q25=inf train_time:313155ms step_avg:92.10ms +[2025-08-22 18:46:21] [Rank 0] PRINT: step:3400/10000 val_loss:3.8731 svd_entropy: attn_qk:H=0.7294,top10E=0.32,eRank=149.4,q75/q25=64.68 attn_vo:H=0.8038,top10E=0.14,eRank=288.2,q75/q25=inf mlp_w1:H=0.9679,top10E=0.04,eRank=620.7,q75/q25=3.08 mlp_w2:H=0.9631,top10E=0.05,eRank=601.6,q75/q25=3.21 vo_prod:H=0.6500,top10E=0.27,eRank=109.7,q75/q25=inf train_time:313155ms step_avg:92.10ms +[2025-08-22 18:46:21] [Rank 0] step:3401/10000 train_time:313176ms step_avg:92.08ms +[2025-08-22 18:46:21] [Rank 0] step:3401/10000 train_time:313176ms step_avg:92.08ms +[2025-08-22 18:46:23] [Rank 0] step:3421/10000 train_time:315050ms step_avg:92.09ms +[2025-08-22 18:46:23] [Rank 0] step:3421/10000 train_time:315050ms step_avg:92.09ms +[2025-08-22 18:46:25] [Rank 0] step:3441/10000 train_time:316923ms step_avg:92.10ms +[2025-08-22 
18:46:25] [Rank 0] step:3441/10000 train_time:316923ms step_avg:92.10ms +[2025-08-22 18:46:27] [Rank 0] step:3461/10000 train_time:318799ms step_avg:92.11ms +[2025-08-22 18:46:27] [Rank 0] step:3461/10000 train_time:318799ms step_avg:92.11ms +[2025-08-22 18:46:29] [Rank 0] step:3481/10000 train_time:320677ms step_avg:92.12ms +[2025-08-22 18:46:29] [Rank 0] step:3481/10000 train_time:320677ms step_avg:92.12ms +[2025-08-22 18:46:31] [Rank 0] step:3501/10000 train_time:322557ms step_avg:92.13ms +[2025-08-22 18:46:31] [Rank 0] step:3501/10000 train_time:322557ms step_avg:92.13ms +[2025-08-22 18:46:32] [Rank 0] step:3521/10000 train_time:324439ms step_avg:92.14ms +[2025-08-22 18:46:32] [Rank 0] step:3521/10000 train_time:324439ms step_avg:92.14ms +[2025-08-22 18:46:34] [Rank 0] step:3541/10000 train_time:326318ms step_avg:92.15ms +[2025-08-22 18:46:34] [Rank 0] step:3541/10000 train_time:326318ms step_avg:92.15ms +[2025-08-22 18:46:36] [Rank 0] step:3561/10000 train_time:328197ms step_avg:92.16ms +[2025-08-22 18:46:36] [Rank 0] step:3561/10000 train_time:328197ms step_avg:92.16ms +[2025-08-22 18:46:38] [Rank 0] step:3581/10000 train_time:330079ms step_avg:92.18ms +[2025-08-22 18:46:38] [Rank 0] step:3581/10000 train_time:330079ms step_avg:92.18ms +[2025-08-22 18:46:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:46:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:46:54] [Rank 0] PRINT: step:3600/10000 val_loss:3.8644 svd_entropy: attn_qk:H=0.7309,top10E=0.32,eRank=150.6,q75/q25=64.66 attn_vo:H=0.8046,top10E=0.14,eRank=289.3,q75/q25=inf mlp_w1:H=0.9680,top10E=0.04,eRank=621.1,q75/q25=3.08 mlp_w2:H=0.9632,top10E=0.05,eRank=601.7,q75/q25=3.19 vo_prod:H=0.6515,top10E=0.27,eRank=110.6,q75/q25=inf train_time:331968ms step_avg:92.21ms +[2025-08-22 18:46:54] [Rank 0] PRINT: step:3600/10000 val_loss:3.8644 svd_entropy: attn_qk:H=0.7309,top10E=0.32,eRank=150.6,q75/q25=64.66 attn_vo:H=0.8046,top10E=0.14,eRank=289.3,q75/q25=inf mlp_w1:H=0.9680,top10E=0.04,eRank=621.1,q75/q25=3.08 mlp_w2:H=0.9632,top10E=0.05,eRank=601.7,q75/q25=3.19 vo_prod:H=0.6515,top10E=0.27,eRank=110.6,q75/q25=inf train_time:331968ms step_avg:92.21ms +[2025-08-22 18:46:54] [Rank 0] step:3601/10000 train_time:331991ms step_avg:92.19ms +[2025-08-22 18:46:54] [Rank 0] step:3601/10000 train_time:331991ms step_avg:92.19ms +[2025-08-22 18:46:56] [Rank 0] step:3621/10000 train_time:333870ms step_avg:92.20ms +[2025-08-22 18:46:56] [Rank 0] step:3621/10000 train_time:333870ms step_avg:92.20ms +[2025-08-22 18:46:57] [Rank 0] step:3641/10000 train_time:335749ms step_avg:92.21ms +[2025-08-22 18:46:57] [Rank 0] step:3641/10000 train_time:335749ms step_avg:92.21ms +[2025-08-22 18:46:59] [Rank 0] step:3661/10000 train_time:337630ms step_avg:92.22ms +[2025-08-22 18:46:59] [Rank 0] step:3661/10000 train_time:337630ms step_avg:92.22ms +[2025-08-22 18:47:01] [Rank 0] step:3681/10000 train_time:339511ms step_avg:92.23ms +[2025-08-22 18:47:01] [Rank 0] step:3681/10000 train_time:339511ms step_avg:92.23ms +[2025-08-22 18:47:03] [Rank 0] step:3701/10000 train_time:341394ms step_avg:92.24ms +[2025-08-22 18:47:03] [Rank 0] step:3701/10000 train_time:341394ms step_avg:92.24ms +[2025-08-22 18:47:05] [Rank 0] step:3721/10000 train_time:343306ms step_avg:92.26ms +[2025-08-22 18:47:05] [Rank 0] step:3721/10000 train_time:343306ms step_avg:92.26ms +[2025-08-22 18:47:07] [Rank 0] 
step:3741/10000 train_time:345227ms step_avg:92.28ms +[2025-08-22 18:47:07] [Rank 0] step:3741/10000 train_time:345227ms step_avg:92.28ms +[2025-08-22 18:47:09] [Rank 0] step:3761/10000 train_time:347150ms step_avg:92.30ms +[2025-08-22 18:47:09] [Rank 0] step:3761/10000 train_time:347150ms step_avg:92.30ms +[2025-08-22 18:47:11] [Rank 0] step:3781/10000 train_time:349074ms step_avg:92.32ms +[2025-08-22 18:47:11] [Rank 0] step:3781/10000 train_time:349074ms step_avg:92.32ms +[2025-08-22 18:47:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:47:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:47:26] [Rank 0] PRINT: step:3800/10000 val_loss:3.8310 svd_entropy: attn_qk:H=0.7322,top10E=0.32,eRank=151.6,q75/q25=64.46 attn_vo:H=0.8055,top10E=0.14,eRank=290.5,q75/q25=inf mlp_w1:H=0.9680,top10E=0.04,eRank=621.4,q75/q25=3.07 mlp_w2:H=0.9632,top10E=0.05,eRank=601.7,q75/q25=3.19 vo_prod:H=0.6533,top10E=0.27,eRank=112.0,q75/q25=inf train_time:351000ms step_avg:92.37ms +[2025-08-22 18:47:26] [Rank 0] PRINT: step:3800/10000 val_loss:3.8310 svd_entropy: attn_qk:H=0.7322,top10E=0.32,eRank=151.6,q75/q25=64.46 attn_vo:H=0.8055,top10E=0.14,eRank=290.5,q75/q25=inf mlp_w1:H=0.9680,top10E=0.04,eRank=621.4,q75/q25=3.07 mlp_w2:H=0.9632,top10E=0.05,eRank=601.7,q75/q25=3.19 vo_prod:H=0.6533,top10E=0.27,eRank=112.0,q75/q25=inf train_time:351000ms step_avg:92.37ms +[2025-08-22 18:47:26] [Rank 0] step:3801/10000 train_time:351021ms step_avg:92.35ms +[2025-08-22 18:47:26] [Rank 0] step:3801/10000 train_time:351021ms step_avg:92.35ms +[2025-08-22 18:47:28] [Rank 0] step:3821/10000 train_time:352925ms step_avg:92.36ms +[2025-08-22 18:47:28] [Rank 0] step:3821/10000 train_time:352925ms step_avg:92.36ms +[2025-08-22 18:47:30] [Rank 0] step:3841/10000 train_time:354840ms step_avg:92.38ms +[2025-08-22 
18:47:30] [Rank 0] step:3841/10000 train_time:354840ms step_avg:92.38ms +[2025-08-22 18:47:32] [Rank 0] step:3861/10000 train_time:356754ms step_avg:92.40ms +[2025-08-22 18:47:32] [Rank 0] step:3861/10000 train_time:356754ms step_avg:92.40ms +[2025-08-22 18:47:34] [Rank 0] step:3881/10000 train_time:358666ms step_avg:92.42ms +[2025-08-22 18:47:34] [Rank 0] step:3881/10000 train_time:358666ms step_avg:92.42ms +[2025-08-22 18:47:36] [Rank 0] step:3901/10000 train_time:360580ms step_avg:92.43ms +[2025-08-22 18:47:36] [Rank 0] step:3901/10000 train_time:360580ms step_avg:92.43ms +[2025-08-22 18:47:38] [Rank 0] step:3921/10000 train_time:362493ms step_avg:92.45ms +[2025-08-22 18:47:38] [Rank 0] step:3921/10000 train_time:362493ms step_avg:92.45ms +[2025-08-22 18:47:40] [Rank 0] step:3941/10000 train_time:364408ms step_avg:92.47ms +[2025-08-22 18:47:40] [Rank 0] step:3941/10000 train_time:364408ms step_avg:92.47ms +[2025-08-22 18:47:42] [Rank 0] step:3961/10000 train_time:366321ms step_avg:92.48ms +[2025-08-22 18:47:42] [Rank 0] step:3961/10000 train_time:366321ms step_avg:92.48ms +[2025-08-22 18:47:44] [Rank 0] step:3981/10000 train_time:368292ms step_avg:92.51ms +[2025-08-22 18:47:44] [Rank 0] step:3981/10000 train_time:368292ms step_avg:92.51ms +[2025-08-22 18:47:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:47:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:47:59] [Rank 0] PRINT: step:4000/10000 val_loss:3.8110 svd_entropy: attn_qk:H=0.7340,top10E=0.31,eRank=152.9,q75/q25=64.25 attn_vo:H=0.8064,top10E=0.14,eRank=291.7,q75/q25=inf mlp_w1:H=0.9681,top10E=0.04,eRank=621.7,q75/q25=3.06 mlp_w2:H=0.9632,top10E=0.05,eRank=601.8,q75/q25=3.19 vo_prod:H=0.6553,top10E=0.27,eRank=113.4,q75/q25=inf train_time:370295ms step_avg:92.57ms +[2025-08-22 18:47:59] [Rank 0] PRINT: step:4000/10000 val_loss:3.8110 svd_entropy: attn_qk:H=0.7340,top10E=0.31,eRank=152.9,q75/q25=64.25 attn_vo:H=0.8064,top10E=0.14,eRank=291.7,q75/q25=inf mlp_w1:H=0.9681,top10E=0.04,eRank=621.7,q75/q25=3.06 mlp_w2:H=0.9632,top10E=0.05,eRank=601.8,q75/q25=3.19 vo_prod:H=0.6553,top10E=0.27,eRank=113.4,q75/q25=inf train_time:370295ms step_avg:92.57ms +[2025-08-22 18:47:59] [Rank 0] step:4001/10000 train_time:370316ms step_avg:92.56ms +[2025-08-22 18:47:59] [Rank 0] step:4001/10000 train_time:370316ms step_avg:92.56ms +[2025-08-22 18:48:01] [Rank 0] step:4021/10000 train_time:372230ms step_avg:92.57ms +[2025-08-22 18:48:01] [Rank 0] step:4021/10000 train_time:372230ms step_avg:92.57ms +[2025-08-22 18:48:03] [Rank 0] step:4041/10000 train_time:374141ms step_avg:92.59ms +[2025-08-22 18:48:03] [Rank 0] step:4041/10000 train_time:374141ms step_avg:92.59ms +[2025-08-22 18:48:05] [Rank 0] step:4061/10000 train_time:376052ms step_avg:92.60ms +[2025-08-22 18:48:05] [Rank 0] step:4061/10000 train_time:376052ms step_avg:92.60ms +[2025-08-22 18:48:07] [Rank 0] step:4081/10000 train_time:378213ms step_avg:92.68ms +[2025-08-22 18:48:07] [Rank 0] step:4081/10000 train_time:378213ms step_avg:92.68ms +[2025-08-22 18:48:09] [Rank 0] step:4101/10000 train_time:380125ms step_avg:92.69ms +[2025-08-22 18:48:09] [Rank 0] step:4101/10000 train_time:380125ms step_avg:92.69ms +[2025-08-22 18:48:11] [Rank 0] step:4121/10000 train_time:382036ms step_avg:92.70ms +[2025-08-22 18:48:11] [Rank 0] step:4121/10000 train_time:382036ms step_avg:92.70ms +[2025-08-22 18:48:13] [Rank 0] 
step:4141/10000 train_time:383949ms step_avg:92.72ms +[2025-08-22 18:48:13] [Rank 0] step:4141/10000 train_time:383949ms step_avg:92.72ms +[2025-08-22 18:48:15] [Rank 0] step:4161/10000 train_time:385861ms step_avg:92.73ms +[2025-08-22 18:48:15] [Rank 0] step:4161/10000 train_time:385861ms step_avg:92.73ms +[2025-08-22 18:48:17] [Rank 0] step:4181/10000 train_time:387776ms step_avg:92.75ms +[2025-08-22 18:48:17] [Rank 0] step:4181/10000 train_time:387776ms step_avg:92.75ms +[2025-08-22 18:48:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:48:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:48:33] [Rank 0] PRINT: step:4200/10000 val_loss:3.7994 svd_entropy: attn_qk:H=0.7352,top10E=0.31,eRank=153.9,q75/q25=63.71 attn_vo:H=0.8071,top10E=0.14,eRank=292.7,q75/q25=inf mlp_w1:H=0.9682,top10E=0.04,eRank=621.9,q75/q25=3.05 mlp_w2:H=0.9632,top10E=0.05,eRank=601.8,q75/q25=3.18 vo_prod:H=0.6568,top10E=0.26,eRank=114.4,q75/q25=inf train_time:389694ms step_avg:92.78ms +[2025-08-22 18:48:33] [Rank 0] PRINT: step:4200/10000 val_loss:3.7994 svd_entropy: attn_qk:H=0.7352,top10E=0.31,eRank=153.9,q75/q25=63.71 attn_vo:H=0.8071,top10E=0.14,eRank=292.7,q75/q25=inf mlp_w1:H=0.9682,top10E=0.04,eRank=621.9,q75/q25=3.05 mlp_w2:H=0.9632,top10E=0.05,eRank=601.8,q75/q25=3.18 vo_prod:H=0.6568,top10E=0.26,eRank=114.4,q75/q25=inf train_time:389694ms step_avg:92.78ms +[2025-08-22 18:48:33] [Rank 0] step:4201/10000 train_time:389714ms step_avg:92.77ms +[2025-08-22 18:48:33] [Rank 0] step:4201/10000 train_time:389714ms step_avg:92.77ms +[2025-08-22 18:48:35] [Rank 0] step:4221/10000 train_time:391622ms step_avg:92.78ms +[2025-08-22 18:48:35] [Rank 0] step:4221/10000 train_time:391622ms step_avg:92.78ms +[2025-08-22 18:48:37] [Rank 0] step:4241/10000 train_time:393534ms step_avg:92.79ms +[2025-08-22 
18:48:37] [Rank 0] step:4241/10000 train_time:393534ms step_avg:92.79ms +[2025-08-22 18:48:39] [Rank 0] step:4261/10000 train_time:395444ms step_avg:92.81ms +[2025-08-22 18:48:39] [Rank 0] step:4261/10000 train_time:395444ms step_avg:92.81ms +[2025-08-22 18:48:40] [Rank 0] step:4281/10000 train_time:397356ms step_avg:92.82ms +[2025-08-22 18:48:40] [Rank 0] step:4281/10000 train_time:397356ms step_avg:92.82ms +[2025-08-22 18:48:42] [Rank 0] step:4301/10000 train_time:399267ms step_avg:92.83ms +[2025-08-22 18:48:42] [Rank 0] step:4301/10000 train_time:399267ms step_avg:92.83ms +[2025-08-22 18:48:44] [Rank 0] step:4321/10000 train_time:401181ms step_avg:92.84ms +[2025-08-22 18:48:44] [Rank 0] step:4321/10000 train_time:401181ms step_avg:92.84ms +[2025-08-22 18:48:46] [Rank 0] step:4341/10000 train_time:403093ms step_avg:92.86ms +[2025-08-22 18:48:46] [Rank 0] step:4341/10000 train_time:403093ms step_avg:92.86ms +[2025-08-22 18:48:48] [Rank 0] step:4361/10000 train_time:405077ms step_avg:92.89ms +[2025-08-22 18:48:48] [Rank 0] step:4361/10000 train_time:405077ms step_avg:92.89ms +[2025-08-22 18:48:50] [Rank 0] step:4381/10000 train_time:407073ms step_avg:92.92ms +[2025-08-22 18:48:50] [Rank 0] step:4381/10000 train_time:407073ms step_avg:92.92ms +[2025-08-22 18:48:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:48:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:49:06] [Rank 0] PRINT: step:4400/10000 val_loss:3.7861 svd_entropy: attn_qk:H=0.7367,top10E=0.31,eRank=155.0,q75/q25=63.32 attn_vo:H=0.8079,top10E=0.14,eRank=293.9,q75/q25=inf mlp_w1:H=0.9682,top10E=0.04,eRank=622.2,q75/q25=3.05 mlp_w2:H=0.9632,top10E=0.05,eRank=601.9,q75/q25=3.17 vo_prod:H=0.6582,top10E=0.26,eRank=115.5,q75/q25=inf train_time:408996ms step_avg:92.95ms +[2025-08-22 18:49:06] [Rank 0] PRINT: step:4400/10000 val_loss:3.7861 svd_entropy: attn_qk:H=0.7367,top10E=0.31,eRank=155.0,q75/q25=63.32 attn_vo:H=0.8079,top10E=0.14,eRank=293.9,q75/q25=inf mlp_w1:H=0.9682,top10E=0.04,eRank=622.2,q75/q25=3.05 mlp_w2:H=0.9632,top10E=0.05,eRank=601.9,q75/q25=3.17 vo_prod:H=0.6582,top10E=0.26,eRank=115.5,q75/q25=inf train_time:408996ms step_avg:92.95ms +[2025-08-22 18:49:06] [Rank 0] step:4401/10000 train_time:409017ms step_avg:92.94ms +[2025-08-22 18:49:06] [Rank 0] step:4401/10000 train_time:409017ms step_avg:92.94ms +[2025-08-22 18:49:08] [Rank 0] step:4421/10000 train_time:410929ms step_avg:92.95ms +[2025-08-22 18:49:08] [Rank 0] step:4421/10000 train_time:410929ms step_avg:92.95ms +[2025-08-22 18:49:10] [Rank 0] step:4441/10000 train_time:412842ms step_avg:92.96ms +[2025-08-22 18:49:10] [Rank 0] step:4441/10000 train_time:412842ms step_avg:92.96ms +[2025-08-22 18:49:12] [Rank 0] step:4461/10000 train_time:414763ms step_avg:92.98ms +[2025-08-22 18:49:12] [Rank 0] step:4461/10000 train_time:414763ms step_avg:92.98ms +[2025-08-22 18:49:14] [Rank 0] step:4481/10000 train_time:416687ms step_avg:92.99ms +[2025-08-22 18:49:14] [Rank 0] step:4481/10000 train_time:416687ms step_avg:92.99ms +[2025-08-22 18:49:16] [Rank 0] step:4501/10000 train_time:418608ms step_avg:93.00ms +[2025-08-22 18:49:16] [Rank 0] step:4501/10000 train_time:418608ms step_avg:93.00ms +[2025-08-22 18:49:17] [Rank 0] step:4521/10000 train_time:420533ms step_avg:93.02ms +[2025-08-22 18:49:17] [Rank 0] step:4521/10000 train_time:420533ms step_avg:93.02ms +[2025-08-22 18:49:19] [Rank 0] 
step:4541/10000 train_time:422462ms step_avg:93.03ms +[2025-08-22 18:49:19] [Rank 0] step:4541/10000 train_time:422462ms step_avg:93.03ms +[2025-08-22 18:49:21] [Rank 0] step:4561/10000 train_time:424389ms step_avg:93.05ms +[2025-08-22 18:49:21] [Rank 0] step:4561/10000 train_time:424389ms step_avg:93.05ms +[2025-08-22 18:49:23] [Rank 0] step:4581/10000 train_time:426318ms step_avg:93.06ms +[2025-08-22 18:49:23] [Rank 0] step:4581/10000 train_time:426318ms step_avg:93.06ms +[2025-08-22 18:49:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:49:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:49:39] [Rank 0] PRINT: step:4600/10000 val_loss:3.7672 svd_entropy: attn_qk:H=0.7380,top10E=0.31,eRank=156.1,q75/q25=63.03 attn_vo:H=0.8089,top10E=0.13,eRank=295.3,q75/q25=inf mlp_w1:H=0.9683,top10E=0.04,eRank=622.4,q75/q25=3.05 mlp_w2:H=0.9632,top10E=0.05,eRank=602.0,q75/q25=3.17 vo_prod:H=0.6600,top10E=0.26,eRank=116.9,q75/q25=inf train_time:428251ms step_avg:93.10ms +[2025-08-22 18:49:39] [Rank 0] PRINT: step:4600/10000 val_loss:3.7672 svd_entropy: attn_qk:H=0.7380,top10E=0.31,eRank=156.1,q75/q25=63.03 attn_vo:H=0.8089,top10E=0.13,eRank=295.3,q75/q25=inf mlp_w1:H=0.9683,top10E=0.04,eRank=622.4,q75/q25=3.05 mlp_w2:H=0.9632,top10E=0.05,eRank=602.0,q75/q25=3.17 vo_prod:H=0.6600,top10E=0.26,eRank=116.9,q75/q25=inf train_time:428251ms step_avg:93.10ms +[2025-08-22 18:49:39] [Rank 0] step:4601/10000 train_time:428271ms step_avg:93.08ms +[2025-08-22 18:49:39] [Rank 0] step:4601/10000 train_time:428271ms step_avg:93.08ms +[2025-08-22 18:49:41] [Rank 0] step:4621/10000 train_time:430197ms step_avg:93.10ms +[2025-08-22 18:49:41] [Rank 0] step:4621/10000 train_time:430197ms step_avg:93.10ms +[2025-08-22 18:49:43] [Rank 0] step:4641/10000 train_time:432119ms step_avg:93.11ms +[2025-08-22 
18:49:43] [Rank 0] step:4641/10000 train_time:432119ms step_avg:93.11ms +[2025-08-22 18:49:45] [Rank 0] step:4661/10000 train_time:434037ms step_avg:93.12ms +[2025-08-22 18:49:45] [Rank 0] step:4661/10000 train_time:434037ms step_avg:93.12ms +[2025-08-22 18:49:47] [Rank 0] step:4681/10000 train_time:435956ms step_avg:93.13ms +[2025-08-22 18:49:47] [Rank 0] step:4681/10000 train_time:435956ms step_avg:93.13ms +[2025-08-22 18:49:49] [Rank 0] step:4701/10000 train_time:437877ms step_avg:93.15ms +[2025-08-22 18:49:49] [Rank 0] step:4701/10000 train_time:437877ms step_avg:93.15ms +[2025-08-22 18:49:51] [Rank 0] step:4721/10000 train_time:439864ms step_avg:93.17ms +[2025-08-22 18:49:51] [Rank 0] step:4721/10000 train_time:439864ms step_avg:93.17ms +[2025-08-22 18:49:53] [Rank 0] step:4741/10000 train_time:441870ms step_avg:93.20ms +[2025-08-22 18:49:53] [Rank 0] step:4741/10000 train_time:441870ms step_avg:93.20ms +[2025-08-22 18:49:55] [Rank 0] step:4761/10000 train_time:443796ms step_avg:93.21ms +[2025-08-22 18:49:55] [Rank 0] step:4761/10000 train_time:443796ms step_avg:93.21ms +[2025-08-22 18:49:57] [Rank 0] step:4781/10000 train_time:445718ms step_avg:93.23ms +[2025-08-22 18:49:57] [Rank 0] step:4781/10000 train_time:445718ms step_avg:93.23ms +[2025-08-22 18:49:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:49:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:50:12] [Rank 0] PRINT: step:4800/10000 val_loss:3.7597 svd_entropy: attn_qk:H=0.7391,top10E=0.31,eRank=157.0,q75/q25=63.03 attn_vo:H=0.8096,top10E=0.13,eRank=296.3,q75/q25=inf mlp_w1:H=0.9684,top10E=0.04,eRank=622.6,q75/q25=3.04 mlp_w2:H=0.9632,top10E=0.05,eRank=602.1,q75/q25=3.16 vo_prod:H=0.6615,top10E=0.26,eRank=117.9,q75/q25=inf train_time:447646ms step_avg:93.26ms +[2025-08-22 18:50:12] [Rank 0] PRINT: step:4800/10000 val_loss:3.7597 svd_entropy: attn_qk:H=0.7391,top10E=0.31,eRank=157.0,q75/q25=63.03 attn_vo:H=0.8096,top10E=0.13,eRank=296.3,q75/q25=inf mlp_w1:H=0.9684,top10E=0.04,eRank=622.6,q75/q25=3.04 mlp_w2:H=0.9632,top10E=0.05,eRank=602.1,q75/q25=3.16 vo_prod:H=0.6615,top10E=0.26,eRank=117.9,q75/q25=inf train_time:447646ms step_avg:93.26ms +[2025-08-22 18:50:13] [Rank 0] step:4801/10000 train_time:447665ms step_avg:93.24ms +[2025-08-22 18:50:13] [Rank 0] step:4801/10000 train_time:447665ms step_avg:93.24ms +[2025-08-22 18:50:14] [Rank 0] step:4821/10000 train_time:449588ms step_avg:93.26ms +[2025-08-22 18:50:14] [Rank 0] step:4821/10000 train_time:449588ms step_avg:93.26ms +[2025-08-22 18:50:16] [Rank 0] step:4841/10000 train_time:451501ms step_avg:93.27ms +[2025-08-22 18:50:16] [Rank 0] step:4841/10000 train_time:451501ms step_avg:93.27ms +[2025-08-22 18:50:18] [Rank 0] step:4861/10000 train_time:453418ms step_avg:93.28ms +[2025-08-22 18:50:18] [Rank 0] step:4861/10000 train_time:453418ms step_avg:93.28ms +[2025-08-22 18:50:20] [Rank 0] step:4881/10000 train_time:455336ms step_avg:93.29ms +[2025-08-22 18:50:20] [Rank 0] step:4881/10000 train_time:455336ms step_avg:93.29ms +[2025-08-22 18:50:22] [Rank 0] step:4901/10000 train_time:457253ms step_avg:93.30ms +[2025-08-22 18:50:22] [Rank 0] step:4901/10000 train_time:457253ms step_avg:93.30ms +[2025-08-22 18:50:24] [Rank 0] step:4921/10000 train_time:459173ms step_avg:93.31ms +[2025-08-22 18:50:24] [Rank 0] step:4921/10000 train_time:459173ms step_avg:93.31ms +[2025-08-22 18:50:26] [Rank 0] 
step:4941/10000 train_time:461093ms step_avg:93.32ms +[2025-08-22 18:50:26] [Rank 0] step:4941/10000 train_time:461093ms step_avg:93.32ms +[2025-08-22 18:50:28] [Rank 0] step:4961/10000 train_time:463012ms step_avg:93.33ms +[2025-08-22 18:50:28] [Rank 0] step:4961/10000 train_time:463012ms step_avg:93.33ms +[2025-08-22 18:50:30] [Rank 0] step:4981/10000 train_time:464934ms step_avg:93.34ms +[2025-08-22 18:50:30] [Rank 0] step:4981/10000 train_time:464934ms step_avg:93.34ms +[2025-08-22 18:50:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:50:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:50:46] [Rank 0] PRINT: step:5000/10000 val_loss:3.7459 svd_entropy: attn_qk:H=0.7402,top10E=0.31,eRank=157.9,q75/q25=63.01 attn_vo:H=0.8103,top10E=0.13,eRank=297.5,q75/q25=inf mlp_w1:H=0.9684,top10E=0.04,eRank=622.8,q75/q25=3.04 mlp_w2:H=0.9633,top10E=0.05,eRank=602.1,q75/q25=3.16 vo_prod:H=0.6629,top10E=0.26,eRank=119.2,q75/q25=inf train_time:466861ms step_avg:93.37ms +[2025-08-22 18:50:46] [Rank 0] PRINT: step:5000/10000 val_loss:3.7459 svd_entropy: attn_qk:H=0.7402,top10E=0.31,eRank=157.9,q75/q25=63.01 attn_vo:H=0.8103,top10E=0.13,eRank=297.5,q75/q25=inf mlp_w1:H=0.9684,top10E=0.04,eRank=622.8,q75/q25=3.04 mlp_w2:H=0.9633,top10E=0.05,eRank=602.1,q75/q25=3.16 vo_prod:H=0.6629,top10E=0.26,eRank=119.2,q75/q25=inf train_time:466861ms step_avg:93.37ms +[2025-08-22 18:50:46] [Rank 0] step:5001/10000 train_time:466881ms step_avg:93.36ms +[2025-08-22 18:50:46] [Rank 0] step:5001/10000 train_time:466881ms step_avg:93.36ms +[2025-08-22 18:50:48] [Rank 0] step:5021/10000 train_time:468786ms step_avg:93.37ms +[2025-08-22 18:50:48] [Rank 0] step:5021/10000 train_time:468786ms step_avg:93.37ms +[2025-08-22 18:50:50] [Rank 0] step:5041/10000 train_time:470704ms step_avg:93.38ms +[2025-08-22 
18:50:50] [Rank 0] step:5041/10000 train_time:470704ms step_avg:93.38ms +[2025-08-22 18:50:51] [Rank 0] step:5061/10000 train_time:472620ms step_avg:93.38ms +[2025-08-22 18:50:51] [Rank 0] step:5061/10000 train_time:472620ms step_avg:93.38ms +[2025-08-22 18:50:53] [Rank 0] step:5081/10000 train_time:474540ms step_avg:93.39ms +[2025-08-22 18:50:53] [Rank 0] step:5081/10000 train_time:474540ms step_avg:93.39ms +[2025-08-22 18:50:55] [Rank 0] step:5101/10000 train_time:476516ms step_avg:93.42ms +[2025-08-22 18:50:55] [Rank 0] step:5101/10000 train_time:476516ms step_avg:93.42ms +[2025-08-22 18:50:57] [Rank 0] step:5121/10000 train_time:478500ms step_avg:93.44ms +[2025-08-22 18:50:57] [Rank 0] step:5121/10000 train_time:478500ms step_avg:93.44ms +[2025-08-22 18:50:59] [Rank 0] step:5141/10000 train_time:480421ms step_avg:93.45ms +[2025-08-22 18:50:59] [Rank 0] step:5141/10000 train_time:480421ms step_avg:93.45ms +[2025-08-22 18:51:01] [Rank 0] step:5161/10000 train_time:482340ms step_avg:93.46ms +[2025-08-22 18:51:01] [Rank 0] step:5161/10000 train_time:482340ms step_avg:93.46ms +[2025-08-22 18:51:03] [Rank 0] step:5181/10000 train_time:484264ms step_avg:93.47ms +[2025-08-22 18:51:03] [Rank 0] step:5181/10000 train_time:484264ms step_avg:93.47ms +[2025-08-22 18:51:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:51:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:51:19] [Rank 0] PRINT: step:5200/10000 val_loss:3.7330 svd_entropy: attn_qk:H=0.7414,top10E=0.30,eRank=158.9,q75/q25=62.42 attn_vo:H=0.8111,top10E=0.13,eRank=298.6,q75/q25=inf mlp_w1:H=0.9684,top10E=0.04,eRank=623.0,q75/q25=3.04 mlp_w2:H=0.9633,top10E=0.05,eRank=602.2,q75/q25=3.16 vo_prod:H=0.6644,top10E=0.25,eRank=120.3,q75/q25=inf train_time:486216ms step_avg:93.50ms +[2025-08-22 18:51:19] [Rank 0] PRINT: step:5200/10000 val_loss:3.7330 svd_entropy: attn_qk:H=0.7414,top10E=0.30,eRank=158.9,q75/q25=62.42 attn_vo:H=0.8111,top10E=0.13,eRank=298.6,q75/q25=inf mlp_w1:H=0.9684,top10E=0.04,eRank=623.0,q75/q25=3.04 mlp_w2:H=0.9633,top10E=0.05,eRank=602.2,q75/q25=3.16 vo_prod:H=0.6644,top10E=0.25,eRank=120.3,q75/q25=inf train_time:486216ms step_avg:93.50ms +[2025-08-22 18:51:19] [Rank 0] step:5201/10000 train_time:486237ms step_avg:93.49ms +[2025-08-22 18:51:19] [Rank 0] step:5201/10000 train_time:486237ms step_avg:93.49ms +[2025-08-22 18:51:21] [Rank 0] step:5221/10000 train_time:488189ms step_avg:93.50ms +[2025-08-22 18:51:21] [Rank 0] step:5221/10000 train_time:488189ms step_avg:93.50ms +[2025-08-22 18:51:23] [Rank 0] step:5241/10000 train_time:490142ms step_avg:93.52ms +[2025-08-22 18:51:23] [Rank 0] step:5241/10000 train_time:490142ms step_avg:93.52ms +[2025-08-22 18:51:25] [Rank 0] step:5261/10000 train_time:492094ms step_avg:93.54ms +[2025-08-22 18:51:25] [Rank 0] step:5261/10000 train_time:492094ms step_avg:93.54ms +[2025-08-22 18:51:27] [Rank 0] step:5281/10000 train_time:494048ms step_avg:93.55ms +[2025-08-22 18:51:27] [Rank 0] step:5281/10000 train_time:494048ms step_avg:93.55ms +[2025-08-22 18:51:29] [Rank 0] step:5301/10000 train_time:496014ms step_avg:93.57ms +[2025-08-22 18:51:29] [Rank 0] step:5301/10000 train_time:496014ms step_avg:93.57ms +[2025-08-22 18:51:31] [Rank 0] step:5321/10000 train_time:497970ms step_avg:93.59ms +[2025-08-22 18:51:31] [Rank 0] step:5321/10000 train_time:497970ms step_avg:93.59ms +[2025-08-22 18:51:33] [Rank 0] 
step:5341/10000 train_time:499925ms step_avg:93.60ms +[2025-08-22 18:51:33] [Rank 0] step:5341/10000 train_time:499925ms step_avg:93.60ms +[2025-08-22 18:51:35] [Rank 0] step:5361/10000 train_time:501883ms step_avg:93.62ms +[2025-08-22 18:51:35] [Rank 0] step:5361/10000 train_time:501883ms step_avg:93.62ms +[2025-08-22 18:51:37] [Rank 0] step:5381/10000 train_time:503840ms step_avg:93.63ms +[2025-08-22 18:51:37] [Rank 0] step:5381/10000 train_time:503840ms step_avg:93.63ms +[2025-08-22 18:51:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:51:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:51:52] [Rank 0] PRINT: step:5400/10000 val_loss:3.7205 svd_entropy: attn_qk:H=0.7425,top10E=0.30,eRank=159.7,q75/q25=61.87 attn_vo:H=0.8117,top10E=0.13,eRank=299.6,q75/q25=inf mlp_w1:H=0.9685,top10E=0.04,eRank=623.2,q75/q25=3.03 mlp_w2:H=0.9633,top10E=0.05,eRank=602.3,q75/q25=3.16 vo_prod:H=0.6656,top10E=0.25,eRank=121.3,q75/q25=inf train_time:505801ms step_avg:93.67ms +[2025-08-22 18:51:52] [Rank 0] PRINT: step:5400/10000 val_loss:3.7205 svd_entropy: attn_qk:H=0.7425,top10E=0.30,eRank=159.7,q75/q25=61.87 attn_vo:H=0.8117,top10E=0.13,eRank=299.6,q75/q25=inf mlp_w1:H=0.9685,top10E=0.04,eRank=623.2,q75/q25=3.03 mlp_w2:H=0.9633,top10E=0.05,eRank=602.3,q75/q25=3.16 vo_prod:H=0.6656,top10E=0.25,eRank=121.3,q75/q25=inf train_time:505801ms step_avg:93.67ms +[2025-08-22 18:51:52] [Rank 0] step:5401/10000 train_time:505820ms step_avg:93.65ms +[2025-08-22 18:51:52] [Rank 0] step:5401/10000 train_time:505820ms step_avg:93.65ms +[2025-08-22 18:51:54] [Rank 0] step:5421/10000 train_time:507772ms step_avg:93.67ms +[2025-08-22 18:51:54] [Rank 0] step:5421/10000 train_time:507772ms step_avg:93.67ms +[2025-08-22 18:51:56] [Rank 0] step:5441/10000 train_time:509717ms step_avg:93.68ms +[2025-08-22 
18:51:56] [Rank 0] step:5441/10000 train_time:509717ms step_avg:93.68ms +[2025-08-22 18:51:58] [Rank 0] step:5461/10000 train_time:511724ms step_avg:93.71ms +[2025-08-22 18:51:58] [Rank 0] step:5461/10000 train_time:511724ms step_avg:93.71ms +[2025-08-22 18:52:00] [Rank 0] step:5481/10000 train_time:513742ms step_avg:93.73ms +[2025-08-22 18:52:00] [Rank 0] step:5481/10000 train_time:513742ms step_avg:93.73ms +[2025-08-22 18:52:02] [Rank 0] step:5501/10000 train_time:515701ms step_avg:93.75ms +[2025-08-22 18:52:02] [Rank 0] step:5501/10000 train_time:515701ms step_avg:93.75ms +[2025-08-22 18:52:04] [Rank 0] step:5521/10000 train_time:517659ms step_avg:93.76ms +[2025-08-22 18:52:04] [Rank 0] step:5521/10000 train_time:517659ms step_avg:93.76ms +[2025-08-22 18:52:06] [Rank 0] step:5541/10000 train_time:519613ms step_avg:93.78ms +[2025-08-22 18:52:06] [Rank 0] step:5541/10000 train_time:519613ms step_avg:93.78ms +[2025-08-22 18:52:08] [Rank 0] step:5561/10000 train_time:521566ms step_avg:93.79ms +[2025-08-22 18:52:08] [Rank 0] step:5561/10000 train_time:521566ms step_avg:93.79ms +[2025-08-22 18:52:10] [Rank 0] step:5581/10000 train_time:523516ms step_avg:93.80ms +[2025-08-22 18:52:10] [Rank 0] step:5581/10000 train_time:523516ms step_avg:93.80ms +[2025-08-22 18:52:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:52:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:52:26] [Rank 0] PRINT: step:5600/10000 val_loss:3.7125 svd_entropy: attn_qk:H=0.7435,top10E=0.30,eRank=160.6,q75/q25=61.86 attn_vo:H=0.8124,top10E=0.13,eRank=300.5,q75/q25=inf mlp_w1:H=0.9685,top10E=0.04,eRank=623.3,q75/q25=3.03 mlp_w2:H=0.9633,top10E=0.05,eRank=602.3,q75/q25=3.15 vo_prod:H=0.6667,top10E=0.25,eRank=122.1,q75/q25=inf train_time:525479ms step_avg:93.84ms +[2025-08-22 18:52:26] [Rank 0] PRINT: step:5600/10000 val_loss:3.7125 svd_entropy: attn_qk:H=0.7435,top10E=0.30,eRank=160.6,q75/q25=61.86 attn_vo:H=0.8124,top10E=0.13,eRank=300.5,q75/q25=inf mlp_w1:H=0.9685,top10E=0.04,eRank=623.3,q75/q25=3.03 mlp_w2:H=0.9633,top10E=0.05,eRank=602.3,q75/q25=3.15 vo_prod:H=0.6667,top10E=0.25,eRank=122.1,q75/q25=inf train_time:525479ms step_avg:93.84ms +[2025-08-22 18:52:26] [Rank 0] step:5601/10000 train_time:525500ms step_avg:93.82ms +[2025-08-22 18:52:26] [Rank 0] step:5601/10000 train_time:525500ms step_avg:93.82ms +[2025-08-22 18:52:28] [Rank 0] step:5621/10000 train_time:527435ms step_avg:93.83ms +[2025-08-22 18:52:28] [Rank 0] step:5621/10000 train_time:527435ms step_avg:93.83ms +[2025-08-22 18:52:30] [Rank 0] step:5641/10000 train_time:529383ms step_avg:93.85ms +[2025-08-22 18:52:30] [Rank 0] step:5641/10000 train_time:529383ms step_avg:93.85ms +[2025-08-22 18:52:32] [Rank 0] step:5661/10000 train_time:531328ms step_avg:93.86ms +[2025-08-22 18:52:32] [Rank 0] step:5661/10000 train_time:531328ms step_avg:93.86ms +[2025-08-22 18:52:33] [Rank 0] step:5681/10000 train_time:533279ms step_avg:93.87ms +[2025-08-22 18:52:33] [Rank 0] step:5681/10000 train_time:533279ms step_avg:93.87ms +[2025-08-22 18:52:35] [Rank 0] step:5701/10000 train_time:535229ms step_avg:93.88ms +[2025-08-22 18:52:35] [Rank 0] step:5701/10000 train_time:535229ms step_avg:93.88ms +[2025-08-22 18:52:37] [Rank 0] step:5721/10000 train_time:537184ms step_avg:93.90ms +[2025-08-22 18:52:37] [Rank 0] step:5721/10000 train_time:537184ms step_avg:93.90ms +[2025-08-22 18:52:39] [Rank 0] 
step:5741/10000 train_time:539132ms step_avg:93.91ms +[2025-08-22 18:52:39] [Rank 0] step:5741/10000 train_time:539132ms step_avg:93.91ms +[2025-08-22 18:52:41] [Rank 0] step:5761/10000 train_time:541086ms step_avg:93.92ms +[2025-08-22 18:52:41] [Rank 0] step:5761/10000 train_time:541086ms step_avg:93.92ms +[2025-08-22 18:52:43] [Rank 0] step:5781/10000 train_time:543037ms step_avg:93.93ms +[2025-08-22 18:52:43] [Rank 0] step:5781/10000 train_time:543037ms step_avg:93.93ms +[2025-08-22 18:52:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:52:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:52:59] [Rank 0] PRINT: step:5800/10000 val_loss:3.7116 svd_entropy: attn_qk:H=0.7445,top10E=0.30,eRank=161.4,q75/q25=61.73 attn_vo:H=0.8129,top10E=0.13,eRank=301.2,q75/q25=inf mlp_w1:H=0.9686,top10E=0.04,eRank=623.5,q75/q25=3.03 mlp_w2:H=0.9633,top10E=0.05,eRank=602.4,q75/q25=3.15 vo_prod:H=0.6676,top10E=0.25,eRank=122.8,q75/q25=inf train_time:544999ms step_avg:93.97ms +[2025-08-22 18:52:59] [Rank 0] PRINT: step:5800/10000 val_loss:3.7116 svd_entropy: attn_qk:H=0.7445,top10E=0.30,eRank=161.4,q75/q25=61.73 attn_vo:H=0.8129,top10E=0.13,eRank=301.2,q75/q25=inf mlp_w1:H=0.9686,top10E=0.04,eRank=623.5,q75/q25=3.03 mlp_w2:H=0.9633,top10E=0.05,eRank=602.4,q75/q25=3.15 vo_prod:H=0.6676,top10E=0.25,eRank=122.8,q75/q25=inf train_time:544999ms step_avg:93.97ms +[2025-08-22 18:52:59] [Rank 0] step:5801/10000 train_time:545020ms step_avg:93.95ms +[2025-08-22 18:52:59] [Rank 0] step:5801/10000 train_time:545020ms step_avg:93.95ms +[2025-08-22 18:53:01] [Rank 0] step:5821/10000 train_time:547024ms step_avg:93.97ms +[2025-08-22 18:53:01] [Rank 0] step:5821/10000 train_time:547024ms step_avg:93.97ms +[2025-08-22 18:53:03] [Rank 0] step:5841/10000 train_time:549058ms step_avg:94.00ms +[2025-08-22 
18:53:03] [Rank 0] step:5841/10000 train_time:549058ms step_avg:94.00ms +[2025-08-22 18:53:05] [Rank 0] step:5861/10000 train_time:551011ms step_avg:94.01ms +[2025-08-22 18:53:05] [Rank 0] step:5861/10000 train_time:551011ms step_avg:94.01ms +[2025-08-22 18:53:07] [Rank 0] step:5881/10000 train_time:552961ms step_avg:94.03ms +[2025-08-22 18:53:07] [Rank 0] step:5881/10000 train_time:552961ms step_avg:94.03ms +[2025-08-22 18:53:09] [Rank 0] step:5901/10000 train_time:554912ms step_avg:94.04ms +[2025-08-22 18:53:09] [Rank 0] step:5901/10000 train_time:554912ms step_avg:94.04ms +[2025-08-22 18:53:11] [Rank 0] step:5921/10000 train_time:556863ms step_avg:94.05ms +[2025-08-22 18:53:11] [Rank 0] step:5921/10000 train_time:556863ms step_avg:94.05ms +[2025-08-22 18:53:13] [Rank 0] step:5941/10000 train_time:558819ms step_avg:94.06ms +[2025-08-22 18:53:13] [Rank 0] step:5941/10000 train_time:558819ms step_avg:94.06ms +[2025-08-22 18:53:15] [Rank 0] step:5961/10000 train_time:560776ms step_avg:94.07ms +[2025-08-22 18:53:15] [Rank 0] step:5961/10000 train_time:560776ms step_avg:94.07ms +[2025-08-22 18:53:17] [Rank 0] step:5981/10000 train_time:562733ms step_avg:94.09ms +[2025-08-22 18:53:17] [Rank 0] step:5981/10000 train_time:562733ms step_avg:94.09ms +[2025-08-22 18:53:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:53:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:53:32] [Rank 0] PRINT: step:6000/10000 val_loss:3.6897 svd_entropy: attn_qk:H=0.7454,top10E=0.30,eRank=162.2,q75/q25=61.34 attn_vo:H=0.8136,top10E=0.13,eRank=302.3,q75/q25=inf mlp_w1:H=0.9686,top10E=0.04,eRank=623.6,q75/q25=3.02 mlp_w2:H=0.9633,top10E=0.05,eRank=602.4,q75/q25=3.14 vo_prod:H=0.6689,top10E=0.25,eRank=123.8,q75/q25=inf train_time:564693ms step_avg:94.12ms +[2025-08-22 18:53:32] [Rank 0] PRINT: step:6000/10000 val_loss:3.6897 svd_entropy: attn_qk:H=0.7454,top10E=0.30,eRank=162.2,q75/q25=61.34 attn_vo:H=0.8136,top10E=0.13,eRank=302.3,q75/q25=inf mlp_w1:H=0.9686,top10E=0.04,eRank=623.6,q75/q25=3.02 mlp_w2:H=0.9633,top10E=0.05,eRank=602.4,q75/q25=3.14 vo_prod:H=0.6689,top10E=0.25,eRank=123.8,q75/q25=inf train_time:564693ms step_avg:94.12ms +[2025-08-22 18:53:32] [Rank 0] step:6001/10000 train_time:564713ms step_avg:94.10ms +[2025-08-22 18:53:32] [Rank 0] step:6001/10000 train_time:564713ms step_avg:94.10ms +[2025-08-22 18:53:34] [Rank 0] step:6021/10000 train_time:566678ms step_avg:94.12ms +[2025-08-22 18:53:34] [Rank 0] step:6021/10000 train_time:566678ms step_avg:94.12ms +[2025-08-22 18:53:36] [Rank 0] step:6041/10000 train_time:568636ms step_avg:94.13ms +[2025-08-22 18:53:36] [Rank 0] step:6041/10000 train_time:568636ms step_avg:94.13ms +[2025-08-22 18:53:38] [Rank 0] step:6061/10000 train_time:570599ms step_avg:94.14ms +[2025-08-22 18:53:38] [Rank 0] step:6061/10000 train_time:570599ms step_avg:94.14ms +[2025-08-22 18:53:40] [Rank 0] step:6081/10000 train_time:572557ms step_avg:94.16ms +[2025-08-22 18:53:40] [Rank 0] step:6081/10000 train_time:572557ms step_avg:94.16ms +[2025-08-22 18:53:42] [Rank 0] step:6101/10000 train_time:574525ms step_avg:94.17ms +[2025-08-22 18:53:42] [Rank 0] step:6101/10000 train_time:574525ms step_avg:94.17ms +[2025-08-22 18:53:44] [Rank 0] step:6121/10000 train_time:576561ms step_avg:94.19ms +[2025-08-22 18:53:44] [Rank 0] step:6121/10000 train_time:576561ms step_avg:94.19ms +[2025-08-22 18:53:46] [Rank 0] 
step:6141/10000 train_time:578532ms step_avg:94.21ms +[2025-08-22 18:53:46] [Rank 0] step:6141/10000 train_time:578532ms step_avg:94.21ms +[2025-08-22 18:53:48] [Rank 0] step:6161/10000 train_time:580493ms step_avg:94.22ms +[2025-08-22 18:53:48] [Rank 0] step:6161/10000 train_time:580493ms step_avg:94.22ms +[2025-08-22 18:53:50] [Rank 0] step:6181/10000 train_time:582454ms step_avg:94.23ms +[2025-08-22 18:53:50] [Rank 0] step:6181/10000 train_time:582454ms step_avg:94.23ms +[2025-08-22 18:53:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:53:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:54:06] [Rank 0] PRINT: step:6200/10000 val_loss:3.6771 svd_entropy: attn_qk:H=0.7465,top10E=0.30,eRank=163.0,q75/q25=61.01 attn_vo:H=0.8142,top10E=0.13,eRank=303.3,q75/q25=inf mlp_w1:H=0.9686,top10E=0.04,eRank=623.8,q75/q25=3.02 mlp_w2:H=0.9633,top10E=0.05,eRank=602.5,q75/q25=3.14 vo_prod:H=0.6701,top10E=0.25,eRank=124.9,q75/q25=inf train_time:584423ms step_avg:94.26ms +[2025-08-22 18:54:06] [Rank 0] PRINT: step:6200/10000 val_loss:3.6771 svd_entropy: attn_qk:H=0.7465,top10E=0.30,eRank=163.0,q75/q25=61.01 attn_vo:H=0.8142,top10E=0.13,eRank=303.3,q75/q25=inf mlp_w1:H=0.9686,top10E=0.04,eRank=623.8,q75/q25=3.02 mlp_w2:H=0.9633,top10E=0.05,eRank=602.5,q75/q25=3.14 vo_prod:H=0.6701,top10E=0.25,eRank=124.9,q75/q25=inf train_time:584423ms step_avg:94.26ms +[2025-08-22 18:54:06] [Rank 0] step:6201/10000 train_time:584443ms step_avg:94.25ms +[2025-08-22 18:54:06] [Rank 0] step:6201/10000 train_time:584443ms step_avg:94.25ms +[2025-08-22 18:54:08] [Rank 0] step:6221/10000 train_time:586386ms step_avg:94.26ms +[2025-08-22 18:54:08] [Rank 0] step:6221/10000 train_time:586386ms step_avg:94.26ms +[2025-08-22 18:54:10] [Rank 0] step:6241/10000 train_time:588337ms step_avg:94.27ms +[2025-08-22 
18:54:10] [Rank 0] step:6241/10000 train_time:588337ms step_avg:94.27ms +[2025-08-22 18:54:12] [Rank 0] step:6261/10000 train_time:590295ms step_avg:94.28ms +[2025-08-22 18:54:12] [Rank 0] step:6261/10000 train_time:590295ms step_avg:94.28ms +[2025-08-22 18:54:14] [Rank 0] step:6281/10000 train_time:592257ms step_avg:94.29ms +[2025-08-22 18:54:14] [Rank 0] step:6281/10000 train_time:592257ms step_avg:94.29ms +[2025-08-22 18:54:16] [Rank 0] step:6301/10000 train_time:594217ms step_avg:94.31ms +[2025-08-22 18:54:16] [Rank 0] step:6301/10000 train_time:594217ms step_avg:94.31ms +[2025-08-22 18:54:18] [Rank 0] step:6321/10000 train_time:596180ms step_avg:94.32ms +[2025-08-22 18:54:18] [Rank 0] step:6321/10000 train_time:596180ms step_avg:94.32ms +[2025-08-22 18:54:19] [Rank 0] step:6341/10000 train_time:598142ms step_avg:94.33ms +[2025-08-22 18:54:19] [Rank 0] step:6341/10000 train_time:598142ms step_avg:94.33ms +[2025-08-22 18:54:21] [Rank 0] step:6361/10000 train_time:600109ms step_avg:94.34ms +[2025-08-22 18:54:21] [Rank 0] step:6361/10000 train_time:600109ms step_avg:94.34ms +[2025-08-22 18:54:23] [Rank 0] step:6381/10000 train_time:602073ms step_avg:94.35ms +[2025-08-22 18:54:23] [Rank 0] step:6381/10000 train_time:602073ms step_avg:94.35ms +[2025-08-22 18:54:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:54:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:54:39] [Rank 0] PRINT: step:6400/10000 val_loss:3.6637 svd_entropy: attn_qk:H=0.7474,top10E=0.30,eRank=163.8,q75/q25=60.75 attn_vo:H=0.8147,top10E=0.13,eRank=304.1,q75/q25=inf mlp_w1:H=0.9687,top10E=0.04,eRank=623.9,q75/q25=3.01 mlp_w2:H=0.9633,top10E=0.05,eRank=602.6,q75/q25=3.14 vo_prod:H=0.6709,top10E=0.24,eRank=125.5,q75/q25=inf train_time:604039ms step_avg:94.38ms +[2025-08-22 18:54:39] [Rank 0] PRINT: step:6400/10000 val_loss:3.6637 svd_entropy: attn_qk:H=0.7474,top10E=0.30,eRank=163.8,q75/q25=60.75 attn_vo:H=0.8147,top10E=0.13,eRank=304.1,q75/q25=inf mlp_w1:H=0.9687,top10E=0.04,eRank=623.9,q75/q25=3.01 mlp_w2:H=0.9633,top10E=0.05,eRank=602.6,q75/q25=3.14 vo_prod:H=0.6709,top10E=0.24,eRank=125.5,q75/q25=inf train_time:604039ms step_avg:94.38ms +[2025-08-22 18:54:39] [Rank 0] step:6401/10000 train_time:604060ms step_avg:94.37ms +[2025-08-22 18:54:39] [Rank 0] step:6401/10000 train_time:604060ms step_avg:94.37ms +[2025-08-22 18:54:41] [Rank 0] step:6421/10000 train_time:606017ms step_avg:94.38ms +[2025-08-22 18:54:41] [Rank 0] step:6421/10000 train_time:606017ms step_avg:94.38ms +[2025-08-22 18:54:43] [Rank 0] step:6441/10000 train_time:607970ms step_avg:94.39ms +[2025-08-22 18:54:43] [Rank 0] step:6441/10000 train_time:607970ms step_avg:94.39ms +[2025-08-22 18:54:45] [Rank 0] step:6461/10000 train_time:609930ms step_avg:94.40ms +[2025-08-22 18:54:45] [Rank 0] step:6461/10000 train_time:609930ms step_avg:94.40ms +[2025-08-22 18:54:47] [Rank 0] step:6481/10000 train_time:611893ms step_avg:94.41ms +[2025-08-22 18:54:47] [Rank 0] step:6481/10000 train_time:611893ms step_avg:94.41ms +[2025-08-22 18:54:49] [Rank 0] step:6501/10000 train_time:613847ms step_avg:94.42ms +[2025-08-22 18:54:49] [Rank 0] step:6501/10000 train_time:613847ms step_avg:94.42ms +[2025-08-22 18:54:51] [Rank 0] step:6521/10000 train_time:615801ms step_avg:94.43ms +[2025-08-22 18:54:51] [Rank 0] step:6521/10000 train_time:615801ms step_avg:94.43ms +[2025-08-22 18:54:53] [Rank 0] 
step:6541/10000 train_time:617761ms step_avg:94.44ms +[2025-08-22 18:54:53] [Rank 0] step:6541/10000 train_time:617761ms step_avg:94.44ms +[2025-08-22 18:54:55] [Rank 0] step:6561/10000 train_time:619721ms step_avg:94.46ms +[2025-08-22 18:54:55] [Rank 0] step:6561/10000 train_time:619721ms step_avg:94.46ms +[2025-08-22 18:54:57] [Rank 0] step:6581/10000 train_time:621675ms step_avg:94.47ms +[2025-08-22 18:54:57] [Rank 0] step:6581/10000 train_time:621675ms step_avg:94.47ms +[2025-08-22 18:54:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:54:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:55:12] [Rank 0] PRINT: step:6600/10000 val_loss:3.6514 svd_entropy: attn_qk:H=0.7483,top10E=0.29,eRank=164.5,q75/q25=60.45 attn_vo:H=0.8152,top10E=0.13,eRank=304.9,q75/q25=inf mlp_w1:H=0.9687,top10E=0.04,eRank=624.0,q75/q25=3.01 mlp_w2:H=0.9634,top10E=0.05,eRank=602.6,q75/q25=3.14 vo_prod:H=0.6718,top10E=0.24,eRank=126.3,q75/q25=inf train_time:623640ms step_avg:94.49ms +[2025-08-22 18:55:12] [Rank 0] PRINT: step:6600/10000 val_loss:3.6514 svd_entropy: attn_qk:H=0.7483,top10E=0.29,eRank=164.5,q75/q25=60.45 attn_vo:H=0.8152,top10E=0.13,eRank=304.9,q75/q25=inf mlp_w1:H=0.9687,top10E=0.04,eRank=624.0,q75/q25=3.01 mlp_w2:H=0.9634,top10E=0.05,eRank=602.6,q75/q25=3.14 vo_prod:H=0.6718,top10E=0.24,eRank=126.3,q75/q25=inf train_time:623640ms step_avg:94.49ms +[2025-08-22 18:55:12] [Rank 0] step:6601/10000 train_time:623660ms step_avg:94.48ms +[2025-08-22 18:55:12] [Rank 0] step:6601/10000 train_time:623660ms step_avg:94.48ms +[2025-08-22 18:55:14] [Rank 0] step:6621/10000 train_time:625614ms step_avg:94.49ms +[2025-08-22 18:55:14] [Rank 0] step:6621/10000 train_time:625614ms step_avg:94.49ms +[2025-08-22 18:55:16] [Rank 0] step:6641/10000 train_time:627574ms step_avg:94.50ms +[2025-08-22 
18:55:16] [Rank 0] step:6641/10000 train_time:627574ms step_avg:94.50ms +[2025-08-22 18:55:18] [Rank 0] step:6661/10000 train_time:629526ms step_avg:94.51ms +[2025-08-22 18:55:18] [Rank 0] step:6661/10000 train_time:629526ms step_avg:94.51ms +[2025-08-22 18:55:20] [Rank 0] step:6681/10000 train_time:631497ms step_avg:94.52ms +[2025-08-22 18:55:20] [Rank 0] step:6681/10000 train_time:631497ms step_avg:94.52ms +[2025-08-22 18:55:22] [Rank 0] step:6701/10000 train_time:633490ms step_avg:94.54ms +[2025-08-22 18:55:22] [Rank 0] step:6701/10000 train_time:633490ms step_avg:94.54ms +[2025-08-22 18:55:24] [Rank 0] step:6721/10000 train_time:635476ms step_avg:94.55ms +[2025-08-22 18:55:24] [Rank 0] step:6721/10000 train_time:635476ms step_avg:94.55ms +[2025-08-22 18:55:26] [Rank 0] step:6741/10000 train_time:637459ms step_avg:94.56ms +[2025-08-22 18:55:26] [Rank 0] step:6741/10000 train_time:637459ms step_avg:94.56ms +[2025-08-22 18:55:28] [Rank 0] step:6761/10000 train_time:639441ms step_avg:94.58ms +[2025-08-22 18:55:28] [Rank 0] step:6761/10000 train_time:639441ms step_avg:94.58ms +[2025-08-22 18:55:30] [Rank 0] step:6781/10000 train_time:641430ms step_avg:94.59ms +[2025-08-22 18:55:30] [Rank 0] step:6781/10000 train_time:641430ms step_avg:94.59ms +[2025-08-22 18:55:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:55:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:55:46] [Rank 0] PRINT: step:6800/10000 val_loss:3.6357 svd_entropy: attn_qk:H=0.7489,top10E=0.29,eRank=165.0,q75/q25=60.09 attn_vo:H=0.8158,top10E=0.13,eRank=305.7,q75/q25=inf mlp_w1:H=0.9687,top10E=0.04,eRank=624.1,q75/q25=3.01 mlp_w2:H=0.9634,top10E=0.05,eRank=602.7,q75/q25=3.13 vo_prod:H=0.6727,top10E=0.24,eRank=127.1,q75/q25=inf train_time:643427ms step_avg:94.62ms +[2025-08-22 18:55:46] [Rank 0] PRINT: step:6800/10000 val_loss:3.6357 svd_entropy: attn_qk:H=0.7489,top10E=0.29,eRank=165.0,q75/q25=60.09 attn_vo:H=0.8158,top10E=0.13,eRank=305.7,q75/q25=inf mlp_w1:H=0.9687,top10E=0.04,eRank=624.1,q75/q25=3.01 mlp_w2:H=0.9634,top10E=0.05,eRank=602.7,q75/q25=3.13 vo_prod:H=0.6727,top10E=0.24,eRank=127.1,q75/q25=inf train_time:643427ms step_avg:94.62ms +[2025-08-22 18:55:46] [Rank 0] step:6801/10000 train_time:643448ms step_avg:94.61ms +[2025-08-22 18:55:46] [Rank 0] step:6801/10000 train_time:643448ms step_avg:94.61ms +[2025-08-22 18:55:48] [Rank 0] step:6821/10000 train_time:645431ms step_avg:94.62ms +[2025-08-22 18:55:48] [Rank 0] step:6821/10000 train_time:645431ms step_avg:94.62ms +[2025-08-22 18:55:50] [Rank 0] step:6841/10000 train_time:647417ms step_avg:94.64ms +[2025-08-22 18:55:50] [Rank 0] step:6841/10000 train_time:647417ms step_avg:94.64ms +[2025-08-22 18:55:52] [Rank 0] step:6861/10000 train_time:649398ms step_avg:94.65ms +[2025-08-22 18:55:52] [Rank 0] step:6861/10000 train_time:649398ms step_avg:94.65ms +[2025-08-22 18:55:54] [Rank 0] step:6881/10000 train_time:651386ms step_avg:94.66ms +[2025-08-22 18:55:54] [Rank 0] step:6881/10000 train_time:651386ms step_avg:94.66ms +[2025-08-22 18:55:56] [Rank 0] step:6901/10000 train_time:653368ms step_avg:94.68ms +[2025-08-22 18:55:56] [Rank 0] step:6901/10000 train_time:653368ms step_avg:94.68ms +[2025-08-22 18:55:58] [Rank 0] step:6921/10000 train_time:655351ms step_avg:94.69ms +[2025-08-22 18:55:58] [Rank 0] step:6921/10000 train_time:655351ms step_avg:94.69ms +[2025-08-22 18:56:00] [Rank 0] 
step:6941/10000 train_time:657344ms step_avg:94.70ms +[2025-08-22 18:56:00] [Rank 0] step:6941/10000 train_time:657344ms step_avg:94.70ms +[2025-08-22 18:56:02] [Rank 0] step:6961/10000 train_time:659348ms step_avg:94.72ms +[2025-08-22 18:56:02] [Rank 0] step:6961/10000 train_time:659348ms step_avg:94.72ms +[2025-08-22 18:56:04] [Rank 0] step:6981/10000 train_time:661342ms step_avg:94.73ms +[2025-08-22 18:56:04] [Rank 0] step:6981/10000 train_time:661342ms step_avg:94.73ms +[2025-08-22 18:56:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:56:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:56:20] [Rank 0] PRINT: step:7000/10000 val_loss:3.6209 svd_entropy: attn_qk:H=0.7496,top10E=0.29,eRank=165.6,q75/q25=59.91 attn_vo:H=0.8163,top10E=0.13,eRank=306.5,q75/q25=inf mlp_w1:H=0.9687,top10E=0.04,eRank=624.2,q75/q25=3.01 mlp_w2:H=0.9634,top10E=0.05,eRank=602.8,q75/q25=3.13 vo_prod:H=0.6737,top10E=0.24,eRank=128.0,q75/q25=inf train_time:663340ms step_avg:94.76ms +[2025-08-22 18:56:20] [Rank 0] PRINT: step:7000/10000 val_loss:3.6209 svd_entropy: attn_qk:H=0.7496,top10E=0.29,eRank=165.6,q75/q25=59.91 attn_vo:H=0.8163,top10E=0.13,eRank=306.5,q75/q25=inf mlp_w1:H=0.9687,top10E=0.04,eRank=624.2,q75/q25=3.01 mlp_w2:H=0.9634,top10E=0.05,eRank=602.8,q75/q25=3.13 vo_prod:H=0.6737,top10E=0.24,eRank=128.0,q75/q25=inf train_time:663340ms step_avg:94.76ms +[2025-08-22 18:56:20] [Rank 0] step:7001/10000 train_time:663362ms step_avg:94.75ms +[2025-08-22 18:56:20] [Rank 0] step:7001/10000 train_time:663362ms step_avg:94.75ms +[2025-08-22 18:56:22] [Rank 0] step:7021/10000 train_time:665360ms step_avg:94.77ms +[2025-08-22 18:56:22] [Rank 0] step:7021/10000 train_time:665360ms step_avg:94.77ms +[2025-08-22 18:56:24] [Rank 0] step:7041/10000 train_time:667348ms step_avg:94.78ms +[2025-08-22 
18:56:24] [Rank 0] step:7041/10000 train_time:667348ms step_avg:94.78ms +[2025-08-22 18:56:26] [Rank 0] step:7061/10000 train_time:669335ms step_avg:94.79ms +[2025-08-22 18:56:26] [Rank 0] step:7061/10000 train_time:669335ms step_avg:94.79ms +[2025-08-22 18:56:28] [Rank 0] step:7081/10000 train_time:671323ms step_avg:94.81ms +[2025-08-22 18:56:28] [Rank 0] step:7081/10000 train_time:671323ms step_avg:94.81ms +[2025-08-22 18:56:30] [Rank 0] step:7101/10000 train_time:673319ms step_avg:94.82ms +[2025-08-22 18:56:30] [Rank 0] step:7101/10000 train_time:673319ms step_avg:94.82ms +[2025-08-22 18:56:32] [Rank 0] step:7121/10000 train_time:675307ms step_avg:94.83ms +[2025-08-22 18:56:32] [Rank 0] step:7121/10000 train_time:675307ms step_avg:94.83ms +[2025-08-22 18:56:34] [Rank 0] step:7141/10000 train_time:677298ms step_avg:94.85ms +[2025-08-22 18:56:34] [Rank 0] step:7141/10000 train_time:677298ms step_avg:94.85ms +[2025-08-22 18:56:36] [Rank 0] step:7161/10000 train_time:679293ms step_avg:94.86ms +[2025-08-22 18:56:36] [Rank 0] step:7161/10000 train_time:679293ms step_avg:94.86ms +[2025-08-22 18:56:38] [Rank 0] step:7181/10000 train_time:681288ms step_avg:94.87ms +[2025-08-22 18:56:38] [Rank 0] step:7181/10000 train_time:681288ms step_avg:94.87ms +[2025-08-22 18:56:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:56:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:56:54] [Rank 0] PRINT: step:7200/10000 val_loss:3.6090 svd_entropy: attn_qk:H=0.7500,top10E=0.29,eRank=166.0,q75/q25=59.73 attn_vo:H=0.8168,top10E=0.12,eRank=307.3,q75/q25=inf mlp_w1:H=0.9688,top10E=0.04,eRank=624.3,q75/q25=3.01 mlp_w2:H=0.9634,top10E=0.05,eRank=602.9,q75/q25=3.13 vo_prod:H=0.6748,top10E=0.24,eRank=129.0,q75/q25=inf train_time:683290ms step_avg:94.90ms +[2025-08-22 18:56:54] [Rank 0] PRINT: step:7200/10000 val_loss:3.6090 svd_entropy: attn_qk:H=0.7500,top10E=0.29,eRank=166.0,q75/q25=59.73 attn_vo:H=0.8168,top10E=0.12,eRank=307.3,q75/q25=inf mlp_w1:H=0.9688,top10E=0.04,eRank=624.3,q75/q25=3.01 mlp_w2:H=0.9634,top10E=0.05,eRank=602.9,q75/q25=3.13 vo_prod:H=0.6748,top10E=0.24,eRank=129.0,q75/q25=inf train_time:683290ms step_avg:94.90ms +[2025-08-22 18:56:54] [Rank 0] step:7201/10000 train_time:683309ms step_avg:94.89ms +[2025-08-22 18:56:54] [Rank 0] step:7201/10000 train_time:683309ms step_avg:94.89ms +[2025-08-22 18:56:56] [Rank 0] step:7221/10000 train_time:685291ms step_avg:94.90ms +[2025-08-22 18:56:56] [Rank 0] step:7221/10000 train_time:685291ms step_avg:94.90ms +[2025-08-22 18:56:58] [Rank 0] step:7241/10000 train_time:687276ms step_avg:94.91ms +[2025-08-22 18:56:58] [Rank 0] step:7241/10000 train_time:687276ms step_avg:94.91ms +[2025-08-22 18:57:00] [Rank 0] step:7261/10000 train_time:689257ms step_avg:94.93ms +[2025-08-22 18:57:00] [Rank 0] step:7261/10000 train_time:689257ms step_avg:94.93ms +[2025-08-22 18:57:02] [Rank 0] step:7281/10000 train_time:691251ms step_avg:94.94ms +[2025-08-22 18:57:02] [Rank 0] step:7281/10000 train_time:691251ms step_avg:94.94ms +[2025-08-22 18:57:04] [Rank 0] step:7301/10000 train_time:693238ms step_avg:94.95ms +[2025-08-22 18:57:04] [Rank 0] step:7301/10000 train_time:693238ms step_avg:94.95ms +[2025-08-22 18:57:06] [Rank 0] step:7321/10000 train_time:695240ms step_avg:94.97ms +[2025-08-22 18:57:06] [Rank 0] step:7321/10000 train_time:695240ms step_avg:94.97ms +[2025-08-22 18:57:08] [Rank 0] 
step:7341/10000 train_time:697227ms step_avg:94.98ms +[2025-08-22 18:57:08] [Rank 0] step:7341/10000 train_time:697227ms step_avg:94.98ms +[2025-08-22 18:57:10] [Rank 0] step:7361/10000 train_time:699223ms step_avg:94.99ms +[2025-08-22 18:57:10] [Rank 0] step:7361/10000 train_time:699223ms step_avg:94.99ms +[2025-08-22 18:57:12] [Rank 0] step:7381/10000 train_time:701217ms step_avg:95.00ms +[2025-08-22 18:57:12] [Rank 0] step:7381/10000 train_time:701217ms step_avg:95.00ms +[2025-08-22 18:57:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:57:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:57:28] [Rank 0] PRINT: step:7400/10000 val_loss:3.5924 svd_entropy: attn_qk:H=0.7505,top10E=0.29,eRank=166.4,q75/q25=59.47 attn_vo:H=0.8173,top10E=0.12,eRank=308.1,q75/q25=inf mlp_w1:H=0.9688,top10E=0.04,eRank=624.4,q75/q25=3.00 mlp_w2:H=0.9635,top10E=0.05,eRank=603.0,q75/q25=3.12 vo_prod:H=0.6758,top10E=0.24,eRank=130.0,q75/q25=inf train_time:703255ms step_avg:95.03ms +[2025-08-22 18:57:28] [Rank 0] PRINT: step:7400/10000 val_loss:3.5924 svd_entropy: attn_qk:H=0.7505,top10E=0.29,eRank=166.4,q75/q25=59.47 attn_vo:H=0.8173,top10E=0.12,eRank=308.1,q75/q25=inf mlp_w1:H=0.9688,top10E=0.04,eRank=624.4,q75/q25=3.00 mlp_w2:H=0.9635,top10E=0.05,eRank=603.0,q75/q25=3.12 vo_prod:H=0.6758,top10E=0.24,eRank=130.0,q75/q25=inf train_time:703255ms step_avg:95.03ms +[2025-08-22 18:57:28] [Rank 0] step:7401/10000 train_time:703276ms step_avg:95.02ms +[2025-08-22 18:57:28] [Rank 0] step:7401/10000 train_time:703276ms step_avg:95.02ms +[2025-08-22 18:57:30] [Rank 0] step:7421/10000 train_time:705266ms step_avg:95.04ms +[2025-08-22 18:57:30] [Rank 0] step:7421/10000 train_time:705266ms step_avg:95.04ms +[2025-08-22 18:57:32] [Rank 0] step:7441/10000 train_time:707246ms step_avg:95.05ms +[2025-08-22 
18:57:32] [Rank 0] step:7441/10000 train_time:707246ms step_avg:95.05ms +[2025-08-22 18:57:34] [Rank 0] step:7461/10000 train_time:709231ms step_avg:95.06ms +[2025-08-22 18:57:34] [Rank 0] step:7461/10000 train_time:709231ms step_avg:95.06ms +[2025-08-22 18:57:36] [Rank 0] step:7481/10000 train_time:711225ms step_avg:95.07ms +[2025-08-22 18:57:36] [Rank 0] step:7481/10000 train_time:711225ms step_avg:95.07ms +[2025-08-22 18:57:38] [Rank 0] step:7501/10000 train_time:713216ms step_avg:95.08ms +[2025-08-22 18:57:38] [Rank 0] step:7501/10000 train_time:713216ms step_avg:95.08ms +[2025-08-22 18:57:40] [Rank 0] step:7521/10000 train_time:715208ms step_avg:95.09ms +[2025-08-22 18:57:40] [Rank 0] step:7521/10000 train_time:715208ms step_avg:95.09ms +[2025-08-22 18:57:42] [Rank 0] step:7541/10000 train_time:717209ms step_avg:95.11ms +[2025-08-22 18:57:42] [Rank 0] step:7541/10000 train_time:717209ms step_avg:95.11ms +[2025-08-22 18:57:44] [Rank 0] step:7561/10000 train_time:719194ms step_avg:95.12ms +[2025-08-22 18:57:44] [Rank 0] step:7561/10000 train_time:719194ms step_avg:95.12ms +[2025-08-22 18:57:46] [Rank 0] step:7581/10000 train_time:721195ms step_avg:95.13ms +[2025-08-22 18:57:46] [Rank 0] step:7581/10000 train_time:721195ms step_avg:95.13ms +[2025-08-22 18:57:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:57:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:58:01] [Rank 0] PRINT: step:7600/10000 val_loss:3.5825 svd_entropy: attn_qk:H=0.7510,top10E=0.29,eRank=166.8,q75/q25=59.36 attn_vo:H=0.8177,top10E=0.12,eRank=308.6,q75/q25=inf mlp_w1:H=0.9688,top10E=0.04,eRank=624.5,q75/q25=3.00 mlp_w2:H=0.9635,top10E=0.05,eRank=603.1,q75/q25=3.12 vo_prod:H=0.6763,top10E=0.24,eRank=130.5,q75/q25=inf train_time:723201ms step_avg:95.16ms +[2025-08-22 18:58:01] [Rank 0] PRINT: step:7600/10000 val_loss:3.5825 svd_entropy: attn_qk:H=0.7510,top10E=0.29,eRank=166.8,q75/q25=59.36 attn_vo:H=0.8177,top10E=0.12,eRank=308.6,q75/q25=inf mlp_w1:H=0.9688,top10E=0.04,eRank=624.5,q75/q25=3.00 mlp_w2:H=0.9635,top10E=0.05,eRank=603.1,q75/q25=3.12 vo_prod:H=0.6763,top10E=0.24,eRank=130.5,q75/q25=inf train_time:723201ms step_avg:95.16ms +[2025-08-22 18:58:02] [Rank 0] step:7601/10000 train_time:723221ms step_avg:95.15ms +[2025-08-22 18:58:02] [Rank 0] step:7601/10000 train_time:723221ms step_avg:95.15ms +[2025-08-22 18:58:04] [Rank 0] step:7621/10000 train_time:725213ms step_avg:95.16ms +[2025-08-22 18:58:04] [Rank 0] step:7621/10000 train_time:725213ms step_avg:95.16ms +[2025-08-22 18:58:06] [Rank 0] step:7641/10000 train_time:727197ms step_avg:95.17ms +[2025-08-22 18:58:06] [Rank 0] step:7641/10000 train_time:727197ms step_avg:95.17ms +[2025-08-22 18:58:08] [Rank 0] step:7661/10000 train_time:729187ms step_avg:95.18ms +[2025-08-22 18:58:08] [Rank 0] step:7661/10000 train_time:729187ms step_avg:95.18ms +[2025-08-22 18:58:10] [Rank 0] step:7681/10000 train_time:731190ms step_avg:95.19ms +[2025-08-22 18:58:10] [Rank 0] step:7681/10000 train_time:731190ms step_avg:95.19ms +[2025-08-22 18:58:12] [Rank 0] step:7701/10000 train_time:733180ms step_avg:95.21ms +[2025-08-22 18:58:12] [Rank 0] step:7701/10000 train_time:733180ms step_avg:95.21ms +[2025-08-22 18:58:14] [Rank 0] step:7721/10000 train_time:735182ms step_avg:95.22ms +[2025-08-22 18:58:14] [Rank 0] step:7721/10000 train_time:735182ms step_avg:95.22ms +[2025-08-22 18:58:16] [Rank 0] 
step:7741/10000 train_time:737173ms step_avg:95.23ms +[2025-08-22 18:58:16] [Rank 0] step:7741/10000 train_time:737173ms step_avg:95.23ms +[2025-08-22 18:58:18] [Rank 0] step:7761/10000 train_time:739226ms step_avg:95.25ms +[2025-08-22 18:58:18] [Rank 0] step:7761/10000 train_time:739226ms step_avg:95.25ms +[2025-08-22 18:58:20] [Rank 0] step:7781/10000 train_time:741286ms step_avg:95.27ms +[2025-08-22 18:58:20] [Rank 0] step:7781/10000 train_time:741286ms step_avg:95.27ms +[2025-08-22 18:58:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:58:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:58:36] [Rank 0] PRINT: step:7800/10000 val_loss:3.5701 svd_entropy: attn_qk:H=0.7513,top10E=0.29,eRank=167.1,q75/q25=59.25 attn_vo:H=0.8181,top10E=0.12,eRank=309.2,q75/q25=inf mlp_w1:H=0.9688,top10E=0.04,eRank=624.6,q75/q25=3.00 mlp_w2:H=0.9635,top10E=0.05,eRank=603.2,q75/q25=3.12 vo_prod:H=0.6771,top10E=0.24,eRank=131.2,q75/q25=inf train_time:743297ms step_avg:95.29ms +[2025-08-22 18:58:36] [Rank 0] PRINT: step:7800/10000 val_loss:3.5701 svd_entropy: attn_qk:H=0.7513,top10E=0.29,eRank=167.1,q75/q25=59.25 attn_vo:H=0.8181,top10E=0.12,eRank=309.2,q75/q25=inf mlp_w1:H=0.9688,top10E=0.04,eRank=624.6,q75/q25=3.00 mlp_w2:H=0.9635,top10E=0.05,eRank=603.2,q75/q25=3.12 vo_prod:H=0.6771,top10E=0.24,eRank=131.2,q75/q25=inf train_time:743297ms step_avg:95.29ms +[2025-08-22 18:58:36] [Rank 0] step:7801/10000 train_time:743316ms step_avg:95.28ms +[2025-08-22 18:58:36] [Rank 0] step:7801/10000 train_time:743316ms step_avg:95.28ms +[2025-08-22 18:58:38] [Rank 0] step:7821/10000 train_time:745299ms step_avg:95.29ms +[2025-08-22 18:58:38] [Rank 0] step:7821/10000 train_time:745299ms step_avg:95.29ms +[2025-08-22 18:58:40] [Rank 0] step:7841/10000 train_time:747280ms step_avg:95.30ms +[2025-08-22 
18:58:40] [Rank 0] step:7841/10000 train_time:747280ms step_avg:95.30ms +[2025-08-22 18:58:42] [Rank 0] step:7861/10000 train_time:749275ms step_avg:95.32ms +[2025-08-22 18:58:42] [Rank 0] step:7861/10000 train_time:749275ms step_avg:95.32ms +[2025-08-22 18:58:44] [Rank 0] step:7881/10000 train_time:751276ms step_avg:95.33ms +[2025-08-22 18:58:44] [Rank 0] step:7881/10000 train_time:751276ms step_avg:95.33ms +[2025-08-22 18:58:46] [Rank 0] step:7901/10000 train_time:753266ms step_avg:95.34ms +[2025-08-22 18:58:46] [Rank 0] step:7901/10000 train_time:753266ms step_avg:95.34ms +[2025-08-22 18:58:48] [Rank 0] step:7921/10000 train_time:755268ms step_avg:95.35ms +[2025-08-22 18:58:48] [Rank 0] step:7921/10000 train_time:755268ms step_avg:95.35ms +[2025-08-22 18:58:50] [Rank 0] step:7941/10000 train_time:757273ms step_avg:95.36ms +[2025-08-22 18:58:50] [Rank 0] step:7941/10000 train_time:757273ms step_avg:95.36ms +[2025-08-22 18:58:52] [Rank 0] step:7961/10000 train_time:759274ms step_avg:95.37ms +[2025-08-22 18:58:52] [Rank 0] step:7961/10000 train_time:759274ms step_avg:95.37ms +[2025-08-22 18:58:54] [Rank 0] step:7981/10000 train_time:761265ms step_avg:95.38ms +[2025-08-22 18:58:54] [Rank 0] step:7981/10000 train_time:761265ms step_avg:95.38ms +[2025-08-22 18:58:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:58:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:59:09] [Rank 0] PRINT: step:8000/10000 val_loss:3.5532 svd_entropy: attn_qk:H=0.7517,top10E=0.29,eRank=167.5,q75/q25=59.02 attn_vo:H=0.8185,top10E=0.12,eRank=309.8,q75/q25=inf mlp_w1:H=0.9688,top10E=0.04,eRank=624.7,q75/q25=2.99 mlp_w2:H=0.9635,top10E=0.05,eRank=603.4,q75/q25=3.12 vo_prod:H=0.6780,top10E=0.23,eRank=132.0,q75/q25=inf train_time:763274ms step_avg:95.41ms +[2025-08-22 18:59:09] [Rank 0] PRINT: step:8000/10000 val_loss:3.5532 svd_entropy: attn_qk:H=0.7517,top10E=0.29,eRank=167.5,q75/q25=59.02 attn_vo:H=0.8185,top10E=0.12,eRank=309.8,q75/q25=inf mlp_w1:H=0.9688,top10E=0.04,eRank=624.7,q75/q25=2.99 mlp_w2:H=0.9635,top10E=0.05,eRank=603.4,q75/q25=3.12 vo_prod:H=0.6780,top10E=0.23,eRank=132.0,q75/q25=inf train_time:763274ms step_avg:95.41ms +[2025-08-22 18:59:10] [Rank 0] step:8001/10000 train_time:763295ms step_avg:95.40ms +[2025-08-22 18:59:10] [Rank 0] step:8001/10000 train_time:763295ms step_avg:95.40ms +[2025-08-22 18:59:12] [Rank 0] step:8021/10000 train_time:765277ms step_avg:95.41ms +[2025-08-22 18:59:12] [Rank 0] step:8021/10000 train_time:765277ms step_avg:95.41ms +[2025-08-22 18:59:14] [Rank 0] step:8041/10000 train_time:767276ms step_avg:95.42ms +[2025-08-22 18:59:14] [Rank 0] step:8041/10000 train_time:767276ms step_avg:95.42ms +[2025-08-22 18:59:16] [Rank 0] step:8061/10000 train_time:769269ms step_avg:95.43ms +[2025-08-22 18:59:16] [Rank 0] step:8061/10000 train_time:769269ms step_avg:95.43ms +[2025-08-22 18:59:18] [Rank 0] step:8081/10000 train_time:771252ms step_avg:95.44ms +[2025-08-22 18:59:18] [Rank 0] step:8081/10000 train_time:771252ms step_avg:95.44ms +[2025-08-22 18:59:20] [Rank 0] step:8101/10000 train_time:773309ms step_avg:95.46ms +[2025-08-22 18:59:20] [Rank 0] step:8101/10000 train_time:773309ms step_avg:95.46ms +[2025-08-22 18:59:22] [Rank 0] step:8121/10000 train_time:775367ms step_avg:95.48ms +[2025-08-22 18:59:22] [Rank 0] step:8121/10000 train_time:775367ms step_avg:95.48ms +[2025-08-22 18:59:24] [Rank 0] 
step:8141/10000 train_time:777590ms step_avg:95.52ms +[2025-08-22 18:59:24] [Rank 0] step:8141/10000 train_time:777590ms step_avg:95.52ms +[2025-08-22 18:59:26] [Rank 0] step:8161/10000 train_time:779599ms step_avg:95.53ms +[2025-08-22 18:59:26] [Rank 0] step:8161/10000 train_time:779599ms step_avg:95.53ms +[2025-08-22 18:59:28] [Rank 0] step:8181/10000 train_time:781618ms step_avg:95.54ms +[2025-08-22 18:59:28] [Rank 0] step:8181/10000 train_time:781618ms step_avg:95.54ms +[2025-08-22 18:59:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:59:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:59:44] [Rank 0] PRINT: step:8200/10000 val_loss:3.5426 svd_entropy: attn_qk:H=0.7520,top10E=0.29,eRank=167.7,q75/q25=58.64 attn_vo:H=0.8188,top10E=0.12,eRank=310.3,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=624.8,q75/q25=2.99 mlp_w2:H=0.9636,top10E=0.05,eRank=603.5,q75/q25=3.12 vo_prod:H=0.6786,top10E=0.23,eRank=132.6,q75/q25=inf train_time:783663ms step_avg:95.57ms +[2025-08-22 18:59:44] [Rank 0] PRINT: step:8200/10000 val_loss:3.5426 svd_entropy: attn_qk:H=0.7520,top10E=0.29,eRank=167.7,q75/q25=58.64 attn_vo:H=0.8188,top10E=0.12,eRank=310.3,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=624.8,q75/q25=2.99 mlp_w2:H=0.9636,top10E=0.05,eRank=603.5,q75/q25=3.12 vo_prod:H=0.6786,top10E=0.23,eRank=132.6,q75/q25=inf train_time:783663ms step_avg:95.57ms +[2025-08-22 18:59:44] [Rank 0] step:8201/10000 train_time:783683ms step_avg:95.56ms +[2025-08-22 18:59:44] [Rank 0] step:8201/10000 train_time:783683ms step_avg:95.56ms +[2025-08-22 18:59:46] [Rank 0] step:8221/10000 train_time:785704ms step_avg:95.57ms +[2025-08-22 18:59:46] [Rank 0] step:8221/10000 train_time:785704ms step_avg:95.57ms +[2025-08-22 18:59:48] [Rank 0] step:8241/10000 train_time:787725ms step_avg:95.59ms +[2025-08-22 
18:59:48] [Rank 0] step:8241/10000 train_time:787725ms step_avg:95.59ms +[2025-08-22 18:59:50] [Rank 0] step:8261/10000 train_time:789749ms step_avg:95.60ms +[2025-08-22 18:59:50] [Rank 0] step:8261/10000 train_time:789749ms step_avg:95.60ms +[2025-08-22 18:59:52] [Rank 0] step:8281/10000 train_time:791765ms step_avg:95.61ms +[2025-08-22 18:59:52] [Rank 0] step:8281/10000 train_time:791765ms step_avg:95.61ms +[2025-08-22 18:59:54] [Rank 0] step:8301/10000 train_time:793782ms step_avg:95.62ms +[2025-08-22 18:59:54] [Rank 0] step:8301/10000 train_time:793782ms step_avg:95.62ms +[2025-08-22 18:59:56] [Rank 0] step:8321/10000 train_time:795798ms step_avg:95.64ms +[2025-08-22 18:59:56] [Rank 0] step:8321/10000 train_time:795798ms step_avg:95.64ms +[2025-08-22 18:59:58] [Rank 0] step:8341/10000 train_time:797825ms step_avg:95.65ms +[2025-08-22 18:59:58] [Rank 0] step:8341/10000 train_time:797825ms step_avg:95.65ms +[2025-08-22 19:00:00] [Rank 0] step:8361/10000 train_time:799843ms step_avg:95.66ms +[2025-08-22 19:00:00] [Rank 0] step:8361/10000 train_time:799843ms step_avg:95.66ms +[2025-08-22 19:00:02] [Rank 0] step:8381/10000 train_time:801862ms step_avg:95.68ms +[2025-08-22 19:00:02] [Rank 0] step:8381/10000 train_time:801862ms step_avg:95.68ms +[2025-08-22 19:00:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:00:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:00:18] [Rank 0] PRINT: step:8400/10000 val_loss:3.5290 svd_entropy: attn_qk:H=0.7523,top10E=0.29,eRank=168.0,q75/q25=58.37 attn_vo:H=0.8191,top10E=0.12,eRank=310.7,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=624.8,q75/q25=2.99 mlp_w2:H=0.9636,top10E=0.05,eRank=603.6,q75/q25=3.12 vo_prod:H=0.6792,top10E=0.23,eRank=133.1,q75/q25=inf train_time:803888ms step_avg:95.70ms +[2025-08-22 19:00:18] [Rank 0] PRINT: step:8400/10000 val_loss:3.5290 svd_entropy: attn_qk:H=0.7523,top10E=0.29,eRank=168.0,q75/q25=58.37 attn_vo:H=0.8191,top10E=0.12,eRank=310.7,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=624.8,q75/q25=2.99 mlp_w2:H=0.9636,top10E=0.05,eRank=603.6,q75/q25=3.12 vo_prod:H=0.6792,top10E=0.23,eRank=133.1,q75/q25=inf train_time:803888ms step_avg:95.70ms +[2025-08-22 19:00:18] [Rank 0] step:8401/10000 train_time:803908ms step_avg:95.69ms +[2025-08-22 19:00:18] [Rank 0] step:8401/10000 train_time:803908ms step_avg:95.69ms +[2025-08-22 19:00:20] [Rank 0] step:8421/10000 train_time:805907ms step_avg:95.70ms +[2025-08-22 19:00:20] [Rank 0] step:8421/10000 train_time:805907ms step_avg:95.70ms +[2025-08-22 19:00:22] [Rank 0] step:8441/10000 train_time:807925ms step_avg:95.71ms +[2025-08-22 19:00:22] [Rank 0] step:8441/10000 train_time:807925ms step_avg:95.71ms +[2025-08-22 19:00:24] [Rank 0] step:8461/10000 train_time:810002ms step_avg:95.73ms +[2025-08-22 19:00:24] [Rank 0] step:8461/10000 train_time:810002ms step_avg:95.73ms +[2025-08-22 19:00:26] [Rank 0] step:8481/10000 train_time:812037ms step_avg:95.75ms +[2025-08-22 19:00:26] [Rank 0] step:8481/10000 train_time:812037ms step_avg:95.75ms +[2025-08-22 19:00:28] [Rank 0] step:8501/10000 train_time:814079ms step_avg:95.76ms +[2025-08-22 19:00:28] [Rank 0] step:8501/10000 train_time:814079ms step_avg:95.76ms +[2025-08-22 19:00:30] [Rank 0] step:8521/10000 train_time:816101ms step_avg:95.78ms +[2025-08-22 19:00:30] [Rank 0] step:8521/10000 train_time:816101ms step_avg:95.78ms +[2025-08-22 19:00:32] [Rank 0] 
step:8541/10000 train_time:818138ms step_avg:95.79ms +[2025-08-22 19:00:32] [Rank 0] step:8541/10000 train_time:818138ms step_avg:95.79ms +[2025-08-22 19:00:34] [Rank 0] step:8561/10000 train_time:820168ms step_avg:95.80ms +[2025-08-22 19:00:34] [Rank 0] step:8561/10000 train_time:820168ms step_avg:95.80ms +[2025-08-22 19:00:36] [Rank 0] step:8581/10000 train_time:822191ms step_avg:95.82ms +[2025-08-22 19:00:36] [Rank 0] step:8581/10000 train_time:822191ms step_avg:95.82ms +[2025-08-22 19:00:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:00:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:00:52] [Rank 0] PRINT: step:8600/10000 val_loss:3.5199 svd_entropy: attn_qk:H=0.7525,top10E=0.29,eRank=168.2,q75/q25=58.40 attn_vo:H=0.8194,top10E=0.12,eRank=311.2,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=624.9,q75/q25=2.99 mlp_w2:H=0.9636,top10E=0.05,eRank=603.7,q75/q25=3.11 vo_prod:H=0.6799,top10E=0.23,eRank=133.8,q75/q25=inf train_time:824217ms step_avg:95.84ms +[2025-08-22 19:00:52] [Rank 0] PRINT: step:8600/10000 val_loss:3.5199 svd_entropy: attn_qk:H=0.7525,top10E=0.29,eRank=168.2,q75/q25=58.40 attn_vo:H=0.8194,top10E=0.12,eRank=311.2,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=624.9,q75/q25=2.99 mlp_w2:H=0.9636,top10E=0.05,eRank=603.7,q75/q25=3.11 vo_prod:H=0.6799,top10E=0.23,eRank=133.8,q75/q25=inf train_time:824217ms step_avg:95.84ms +[2025-08-22 19:00:53] [Rank 0] step:8601/10000 train_time:824237ms step_avg:95.83ms +[2025-08-22 19:00:53] [Rank 0] step:8601/10000 train_time:824237ms step_avg:95.83ms +[2025-08-22 19:00:55] [Rank 0] step:8621/10000 train_time:826280ms step_avg:95.84ms +[2025-08-22 19:00:55] [Rank 0] step:8621/10000 train_time:826280ms step_avg:95.84ms +[2025-08-22 19:00:57] [Rank 0] step:8641/10000 train_time:828297ms step_avg:95.86ms +[2025-08-22 
19:00:57] [Rank 0] step:8641/10000 train_time:828297ms step_avg:95.86ms +[2025-08-22 19:00:59] [Rank 0] step:8661/10000 train_time:830321ms step_avg:95.87ms +[2025-08-22 19:00:59] [Rank 0] step:8661/10000 train_time:830321ms step_avg:95.87ms +[2025-08-22 19:01:01] [Rank 0] step:8681/10000 train_time:832348ms step_avg:95.88ms +[2025-08-22 19:01:01] [Rank 0] step:8681/10000 train_time:832348ms step_avg:95.88ms +[2025-08-22 19:01:03] [Rank 0] step:8701/10000 train_time:834364ms step_avg:95.89ms +[2025-08-22 19:01:03] [Rank 0] step:8701/10000 train_time:834364ms step_avg:95.89ms +[2025-08-22 19:01:05] [Rank 0] step:8721/10000 train_time:836394ms step_avg:95.91ms +[2025-08-22 19:01:05] [Rank 0] step:8721/10000 train_time:836394ms step_avg:95.91ms +[2025-08-22 19:01:07] [Rank 0] step:8741/10000 train_time:838412ms step_avg:95.92ms +[2025-08-22 19:01:07] [Rank 0] step:8741/10000 train_time:838412ms step_avg:95.92ms +[2025-08-22 19:01:09] [Rank 0] step:8761/10000 train_time:840438ms step_avg:95.93ms +[2025-08-22 19:01:09] [Rank 0] step:8761/10000 train_time:840438ms step_avg:95.93ms +[2025-08-22 19:01:11] [Rank 0] step:8781/10000 train_time:842469ms step_avg:95.94ms +[2025-08-22 19:01:11] [Rank 0] step:8781/10000 train_time:842469ms step_avg:95.94ms +[2025-08-22 19:01:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:01:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:01:27] [Rank 0] PRINT: step:8800/10000 val_loss:3.5072 svd_entropy: attn_qk:H=0.7527,top10E=0.29,eRank=168.4,q75/q25=58.38 attn_vo:H=0.8196,top10E=0.12,eRank=311.7,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=625.0,q75/q25=2.99 mlp_w2:H=0.9636,top10E=0.05,eRank=603.8,q75/q25=3.12 vo_prod:H=0.6803,top10E=0.23,eRank=134.2,q75/q25=inf train_time:844504ms step_avg:95.97ms +[2025-08-22 19:01:27] [Rank 0] PRINT: step:8800/10000 val_loss:3.5072 svd_entropy: attn_qk:H=0.7527,top10E=0.29,eRank=168.4,q75/q25=58.38 attn_vo:H=0.8196,top10E=0.12,eRank=311.7,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=625.0,q75/q25=2.99 mlp_w2:H=0.9636,top10E=0.05,eRank=603.8,q75/q25=3.12 vo_prod:H=0.6803,top10E=0.23,eRank=134.2,q75/q25=inf train_time:844504ms step_avg:95.97ms +[2025-08-22 19:01:27] [Rank 0] step:8801/10000 train_time:844525ms step_avg:95.96ms +[2025-08-22 19:01:27] [Rank 0] step:8801/10000 train_time:844525ms step_avg:95.96ms +[2025-08-22 19:01:29] [Rank 0] step:8821/10000 train_time:846592ms step_avg:95.97ms +[2025-08-22 19:01:29] [Rank 0] step:8821/10000 train_time:846592ms step_avg:95.97ms +[2025-08-22 19:01:31] [Rank 0] step:8841/10000 train_time:848634ms step_avg:95.99ms +[2025-08-22 19:01:31] [Rank 0] step:8841/10000 train_time:848634ms step_avg:95.99ms +[2025-08-22 19:01:33] [Rank 0] step:8861/10000 train_time:850651ms step_avg:96.00ms +[2025-08-22 19:01:33] [Rank 0] step:8861/10000 train_time:850651ms step_avg:96.00ms +[2025-08-22 19:01:35] [Rank 0] step:8881/10000 train_time:852679ms step_avg:96.01ms +[2025-08-22 19:01:35] [Rank 0] step:8881/10000 train_time:852679ms step_avg:96.01ms +[2025-08-22 19:01:37] [Rank 0] step:8901/10000 train_time:854706ms step_avg:96.02ms +[2025-08-22 19:01:37] [Rank 0] step:8901/10000 train_time:854706ms step_avg:96.02ms +[2025-08-22 19:01:39] [Rank 0] step:8921/10000 train_time:856745ms step_avg:96.04ms +[2025-08-22 19:01:39] [Rank 0] step:8921/10000 train_time:856745ms step_avg:96.04ms +[2025-08-22 19:01:41] [Rank 0] 
step:8941/10000 train_time:858775ms step_avg:96.05ms +[2025-08-22 19:01:41] [Rank 0] step:8941/10000 train_time:858775ms step_avg:96.05ms +[2025-08-22 19:01:43] [Rank 0] step:8961/10000 train_time:860802ms step_avg:96.06ms +[2025-08-22 19:01:43] [Rank 0] step:8961/10000 train_time:860802ms step_avg:96.06ms +[2025-08-22 19:01:45] [Rank 0] step:8981/10000 train_time:862830ms step_avg:96.07ms +[2025-08-22 19:01:45] [Rank 0] step:8981/10000 train_time:862830ms step_avg:96.07ms +[2025-08-22 19:01:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:01:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:02:01] [Rank 0] PRINT: step:9000/10000 val_loss:3.4970 svd_entropy: attn_qk:H=0.7528,top10E=0.29,eRank=168.5,q75/q25=58.28 attn_vo:H=0.8199,top10E=0.12,eRank=312.0,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=625.0,q75/q25=2.99 mlp_w2:H=0.9637,top10E=0.05,eRank=603.9,q75/q25=3.11 vo_prod:H=0.6808,top10E=0.23,eRank=134.6,q75/q25=inf train_time:864862ms step_avg:96.10ms +[2025-08-22 19:02:01] [Rank 0] PRINT: step:9000/10000 val_loss:3.4970 svd_entropy: attn_qk:H=0.7528,top10E=0.29,eRank=168.5,q75/q25=58.28 attn_vo:H=0.8199,top10E=0.12,eRank=312.0,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=625.0,q75/q25=2.99 mlp_w2:H=0.9637,top10E=0.05,eRank=603.9,q75/q25=3.11 vo_prod:H=0.6808,top10E=0.23,eRank=134.6,q75/q25=inf train_time:864862ms step_avg:96.10ms +[2025-08-22 19:02:01] [Rank 0] step:9001/10000 train_time:864882ms step_avg:96.09ms +[2025-08-22 19:02:01] [Rank 0] step:9001/10000 train_time:864882ms step_avg:96.09ms +[2025-08-22 19:02:03] [Rank 0] step:9021/10000 train_time:866907ms step_avg:96.10ms +[2025-08-22 19:02:03] [Rank 0] step:9021/10000 train_time:866907ms step_avg:96.10ms +[2025-08-22 19:02:05] [Rank 0] step:9041/10000 train_time:868932ms step_avg:96.11ms +[2025-08-22 
19:02:05] [Rank 0] step:9041/10000 train_time:868932ms step_avg:96.11ms +[2025-08-22 19:02:07] [Rank 0] step:9061/10000 train_time:870961ms step_avg:96.12ms +[2025-08-22 19:02:07] [Rank 0] step:9061/10000 train_time:870961ms step_avg:96.12ms +[2025-08-22 19:02:09] [Rank 0] step:9081/10000 train_time:872990ms step_avg:96.13ms +[2025-08-22 19:02:09] [Rank 0] step:9081/10000 train_time:872990ms step_avg:96.13ms +[2025-08-22 19:02:11] [Rank 0] step:9101/10000 train_time:875028ms step_avg:96.15ms +[2025-08-22 19:02:11] [Rank 0] step:9101/10000 train_time:875028ms step_avg:96.15ms +[2025-08-22 19:02:13] [Rank 0] step:9121/10000 train_time:877054ms step_avg:96.16ms +[2025-08-22 19:02:13] [Rank 0] step:9121/10000 train_time:877054ms step_avg:96.16ms +[2025-08-22 19:02:15] [Rank 0] step:9141/10000 train_time:879065ms step_avg:96.17ms +[2025-08-22 19:02:15] [Rank 0] step:9141/10000 train_time:879065ms step_avg:96.17ms +[2025-08-22 19:02:17] [Rank 0] step:9161/10000 train_time:881082ms step_avg:96.18ms +[2025-08-22 19:02:17] [Rank 0] step:9161/10000 train_time:881082ms step_avg:96.18ms +[2025-08-22 19:02:19] [Rank 0] step:9181/10000 train_time:883139ms step_avg:96.19ms +[2025-08-22 19:02:19] [Rank 0] step:9181/10000 train_time:883139ms step_avg:96.19ms +[2025-08-22 19:02:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:02:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:02:35] [Rank 0] PRINT: step:9200/10000 val_loss:3.4886 svd_entropy: attn_qk:H=0.7530,top10E=0.29,eRank=168.7,q75/q25=58.05 attn_vo:H=0.8201,top10E=0.12,eRank=312.3,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=625.1,q75/q25=2.99 mlp_w2:H=0.9637,top10E=0.05,eRank=604.0,q75/q25=3.11 vo_prod:H=0.6812,top10E=0.23,eRank=135.1,q75/q25=inf train_time:885164ms step_avg:96.21ms +[2025-08-22 19:02:35] [Rank 0] PRINT: step:9200/10000 val_loss:3.4886 svd_entropy: attn_qk:H=0.7530,top10E=0.29,eRank=168.7,q75/q25=58.05 attn_vo:H=0.8201,top10E=0.12,eRank=312.3,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=625.1,q75/q25=2.99 mlp_w2:H=0.9637,top10E=0.05,eRank=604.0,q75/q25=3.11 vo_prod:H=0.6812,top10E=0.23,eRank=135.1,q75/q25=inf train_time:885164ms step_avg:96.21ms +[2025-08-22 19:02:35] [Rank 0] step:9201/10000 train_time:885185ms step_avg:96.21ms +[2025-08-22 19:02:35] [Rank 0] step:9201/10000 train_time:885185ms step_avg:96.21ms +[2025-08-22 19:02:37] [Rank 0] step:9221/10000 train_time:887203ms step_avg:96.22ms +[2025-08-22 19:02:37] [Rank 0] step:9221/10000 train_time:887203ms step_avg:96.22ms +[2025-08-22 19:02:39] [Rank 0] step:9241/10000 train_time:889236ms step_avg:96.23ms +[2025-08-22 19:02:39] [Rank 0] step:9241/10000 train_time:889236ms step_avg:96.23ms +[2025-08-22 19:02:42] [Rank 0] step:9261/10000 train_time:891263ms step_avg:96.24ms +[2025-08-22 19:02:42] [Rank 0] step:9261/10000 train_time:891263ms step_avg:96.24ms +[2025-08-22 19:02:44] [Rank 0] step:9281/10000 train_time:893274ms step_avg:96.25ms +[2025-08-22 19:02:44] [Rank 0] step:9281/10000 train_time:893274ms step_avg:96.25ms +[2025-08-22 19:02:46] [Rank 0] step:9301/10000 train_time:895290ms step_avg:96.26ms +[2025-08-22 19:02:46] [Rank 0] step:9301/10000 train_time:895290ms step_avg:96.26ms +[2025-08-22 19:02:48] [Rank 0] step:9321/10000 train_time:897313ms step_avg:96.27ms +[2025-08-22 19:02:48] [Rank 0] step:9321/10000 train_time:897313ms step_avg:96.27ms +[2025-08-22 19:02:50] [Rank 0] 
step:9341/10000 train_time:899336ms step_avg:96.28ms +[2025-08-22 19:02:50] [Rank 0] step:9341/10000 train_time:899336ms step_avg:96.28ms +[2025-08-22 19:02:52] [Rank 0] step:9361/10000 train_time:901364ms step_avg:96.29ms +[2025-08-22 19:02:52] [Rank 0] step:9361/10000 train_time:901364ms step_avg:96.29ms +[2025-08-22 19:02:54] [Rank 0] step:9381/10000 train_time:903400ms step_avg:96.30ms +[2025-08-22 19:02:54] [Rank 0] step:9381/10000 train_time:903400ms step_avg:96.30ms +[2025-08-22 19:02:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:02:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:03:10] [Rank 0] PRINT: step:9400/10000 val_loss:3.4789 svd_entropy: attn_qk:H=0.7532,top10E=0.29,eRank=168.8,q75/q25=57.93 attn_vo:H=0.8202,top10E=0.12,eRank=312.6,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=625.1,q75/q25=2.99 mlp_w2:H=0.9637,top10E=0.05,eRank=604.1,q75/q25=3.11 vo_prod:H=0.6816,top10E=0.23,eRank=135.4,q75/q25=inf train_time:905435ms step_avg:96.32ms +[2025-08-22 19:03:10] [Rank 0] PRINT: step:9400/10000 val_loss:3.4789 svd_entropy: attn_qk:H=0.7532,top10E=0.29,eRank=168.8,q75/q25=57.93 attn_vo:H=0.8202,top10E=0.12,eRank=312.6,q75/q25=inf mlp_w1:H=0.9689,top10E=0.04,eRank=625.1,q75/q25=2.99 mlp_w2:H=0.9637,top10E=0.05,eRank=604.1,q75/q25=3.11 vo_prod:H=0.6816,top10E=0.23,eRank=135.4,q75/q25=inf train_time:905435ms step_avg:96.32ms +[2025-08-22 19:03:10] [Rank 0] step:9401/10000 train_time:905455ms step_avg:96.31ms +[2025-08-22 19:03:10] [Rank 0] step:9401/10000 train_time:905455ms step_avg:96.31ms +[2025-08-22 19:03:12] [Rank 0] step:9421/10000 train_time:907470ms step_avg:96.32ms +[2025-08-22 19:03:12] [Rank 0] step:9421/10000 train_time:907470ms step_avg:96.32ms +[2025-08-22 19:03:14] [Rank 0] step:9441/10000 train_time:909490ms step_avg:96.33ms +[2025-08-22 
19:03:14] [Rank 0] step:9441/10000 train_time:909490ms step_avg:96.33ms +[2025-08-22 19:03:16] [Rank 0] step:9461/10000 train_time:911517ms step_avg:96.34ms +[2025-08-22 19:03:16] [Rank 0] step:9461/10000 train_time:911517ms step_avg:96.34ms +[2025-08-22 19:03:18] [Rank 0] step:9481/10000 train_time:913548ms step_avg:96.36ms +[2025-08-22 19:03:18] [Rank 0] step:9481/10000 train_time:913548ms step_avg:96.36ms +[2025-08-22 19:03:20] [Rank 0] step:9501/10000 train_time:915583ms step_avg:96.37ms +[2025-08-22 19:03:20] [Rank 0] step:9501/10000 train_time:915583ms step_avg:96.37ms +[2025-08-22 19:03:22] [Rank 0] step:9521/10000 train_time:917600ms step_avg:96.38ms +[2025-08-22 19:03:22] [Rank 0] step:9521/10000 train_time:917600ms step_avg:96.38ms +[2025-08-22 19:03:24] [Rank 0] step:9541/10000 train_time:919619ms step_avg:96.39ms +[2025-08-22 19:03:24] [Rank 0] step:9541/10000 train_time:919619ms step_avg:96.39ms +[2025-08-22 19:03:26] [Rank 0] step:9561/10000 train_time:921636ms step_avg:96.40ms +[2025-08-22 19:03:26] [Rank 0] step:9561/10000 train_time:921636ms step_avg:96.40ms +[2025-08-22 19:03:28] [Rank 0] step:9581/10000 train_time:923661ms step_avg:96.41ms +[2025-08-22 19:03:28] [Rank 0] step:9581/10000 train_time:923661ms step_avg:96.41ms +[2025-08-22 19:03:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:03:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:03:44] [Rank 0] PRINT: step:9600/10000 val_loss:3.4703 svd_entropy: attn_qk:H=0.7532,top10E=0.29,eRank=168.9,q75/q25=57.95 attn_vo:H=0.8204,top10E=0.12,eRank=312.8,q75/q25=inf mlp_w1:H=0.9690,top10E=0.04,eRank=625.1,q75/q25=2.99 mlp_w2:H=0.9637,top10E=0.05,eRank=604.2,q75/q25=3.11 vo_prod:H=0.6819,top10E=0.23,eRank=135.8,q75/q25=inf train_time:925710ms step_avg:96.43ms +[2025-08-22 19:03:44] [Rank 0] PRINT: step:9600/10000 val_loss:3.4703 svd_entropy: attn_qk:H=0.7532,top10E=0.29,eRank=168.9,q75/q25=57.95 attn_vo:H=0.8204,top10E=0.12,eRank=312.8,q75/q25=inf mlp_w1:H=0.9690,top10E=0.04,eRank=625.1,q75/q25=2.99 mlp_w2:H=0.9637,top10E=0.05,eRank=604.2,q75/q25=3.11 vo_prod:H=0.6819,top10E=0.23,eRank=135.8,q75/q25=inf train_time:925710ms step_avg:96.43ms +[2025-08-22 19:03:44] [Rank 0] step:9601/10000 train_time:925730ms step_avg:96.42ms +[2025-08-22 19:03:44] [Rank 0] step:9601/10000 train_time:925730ms step_avg:96.42ms +[2025-08-22 19:03:46] [Rank 0] step:9621/10000 train_time:927759ms step_avg:96.43ms +[2025-08-22 19:03:46] [Rank 0] step:9621/10000 train_time:927759ms step_avg:96.43ms +[2025-08-22 19:03:48] [Rank 0] step:9641/10000 train_time:929787ms step_avg:96.44ms +[2025-08-22 19:03:48] [Rank 0] step:9641/10000 train_time:929787ms step_avg:96.44ms +[2025-08-22 19:03:50] [Rank 0] step:9661/10000 train_time:931844ms step_avg:96.45ms +[2025-08-22 19:03:50] [Rank 0] step:9661/10000 train_time:931844ms step_avg:96.45ms +[2025-08-22 19:03:52] [Rank 0] step:9681/10000 train_time:933892ms step_avg:96.47ms +[2025-08-22 19:03:52] [Rank 0] step:9681/10000 train_time:933892ms step_avg:96.47ms +[2025-08-22 19:03:54] [Rank 0] step:9701/10000 train_time:935954ms step_avg:96.48ms +[2025-08-22 19:03:54] [Rank 0] step:9701/10000 train_time:935954ms step_avg:96.48ms +[2025-08-22 19:03:56] [Rank 0] step:9721/10000 train_time:938004ms step_avg:96.49ms +[2025-08-22 19:03:56] [Rank 0] step:9721/10000 train_time:938004ms step_avg:96.49ms +[2025-08-22 19:03:58] [Rank 0] 
step:9741/10000 train_time:940072ms step_avg:96.51ms +[2025-08-22 19:03:58] [Rank 0] step:9741/10000 train_time:940072ms step_avg:96.51ms +[2025-08-22 19:04:00] [Rank 0] step:9761/10000 train_time:942131ms step_avg:96.52ms +[2025-08-22 19:04:00] [Rank 0] step:9761/10000 train_time:942131ms step_avg:96.52ms +[2025-08-22 19:04:02] [Rank 0] step:9781/10000 train_time:944193ms step_avg:96.53ms +[2025-08-22 19:04:02] [Rank 0] step:9781/10000 train_time:944193ms step_avg:96.53ms +[2025-08-22 19:04:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:04:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:04:18] [Rank 0] PRINT: step:9800/10000 val_loss:3.4625 svd_entropy: attn_qk:H=0.7533,top10E=0.29,eRank=168.9,q75/q25=57.78 attn_vo:H=0.8205,top10E=0.12,eRank=313.0,q75/q25=inf mlp_w1:H=0.9690,top10E=0.04,eRank=625.2,q75/q25=2.99 mlp_w2:H=0.9638,top10E=0.05,eRank=604.2,q75/q25=3.11 vo_prod:H=0.6822,top10E=0.23,eRank=136.0,q75/q25=inf train_time:946273ms step_avg:96.56ms +[2025-08-22 19:04:18] [Rank 0] PRINT: step:9800/10000 val_loss:3.4625 svd_entropy: attn_qk:H=0.7533,top10E=0.29,eRank=168.9,q75/q25=57.78 attn_vo:H=0.8205,top10E=0.12,eRank=313.0,q75/q25=inf mlp_w1:H=0.9690,top10E=0.04,eRank=625.2,q75/q25=2.99 mlp_w2:H=0.9638,top10E=0.05,eRank=604.2,q75/q25=3.11 vo_prod:H=0.6822,top10E=0.23,eRank=136.0,q75/q25=inf train_time:946273ms step_avg:96.56ms +[2025-08-22 19:04:18] [Rank 0] step:9801/10000 train_time:946294ms step_avg:96.55ms +[2025-08-22 19:04:18] [Rank 0] step:9801/10000 train_time:946294ms step_avg:96.55ms +[2025-08-22 19:04:21] [Rank 0] step:9821/10000 train_time:948343ms step_avg:96.56ms +[2025-08-22 19:04:21] [Rank 0] step:9821/10000 train_time:948343ms step_avg:96.56ms +[2025-08-22 19:04:23] [Rank 0] step:9841/10000 train_time:950394ms step_avg:96.57ms +[2025-08-22 
19:04:23] [Rank 0] step:9841/10000 train_time:950394ms step_avg:96.57ms +[2025-08-22 19:04:25] [Rank 0] step:9861/10000 train_time:952428ms step_avg:96.59ms +[2025-08-22 19:04:25] [Rank 0] step:9861/10000 train_time:952428ms step_avg:96.59ms +[2025-08-22 19:04:27] [Rank 0] step:9881/10000 train_time:954466ms step_avg:96.60ms +[2025-08-22 19:04:27] [Rank 0] step:9881/10000 train_time:954466ms step_avg:96.60ms +[2025-08-22 19:04:29] [Rank 0] step:9901/10000 train_time:956525ms step_avg:96.61ms +[2025-08-22 19:04:29] [Rank 0] step:9901/10000 train_time:956525ms step_avg:96.61ms +[2025-08-22 19:04:31] [Rank 0] step:9921/10000 train_time:958569ms step_avg:96.62ms +[2025-08-22 19:04:31] [Rank 0] step:9921/10000 train_time:958569ms step_avg:96.62ms +[2025-08-22 19:04:33] [Rank 0] step:9941/10000 train_time:960628ms step_avg:96.63ms +[2025-08-22 19:04:33] [Rank 0] step:9941/10000 train_time:960628ms step_avg:96.63ms +[2025-08-22 19:04:35] [Rank 0] step:9961/10000 train_time:962671ms step_avg:96.64ms +[2025-08-22 19:04:35] [Rank 0] step:9961/10000 train_time:962671ms step_avg:96.64ms +[2025-08-22 19:04:37] [Rank 0] step:9981/10000 train_time:964785ms step_avg:96.66ms +[2025-08-22 19:04:37] [Rank 0] step:9981/10000 train_time:964785ms step_avg:96.66ms +[2025-08-22 19:04:39] [Rank 0] step:10000/10000 train_time:966793ms step_avg:96.68ms +[2025-08-22 19:04:39] [Rank 0] step:10000/10000 train_time:966793ms step_avg:96.68ms +[2025-08-22 19:04:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:04:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:04:53] [Rank 0] PRINT: step:10000/10000 val_loss:3.4554 svd_entropy: attn_qk:H=0.7533,top10E=0.29,eRank=169.0,q75/q25=57.72 attn_vo:H=0.8206,top10E=0.12,eRank=313.1,q75/q25=inf mlp_w1:H=0.9690,top10E=0.04,eRank=625.2,q75/q25=2.99 mlp_w2:H=0.9638,top10E=0.05,eRank=604.3,q75/q25=3.11 vo_prod:H=0.6824,top10E=0.23,eRank=136.2,q75/q25=inf train_time:966907ms step_avg:96.69ms +[2025-08-22 19:04:53] [Rank 0] PRINT: step:10000/10000 val_loss:3.4554 svd_entropy: attn_qk:H=0.7533,top10E=0.29,eRank=169.0,q75/q25=57.72 attn_vo:H=0.8206,top10E=0.12,eRank=313.1,q75/q25=inf mlp_w1:H=0.9690,top10E=0.04,eRank=625.2,q75/q25=2.99 mlp_w2:H=0.9638,top10E=0.05,eRank=604.3,q75/q25=3.11 vo_prod:H=0.6824,top10E=0.23,eRank=136.2,q75/q25=inf train_time:966907ms step_avg:96.69ms +[2025-08-22 19:04:53] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 19:04:53 2025 --- +[2025-08-22 19:04:53] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 19:04:53 2025 --- +[2025-08-22 19:04:53] [Rank 0] PRINT: Peak memory allocated: 11166 MiB reserved: 16996 MiB +[2025-08-22 19:04:53] [Rank 0] PRINT: Peak memory allocated: 11166 MiB reserved: 16996 MiB diff --git a/logs_svd_gated/mode_1_param_gated_seed_41/config.json b/logs_svd_gated/mode_1_param_gated_seed_41/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b31b03ef00ee04425f38ea60c1851693404eaaba --- /dev/null +++ b/logs_svd_gated/mode_1_param_gated_seed_41/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 41, + "optimizer_mode": 1, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + "cooldown_frac": 
0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "3a3ce158-9694-4ff2-9fe7-1f4012f76183", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_1_param_gated_seed_41/training_log_3a3ce158-9694-4ff2-9fe7-1f4012f76183.txt b/logs_svd_gated/mode_1_param_gated_seed_41/training_log_3a3ce158-9694-4ff2-9fe7-1f4012f76183.txt new file mode 100644 index 0000000000000000000000000000000000000000..504a23e42f489798c3cee00a38833430388501a6 --- /dev/null +++ b/logs_svd_gated/mode_1_param_gated_seed_41/training_log_3a3ce158-9694-4ff2-9fe7-1f4012f76183.txt @@ -0,0 +1,2926 @@ +[2025-08-22 08:59:38] [Rank 0] PRINT: --- Script Start: Fri Aug 22 08:59:38 2025 --- +[2025-08-22 08:59:38] [Rank 0] PRINT: --- Script Start: Fri Aug 22 08:59:38 2025 --- +[2025-08-22 08:59:38] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=1, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 08:59:38] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=1, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 08:59:38] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 08:59:38] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 08:59:38] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 08:59:38] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 08:59:38] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_1_param_gated_seed_41 +[2025-08-22 08:59:38] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_1_param_gated_seed_41 +[2025-08-22 08:59:38] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import 
argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k 
def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10):
    """Compute spectral statistics of a weight matrix from its singular values.

    Returns a dict with:
      - entropy_norm: Shannon entropy of the singular-value energy distribution,
        normalized by log(n) (log(2) floor for n==1) so it lies in [0, 1].
      - erank: effective rank = exp(H).
      - topk_energy: fraction of total energy (sum of p_i) in the top-k singular values.
      - q75_q25: ratio of the 75th to 25th percentile of the eigenvalues (sigma^2);
        inf when the matrix is (numerically) zero.
    Singular values <= 1e-9 are discarded before computing the distribution.
    """
    with torch.no_grad():
        s = torch.linalg.svdvals(matrix.detach().to("cpu", torch.float32))
        s = s[s > 1e-9]
        n = s.numel()
        if n == 0:
            return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float("inf"))

        s2 = s * s
        total_energy = float(torch.sum(s2))
        if total_energy == 0.0:
            return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float("inf"))

        p = s2 / total_energy  # energy distribution over singular values
        # Shannon entropy H with natural log; entr(p) = -p*log(p), entr(0) = 0.
        H = float(torch.sum(torch.special.entr(p)))
        entropy_norm = H / np.log(max(n, 2))
        erank = float(np.exp(H))

        k = min(topk, n)
        topk_energy = float(torch.topk(p, k).values.sum())

        # Eigenvalues of M M^T are s^2; quantile spread measures spectrum flatness.
        q25 = float(torch.quantile(s2, 0.25))
        q75 = float(torch.quantile(s2, 0.75))
        q75_q25 = (q75 / q25) if q25 > 0 else float("inf")

        return dict(
            entropy_norm=entropy_norm,
            erank=erank,
            topk_energy=topk_energy,
            q75_q25=q75_q25,
        )


# -----------------------------------------------------------------------------
# int main
parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon")
parser.add_argument("--unet", action="store_true", help="Use U-net architecture")
parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility")
# Fix: help text previously omitted modes 9 and 10 (both implemented below) and
# was missing "; " separators between items 4/5 and 5/6.
parser.add_argument("--optimizer_mode", type=int, default=0,
                    help="Defines how Muon is applied. "
                         "0: Muon(All Hidden Attn+MLP - original); "
                         "1: Muon(QK Attn)/Adam(VO Attn,MLP); "
                         "2: Muon(VO Attn)/Adam(QK Attn,MLP); "
                         "3: Muon(All Attn)/Adam(MLP); "
                         "4: Muon(MLP)/Adam(All Attn); "
                         "5: All Adam (No Muon, all applicable matrices to Adam); "
                         "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP); "
                         "7: Muon(VO Attn, MLP)/Adam(QK Attn); "
                         "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP); "
                         "9: Muon(V Attn, MLP)/Adam(O Attn, QK Attn); "
                         "10: Muon(O Attn, MLP)/Adam(V Attn, QK Attn)."
                    )
parser.add_argument("--model_parameterization", type=str, default="whole",
                    choices=["whole", "qkvo", "norope", "gated"])
parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices")
parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices")
parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs")
exp_args = parser.parse_args()
set_seed(exp_args.seed)

# Pick the GPT implementation that matches the requested parameterization.
if exp_args.unet:
    print("Using U-net architecture")
    from models.nano_GPT_unet import GPT
elif exp_args.model_parameterization == "qkvo":
    print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w")
    # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w
    from models.nano_GPT_qkvo import GPT
elif exp_args.model_parameterization == "norope":
    print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w")
    from models.nano_GPT_norope import GPT
elif exp_args.model_parameterization == "gated":
    print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w")
    from models.nano_GPT_gated import GPT
elif exp_args.model_parameterization == "whole":
    print("Using original architecture")
    from models.nano_GPT import GPT


@dataclass
class Hyperparameters:
    # NOTE: attributes are intentionally un-annotated, so @dataclass registers no
    # fields -- they act as plain class attributes and repr() prints
    # "Hyperparameters()" (as seen in the logs). The config dump below reads them
    # from __class__.__dict__ instead.
    # data
    train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin"
    val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin"
    val_tokens = 1966080
    train_seq_len = 12 * 1024   # FlexAttention sequence length
    val_seq_len = 4 * 16 * 1024  # FlexAttention sequence length for validation
    # optimization
    num_iterations = 10000
    cooldown_frac = 0.4
    # architecture
    vocab_size = 50257
    # evaluation and logging
    val_loss_every = 200
    save_checkpoint = False


args = Hyperparameters()

# DDP setup: ranks come from torchrun's environment; default to single-process.
rank = int(os.environ.get("RANK", 0))
local_rank = int(os.environ.get("LOCAL_RANK", 0))  # used for device selection
world_size = int(os.environ.get("WORLD_SIZE", 1))

assert torch.cuda.is_available()
device = torch.device("cuda", local_rank)
torch.cuda.set_device(device)

if not dist.is_initialized():  # guard against double init
    dist.init_process_group(backend="nccl", rank=rank, world_size=world_size)
dist.barrier()
master_process = (rank == 0)

# Logging state; only the master process ever assigns these.
logfile = None
run_dir_path_str = None
base_log_dir = Path(exp_args.base_dir)
if master_process:
    # Re-seed on the master so directory/config creation is reproducible.
    set_seed(exp_args.seed)

    # Run folder is keyed by optimizer mode, parameterization, and seed.
    run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}"
    run_dir_path = base_log_dir / run_folder_name
    run_dir_path.mkdir(parents=True, exist_ok=True)
    run_dir_path_str = str(run_dir_path)

    run_uuid = uuid.uuid4()
    logfile = run_dir_path / f"training_log_{run_uuid}.txt"
    print(f"Logging to: {logfile}")

    # Persist the full experiment configuration next to the log.
    config_to_save = {
        "cli_args": vars(exp_args),
        "hyperparameters": {k: v for k, v in args.__class__.__dict__.items()
                            if not k.startswith('__') and not callable(v)},
        "run_uuid_for_log": str(run_uuid),
        "script_code_logged_at_start": True,
    }
    config_file_path = run_dir_path / "config.json"
    with open(config_file_path, "w") as f:
        json.dump(config_to_save, f, indent=4)
    print(f"Saved configuration to: {config_file_path}")


def print0(s, console=False):
    """Master-rank-only logger.

    Prefixes `s` with a timestamp and rank, appends it to the run logfile, and
    echoes to stdout when `console=True` or when `s` starts with "PRINT:" (the
    prefix is stripped for the console copy).

    Bug fix: the logfile append was previously duplicated (an unconditional
    second write followed the guarded one), so every message appeared twice in
    the training log. The message is now written exactly once.
    """
    if master_process:
        timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        log_message = f"[{timestamp}] [Rank {rank}] {s}"

        if console or s.startswith("PRINT:"):
            actual_s = s[6:] if s.startswith("PRINT:") else s
            print(actual_s)

        if logfile:
            with open(logfile, "a") as f:
                f.write(log_message + "\n")


print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True)
print0(f"PRINT: Parsed CLI args: {exp_args}", console=True)
print0(f"PRINT: Hyperparameters: {args}", console=True)
print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True)
if master_process:
    print0(f"PRINT: Run directory: {run_dir_path_str}", console=True)
print0(code)  # log the full script source for reproducibility
########################################
#    Construct model and optimizer     #
########################################
print0("PRINT: Constructing model...", console=True)
model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768,
                       max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda()
for m in model.modules():
    if isinstance(m, nn.Embedding):
        m.bfloat16()
print0("PRINT: Broadcasting model parameters...", console=True)
for param in model.parameters():
    dist.broadcast(param.detach(), 0)
print0("PRINT: Model constructed and broadcasted.", console=True)

# --- PARAMETER COLLECTION AND OPTIMIZER SETUP ---
# Fix: the "qkvo"/"norope" and "gated" branches were ~150 duplicated lines that
# differed only in collecting the gated MLP's c_up weights. They are merged into
# one branch; `has_gate` toggles the extra collection. Behavior and all log
# strings are unchanged.
if exp_args.model_parameterization in ("qkvo", "norope", "gated"):
    print0("PRINT: Collecting parameters for optimizers...", console=True)
    has_gate = (exp_args.model_parameterization == "gated")
    head_params = [model.lm_head.weight]
    embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds]

    # Granular collection for attention and MLP parts.
    attn_q_params = []
    attn_k_params = []
    attn_v_params = []
    attn_o_params = []  # W_O from c_proj
    mlp_fc_params = []
    mlp_proj_params = []
    mlp_up_params = []  # only populated for the gated parameterization

    for block_module in model.blocks:
        if block_module.attn is not None:
            # q_w/k_w/v_w MUST exist on CausalSelfAttention for these parameterizations.
            if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w)
            else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True)
            if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w)
            else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True)
            if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w)
            else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True)
            attn_o_params.append(block_module.attn.c_proj.weight)
        if block_module.mlp is not None:
            mlp_fc_params.append(block_module.mlp.c_fc.weight)
            mlp_proj_params.append(block_module.mlp.c_proj.weight)
            if has_gate:
                mlp_up_params.append(block_module.mlp.c_up.weight)

    # Combine into logical groups for experiments. With has_gate=False,
    # mlp_up_params is empty, so these reduce exactly to the non-gated grouping.
    attn_qk_group = attn_q_params + attn_k_params
    attn_vo_group = attn_v_params + attn_o_params
    all_attn_matrices = attn_qk_group + attn_vo_group
    mlp_w1_group = mlp_fc_params + mlp_up_params
    mlp_w2_group = mlp_proj_params
    all_mlp_matrices = mlp_fc_params + mlp_proj_params + mlp_up_params

    # Scalar parameters: everything not explicitly grouped as a matrix.
    matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices)
    scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check]
    for p_scalar in scalar_params:  # sanity check
        if p_scalar.ndim >= 2:
            print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True)

    # Determine parameter distribution based on optimizer_mode.
    muon_params_target_list = []
    adam_matrix_target_list = []  # matrices Adam will handle specifically
    adam_matrix_lr = exp_args.adam_lr

    current_optimizer_mode = exp_args.optimizer_mode
    print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True)

    if current_optimizer_mode == 0:  # original behavior: Muon on all hidden matrices
        print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True)
        muon_params_target_list = all_attn_matrices + all_mlp_matrices
        # Adam handles embeds, head, scalars by default.
    elif current_optimizer_mode == 1:
        print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_qk_group
        adam_matrix_target_list = attn_vo_group + all_mlp_matrices
    elif current_optimizer_mode == 2:
        print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_vo_group
        adam_matrix_target_list = attn_qk_group + all_mlp_matrices
    elif current_optimizer_mode == 3:
        print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = all_attn_matrices
        adam_matrix_target_list = all_mlp_matrices
    elif current_optimizer_mode == 4:
        print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = all_mlp_matrices
        adam_matrix_target_list = all_attn_matrices
    elif current_optimizer_mode == 5:  # all Adam, no Muon
        print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = []
        adam_matrix_target_list = all_attn_matrices + all_mlp_matrices
    elif current_optimizer_mode == 6:
        print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = mlp_w2_group
        adam_matrix_target_list = all_attn_matrices + mlp_w1_group
    elif current_optimizer_mode == 7:
        print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_vo_group + all_mlp_matrices
        adam_matrix_target_list = attn_qk_group
    elif current_optimizer_mode == 8:
        print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_vo_group + mlp_w2_group
        adam_matrix_target_list = attn_qk_group + mlp_w1_group
    elif current_optimizer_mode == 9:
        print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_v_params + all_mlp_matrices
        adam_matrix_target_list = attn_o_params + attn_qk_group
    elif current_optimizer_mode == 10:
        print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_o_params + all_mlp_matrices
        adam_matrix_target_list = attn_v_params + attn_qk_group
    else:
        raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}")

    # Adam optimizer setup: head, embeds, and scalars always go to Adam.
    adam_param_groups_config = [
        dict(params=head_params, lr=adam_matrix_lr),
        dict(params=embed_params, lr=adam_matrix_lr),
        dict(params=scalar_params, lr=adam_matrix_lr),
    ]
    # Add matrices specifically assigned to Adam for this experiment mode.
    if adam_matrix_target_list:
        # Defensive flatten in case a nested list slipped into the target list.
        flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list
                              for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p])
                              if p is not None]
        if flat_adam_matrices:
            adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr))

    # Drop any empty groups (e.g. if scalar_params happened to be empty).
    adam_param_groups_config = [g for g in adam_param_groups_config if g['params']]
    optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True)
    optimizers = [optimizer1]

    # Muon optimizer setup: flatten and deduplicate by identity.
    if muon_params_target_list:
        flat_unique_muon_params = []
        seen_muon_ids = set()
        for sublist_or_p in muon_params_target_list:
            for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]):
                if p is not None and id(p) not in seen_muon_ids:
                    flat_unique_muon_params.append(p)
                    seen_muon_ids.add(id(p))

        if flat_unique_muon_params:
            optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0)
            optimizers.append(optimizer2)
        else:
            print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True)
            optimizer2 = None
    else:
        print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True)
        optimizer2 = None

    print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True)
    if optimizer2:
        print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True)

elif exp_args.model_parameterization == "whole":
    hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n]
    embed_params = [p for n, p in model.named_parameters() if "embed" in n]
    scalar_params = [p for p in model.parameters() if p.ndim < 2]
    head_params = [model.lm_head.weight]

    # init the optimizer(s)
    adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)]
    # small adam epsilon by @YouJiacheng. this is an alternate method of fixing the
    # world_size dependence discovered by @fernbear.bsky.social
    # https://x.com/hi_tysam/status/1879692937589875094
    optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True)
    optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size)
    optimizers = [optimizer1, optimizer2]

# Remember each group's base LR so the schedule can scale it multiplicatively.
for opt in optimizers:
    for group in opt.param_groups:
        group["initial_lr"] = group["lr"]


def get_lr(step: int):
    """LR multiplier: flat at 1.0, then linear decay to 0.1 over the cooldown fraction."""
    x = step / args.num_iterations  # progress in training
    if not (0 <= x <= 1):
        # Clamp so the final step (x == 1) and any overshoot are well-defined.
        x = min(max(x, 0.0), 1.0)

    if x < 1 - args.cooldown_frac:
        return 1.0
    else:
        # Guard against cooldown_frac == 0 to avoid division by zero.
        w = (1 - x) / max(args.cooldown_frac, 1e-9)
        return w * 1.0 + (1 - w) * 0.1


def next_multiple_of_n(v: float | int, *, n: int):
    """Smallest multiple of n that is >= v."""
    return next(x for x in range(n, int(v) + 1 + n, n) if x >= v)


@lru_cache(1)
def get_window_size_blocks_helper(window_size: int):
    # Cached pinned-memory scalar pushed to GPU; FlexAttention wants block counts.
    return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True)


def get_window_size_blocks(step: int):
    """Attention window schedule: grows linearly with training progress, min 128."""
    x = step / args.num_iterations  # progress in training
    if not (0 <= x <= 1):
        x = min(max(x, 0.0), 1.0)
    window_size = max(128, next_multiple_of_n(1728 * x, n=128))
    return get_window_size_blocks_helper(window_size)


print0("PRINT: Compiling model with TorchInductor...", console=True)
model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune")
print0("PRINT: Model compilation complete.", console=True)

########################################
#            Warmup kernels            #
########################################
print0("PRINT: Starting warmup...", console=True)
warmup_steps = 10
# Snapshot model+optimizer state so the warmup steps leave no trace.
initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()),
                     optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers])
for _ in range(warmup_steps):
    # Random tokens are enough to trigger kernel compilation/autotuning.
    inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda")
    loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0))
    loss.backward()
    for param in model_compiled.parameters():
        if param.grad is not None:
            dist.all_reduce(param.grad, op=dist.ReduceOp.AVG)
    for opt in optimizers:
        opt.step()
    model_compiled.zero_grad(set_to_none=True)
# Restore the pre-warmup state.
model_compiled.load_state_dict(initial_state["model"])
for opt, opt_state in zip(optimizers, initial_state["optimizers"]):
    opt.load_state_dict(opt_state)
del initial_state
print0("PRINT: Warmup complete.", console=True)
torch.cuda.synchronize()


# Matrices whose spectra get analyzed during validation.
params_to_analyze = []
if exp_args.model_parameterization == "whole":
    params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad]
elif exp_args.model_parameterization in ("qkvo", "gated"):
    params_to_analyze = all_attn_matrices + all_mlp_matrices
    matrix_groups_for_svd = {}
    if master_process:
        matrix_groups_for_svd = {
            "attn_qk": attn_qk_group,
            "attn_vo": attn_vo_group,
            "mlp_w1": mlp_w1_group,
            "mlp_w2": mlp_proj_params,
        }
########################################
#        Training and validation       #
########################################
print0("PRINT: Starting training...", console=True)
train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size)
training_time_ms = 0
torch.cuda.synchronize()
t0 = time.perf_counter()
train_steps = args.num_iterations

for step in range(train_steps + 1):  # inclusive upper bound so the final validation runs
    last_step = (step == train_steps)

    # --------------- VALIDATION SECTION -----------------
    # Validate at step 0 (post-warmup), at the configured interval, and on the last step.
    if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0):
        torch.cuda.synchronize()
        # Accumulate the just-finished training segment (step 0 has none yet).
        if step > 0:
            current_run_time = 1000 * (time.perf_counter() - t0)
            training_time_ms += current_run_time

        model_compiled.eval()
        val_batch_size = world_size * args.val_seq_len
        if args.val_tokens % val_batch_size != 0:
            print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True)
        val_num_steps = args.val_tokens // val_batch_size

        val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size)
        val_loss_sum = torch.zeros(1, device=device)  # accumulate on device
        actual_val_steps = 0
        with torch.no_grad():
            for val_i in range(val_num_steps):
                try:
                    inputs, targets = next(val_loader)
                    loss_val = model_compiled(inputs, targets, get_window_size_blocks(step))
                    val_loss_sum += loss_val
                    actual_val_steps += 1
                except StopIteration:
                    print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True)
                    break

        if actual_val_steps > 0:
            val_loss_avg = val_loss_sum / actual_val_steps
        else:
            val_loss_avg = torch.tensor(float('nan'), device=device)
            print0(f"PRINT: Warning: No validation steps were completed. val_loss is NaN.", console=True)

        del val_loader
        dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG)

        # Spectral metrics on the master only (CPU SVDs; the GPUs idle briefly).
        svd_log_str = ""
        if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd:
            TOPK = 10
            svd_results_by_category = {}

            with torch.no_grad():
                # Per-category metrics: average over the matrices in each group.
                for name, group_params in matrix_groups_for_svd.items():
                    if not group_params:
                        continue
                    mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params]
                    if mets:
                        svd_results_by_category[name] = dict(
                            entropy=float(np.mean([m['entropy_norm'] for m in mets])),
                            erank=float(np.mean([m['erank'] for m in mets])),
                            topkE=float(np.mean([m['topk_energy'] for m in mets])),
                            q75_q25=float(np.mean([m['q75_q25'] for m in mets])),
                        )

                # The composed W_O @ W_V product as its own category.
                vo_mets = []
                for w_v, w_o in zip(attn_v_params, attn_o_params):
                    vo_mets.append(calculate_svd_metrics(torch.matmul(w_o, w_v), topk=TOPK))
                if vo_mets:
                    svd_results_by_category['vo_prod'] = dict(
                        entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])),
                        erank=float(np.mean([m['erank'] for m in vo_mets])),
                        topkE=float(np.mean([m['topk_energy'] for m in vo_mets])),
                        q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])),
                    )

            svd_log_parts = []
            for name, vals in svd_results_by_category.items():
                svd_log_parts.append(
                    f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}"
                )
            svd_log_str = " ".join(svd_log_parts)

        avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0
        print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True)

        model_compiled.train()
        torch.cuda.synchronize()
        t0 = time.perf_counter()  # restart timer for the next training segment

    if last_step:
        if master_process and args.save_checkpoint:
            if run_dir_path_str:
                checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints"
                checkpoint_parent_dir.mkdir(parents=True, exist_ok=True)
                checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt"
                log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(),
                                      optimizers=[opt.state_dict() for opt in optimizers])
                torch.save(log_checkpoint, str(checkpoint_path))
                print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True)
            else:
                print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True)
        break

    # --------------- TRAINING SECTION -----------------
    try:
        inputs, targets = next(train_loader)
    except StopIteration:
        print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. Ending training early at step {step}.", console=True)
        break

    loss_train = model_compiled(inputs, targets, get_window_size_blocks(step))
    loss_train.backward()

    for param in model_compiled.parameters():
        if param.grad is not None:
            dist.all_reduce(param.grad, op=dist.ReduceOp.AVG)

    # Apply the LR schedule multiplicatively on each group's stored base LR.
    current_lr_val = get_lr(step)
    for opt in optimizers:
        for group in opt.param_groups:
            group["lr"] = group["initial_lr"] * current_lr_val

    # Muon momentum warmup (only when Muon was created for this mode).
    if optimizer2 is not None:
        for group in optimizer2.param_groups:
            frac = min(step / 300, 1)
            group["momentum"] = (1 - frac) * 0.85 + frac * 0.95

    for opt in optimizers:
        opt.step()

    model_compiled.zero_grad(set_to_none=True)

    if step > 0 and (step % 20 == 0 or step == train_steps - 1):
        current_segment_time_ms = 1000 * (time.perf_counter() - t0)
        approx_total_training_time_ms = training_time_ms + current_segment_time_ms
        # Fix: removed the unused `train_loss_per_token` computation; its
        # loss_train.item() call forced a GPU sync every 20 steps and the value
        # was never logged.
        print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True)

print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True)
print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB "
       f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True)
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 08:59:38] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 08:59:38] [Rank 0] PRINT: Constructing model... +[2025-08-22 08:59:38] [Rank 0] PRINT: Constructing model... +[2025-08-22 08:59:40] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 08:59:40] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 08:59:40] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 08:59:40] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 08:59:40] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 08:59:40] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 08:59:40] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 1 +[2025-08-22 08:59:40] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 1 +[2025-08-22 08:59:40] [Rank 0] PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: 0.05). +[2025-08-22 08:59:40] [Rank 0] PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: 0.05). +[2025-08-22 08:59:40] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 08:59:40] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 08:59:40] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-08-22 08:59:40] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-08-22 08:59:40] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 08:59:40] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 08:59:40] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 08:59:40] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 08:59:40] [Rank 0] PRINT: Starting warmup... +[2025-08-22 08:59:40] [Rank 0] PRINT: Starting warmup... +[2025-08-22 09:04:43] [Rank 0] PRINT: Warmup complete. +[2025-08-22 09:04:43] [Rank 0] PRINT: Warmup complete. +[2025-08-22 09:04:43] [Rank 0] PRINT: Starting training... +[2025-08-22 09:04:43] [Rank 0] PRINT: Starting training... 
+[2025-08-22 09:04:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:04:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:05:57] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 09:05:57] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 09:05:59] [Rank 0] step:21/10000 train_time:1651ms step_avg:78.64ms +[2025-08-22 09:05:59] [Rank 0] step:21/10000 train_time:1651ms step_avg:78.64ms +[2025-08-22 09:06:00] [Rank 0] step:41/10000 train_time:3323ms step_avg:81.05ms +[2025-08-22 09:06:00] [Rank 0] step:41/10000 train_time:3323ms step_avg:81.05ms +[2025-08-22 09:06:02] [Rank 0] step:61/10000 train_time:4996ms step_avg:81.90ms +[2025-08-22 09:06:02] [Rank 0] step:61/10000 train_time:4996ms step_avg:81.90ms +[2025-08-22 09:06:04] [Rank 0] step:81/10000 train_time:6670ms step_avg:82.35ms +[2025-08-22 09:06:04] [Rank 0] step:81/10000 train_time:6670ms step_avg:82.35ms +[2025-08-22 09:06:05] [Rank 0] step:101/10000 train_time:8345ms step_avg:82.63ms +[2025-08-22 09:06:05] [Rank 0] step:101/10000 train_time:8345ms step_avg:82.63ms +[2025-08-22 09:06:07] [Rank 0] step:121/10000 train_time:10021ms step_avg:82.82ms +[2025-08-22 09:06:07] [Rank 0] step:121/10000 
train_time:10021ms step_avg:82.82ms +[2025-08-22 09:06:09] [Rank 0] step:141/10000 train_time:11699ms step_avg:82.97ms +[2025-08-22 09:06:09] [Rank 0] step:141/10000 train_time:11699ms step_avg:82.97ms +[2025-08-22 09:06:10] [Rank 0] step:161/10000 train_time:13378ms step_avg:83.09ms +[2025-08-22 09:06:10] [Rank 0] step:161/10000 train_time:13378ms step_avg:83.09ms +[2025-08-22 09:06:12] [Rank 0] step:181/10000 train_time:15058ms step_avg:83.19ms +[2025-08-22 09:06:12] [Rank 0] step:181/10000 train_time:15058ms step_avg:83.19ms +[2025-08-22 09:06:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:06:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:06:27] [Rank 0] PRINT: step:200/10000 val_loss:6.6700 svd_entropy: attn_qk:H=0.8232,top10E=0.24,eRank=252.4,q75/q25=10.74 attn_vo:H=0.1303,top10E=0.98,eRank=2.8,q75/q25=1380.86 mlp_w1:H=0.3796,top10E=0.86,eRank=12.7,q75/q25=7.54 mlp_w2:H=0.4431,top10E=0.78,eRank=19.0,q75/q25=6.04 vo_prod:H=0.0274,top10E=1.00,eRank=1.4,q75/q25=9165.58 train_time:16820ms step_avg:84.10ms +[2025-08-22 09:06:27] [Rank 0] PRINT: step:200/10000 val_loss:6.6700 svd_entropy: attn_qk:H=0.8232,top10E=0.24,eRank=252.4,q75/q25=10.74 attn_vo:H=0.1303,top10E=0.98,eRank=2.8,q75/q25=1380.86 mlp_w1:H=0.3796,top10E=0.86,eRank=12.7,q75/q25=7.54 mlp_w2:H=0.4431,top10E=0.78,eRank=19.0,q75/q25=6.04 vo_prod:H=0.0274,top10E=1.00,eRank=1.4,q75/q25=9165.58 train_time:16820ms step_avg:84.10ms +[2025-08-22 09:06:27] [Rank 0] step:201/10000 train_time:16835ms step_avg:83.76ms +[2025-08-22 09:06:27] [Rank 0] step:201/10000 train_time:16835ms step_avg:83.76ms +[2025-08-22 09:06:29] [Rank 0] step:221/10000 train_time:18445ms step_avg:83.46ms +[2025-08-22 09:06:29] [Rank 0] step:221/10000 train_time:18445ms step_avg:83.46ms +[2025-08-22 09:06:31] [Rank 0] step:241/10000 
train_time:20122ms step_avg:83.50ms +[2025-08-22 09:06:31] [Rank 0] step:241/10000 train_time:20122ms step_avg:83.50ms +[2025-08-22 09:06:32] [Rank 0] step:261/10000 train_time:21800ms step_avg:83.53ms +[2025-08-22 09:06:32] [Rank 0] step:261/10000 train_time:21800ms step_avg:83.53ms +[2025-08-22 09:06:34] [Rank 0] step:281/10000 train_time:23478ms step_avg:83.55ms +[2025-08-22 09:06:34] [Rank 0] step:281/10000 train_time:23478ms step_avg:83.55ms +[2025-08-22 09:06:36] [Rank 0] step:301/10000 train_time:25154ms step_avg:83.57ms +[2025-08-22 09:06:36] [Rank 0] step:301/10000 train_time:25154ms step_avg:83.57ms +[2025-08-22 09:06:37] [Rank 0] step:321/10000 train_time:26830ms step_avg:83.58ms +[2025-08-22 09:06:37] [Rank 0] step:321/10000 train_time:26830ms step_avg:83.58ms +[2025-08-22 09:06:39] [Rank 0] step:341/10000 train_time:28506ms step_avg:83.60ms +[2025-08-22 09:06:39] [Rank 0] step:341/10000 train_time:28506ms step_avg:83.60ms +[2025-08-22 09:06:41] [Rank 0] step:361/10000 train_time:30186ms step_avg:83.62ms +[2025-08-22 09:06:41] [Rank 0] step:361/10000 train_time:30186ms step_avg:83.62ms +[2025-08-22 09:06:42] [Rank 0] step:381/10000 train_time:31864ms step_avg:83.63ms +[2025-08-22 09:06:42] [Rank 0] step:381/10000 train_time:31864ms step_avg:83.63ms +[2025-08-22 09:06:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:06:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:06:58] [Rank 0] PRINT: step:400/10000 val_loss:6.0621 svd_entropy: attn_qk:H=0.8011,top10E=0.19,eRank=224.1,q75/q25=14.09 attn_vo:H=0.2587,top10E=0.98,eRank=6.0,q75/q25=122.06 mlp_w1:H=0.5504,top10E=0.61,eRank=41.6,q75/q25=6.84 mlp_w2:H=0.6405,top10E=0.47,eRank=73.0,q75/q25=6.49 vo_prod:H=0.1405,top10E=1.00,eRank=2.7,q75/q25=1089.69 train_time:33625ms step_avg:84.06ms +[2025-08-22 09:06:58] [Rank 0] PRINT: step:400/10000 val_loss:6.0621 svd_entropy: attn_qk:H=0.8011,top10E=0.19,eRank=224.1,q75/q25=14.09 attn_vo:H=0.2587,top10E=0.98,eRank=6.0,q75/q25=122.06 mlp_w1:H=0.5504,top10E=0.61,eRank=41.6,q75/q25=6.84 mlp_w2:H=0.6405,top10E=0.47,eRank=73.0,q75/q25=6.49 vo_prod:H=0.1405,top10E=1.00,eRank=2.7,q75/q25=1089.69 train_time:33625ms step_avg:84.06ms +[2025-08-22 09:06:58] [Rank 0] step:401/10000 train_time:33640ms step_avg:83.89ms +[2025-08-22 09:06:58] [Rank 0] step:401/10000 train_time:33640ms step_avg:83.89ms +[2025-08-22 09:07:00] [Rank 0] step:421/10000 train_time:35286ms step_avg:83.81ms +[2025-08-22 09:07:00] [Rank 0] step:421/10000 train_time:35286ms step_avg:83.81ms +[2025-08-22 09:07:01] [Rank 0] step:441/10000 train_time:36961ms step_avg:83.81ms +[2025-08-22 09:07:01] [Rank 0] step:441/10000 train_time:36961ms step_avg:83.81ms +[2025-08-22 09:07:03] [Rank 0] step:461/10000 train_time:38635ms step_avg:83.81ms +[2025-08-22 09:07:03] [Rank 0] step:461/10000 train_time:38635ms step_avg:83.81ms +[2025-08-22 09:07:05] [Rank 0] step:481/10000 train_time:40313ms step_avg:83.81ms +[2025-08-22 09:07:05] [Rank 0] step:481/10000 train_time:40313ms step_avg:83.81ms +[2025-08-22 09:07:06] [Rank 0] step:501/10000 train_time:41991ms step_avg:83.81ms +[2025-08-22 09:07:06] [Rank 0] step:501/10000 train_time:41991ms step_avg:83.81ms +[2025-08-22 09:07:08] [Rank 0] step:521/10000 train_time:43670ms step_avg:83.82ms +[2025-08-22 09:07:08] [Rank 0] step:521/10000 train_time:43670ms step_avg:83.82ms +[2025-08-22 09:07:10] [Rank 0] step:541/10000 
train_time:45348ms step_avg:83.82ms +[2025-08-22 09:07:10] [Rank 0] step:541/10000 train_time:45348ms step_avg:83.82ms +[2025-08-22 09:07:11] [Rank 0] step:561/10000 train_time:47027ms step_avg:83.83ms +[2025-08-22 09:07:11] [Rank 0] step:561/10000 train_time:47027ms step_avg:83.83ms +[2025-08-22 09:07:13] [Rank 0] step:581/10000 train_time:48708ms step_avg:83.83ms +[2025-08-22 09:07:13] [Rank 0] step:581/10000 train_time:48708ms step_avg:83.83ms +[2025-08-22 09:07:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:07:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:07:28] [Rank 0] PRINT: step:600/10000 val_loss:5.6945 svd_entropy: attn_qk:H=0.8061,top10E=0.16,eRank=230.4,q75/q25=28.24 attn_vo:H=0.3015,top10E=0.96,eRank=8.0,q75/q25=97.36 mlp_w1:H=0.6040,top10E=0.52,eRank=61.4,q75/q25=6.06 mlp_w2:H=0.7128,top10E=0.35,eRank=118.2,q75/q25=7.67 vo_prod:H=0.1961,top10E=1.00,eRank=3.9,q75/q25=950.27 train_time:50471ms step_avg:84.12ms +[2025-08-22 09:07:28] [Rank 0] PRINT: step:600/10000 val_loss:5.6945 svd_entropy: attn_qk:H=0.8061,top10E=0.16,eRank=230.4,q75/q25=28.24 attn_vo:H=0.3015,top10E=0.96,eRank=8.0,q75/q25=97.36 mlp_w1:H=0.6040,top10E=0.52,eRank=61.4,q75/q25=6.06 mlp_w2:H=0.7128,top10E=0.35,eRank=118.2,q75/q25=7.67 vo_prod:H=0.1961,top10E=1.00,eRank=3.9,q75/q25=950.27 train_time:50471ms step_avg:84.12ms +[2025-08-22 09:07:28] [Rank 0] step:601/10000 train_time:50487ms step_avg:84.00ms +[2025-08-22 09:07:28] [Rank 0] step:601/10000 train_time:50487ms step_avg:84.00ms +[2025-08-22 09:07:30] [Rank 0] step:621/10000 train_time:52085ms step_avg:83.87ms +[2025-08-22 09:07:30] [Rank 0] step:621/10000 train_time:52085ms step_avg:83.87ms +[2025-08-22 09:07:32] [Rank 0] step:641/10000 train_time:53759ms step_avg:83.87ms +[2025-08-22 09:07:32] [Rank 0] step:641/10000 
train_time:53759ms step_avg:83.87ms +[2025-08-22 09:07:33] [Rank 0] step:661/10000 train_time:55433ms step_avg:83.86ms +[2025-08-22 09:07:33] [Rank 0] step:661/10000 train_time:55433ms step_avg:83.86ms +[2025-08-22 09:07:35] [Rank 0] step:681/10000 train_time:57108ms step_avg:83.86ms +[2025-08-22 09:07:35] [Rank 0] step:681/10000 train_time:57108ms step_avg:83.86ms +[2025-08-22 09:07:37] [Rank 0] step:701/10000 train_time:58783ms step_avg:83.86ms +[2025-08-22 09:07:37] [Rank 0] step:701/10000 train_time:58783ms step_avg:83.86ms +[2025-08-22 09:07:38] [Rank 0] step:721/10000 train_time:60458ms step_avg:83.85ms +[2025-08-22 09:07:38] [Rank 0] step:721/10000 train_time:60458ms step_avg:83.85ms +[2025-08-22 09:07:40] [Rank 0] step:741/10000 train_time:62134ms step_avg:83.85ms +[2025-08-22 09:07:40] [Rank 0] step:741/10000 train_time:62134ms step_avg:83.85ms +[2025-08-22 09:07:42] [Rank 0] step:761/10000 train_time:63822ms step_avg:83.87ms +[2025-08-22 09:07:42] [Rank 0] step:761/10000 train_time:63822ms step_avg:83.87ms +[2025-08-22 09:07:43] [Rank 0] step:781/10000 train_time:65512ms step_avg:83.88ms +[2025-08-22 09:07:43] [Rank 0] step:781/10000 train_time:65512ms step_avg:83.88ms +[2025-08-22 09:07:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:07:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:07:59] [Rank 0] PRINT: step:800/10000 val_loss:5.4442 svd_entropy: attn_qk:H=0.8141,top10E=0.15,eRank=241.5,q75/q25=43.69 attn_vo:H=0.3309,top10E=0.94,eRank=9.7,q75/q25=84.65 mlp_w1:H=0.6317,top10E=0.47,eRank=74.8,q75/q25=6.26 mlp_w2:H=0.7520,top10E=0.29,eRank=153.3,q75/q25=9.20 vo_prod:H=0.2330,top10E=0.99,eRank=5.1,q75/q25=911.56 train_time:67287ms step_avg:84.11ms +[2025-08-22 09:07:59] [Rank 0] PRINT: step:800/10000 val_loss:5.4442 svd_entropy: attn_qk:H=0.8141,top10E=0.15,eRank=241.5,q75/q25=43.69 attn_vo:H=0.3309,top10E=0.94,eRank=9.7,q75/q25=84.65 mlp_w1:H=0.6317,top10E=0.47,eRank=74.8,q75/q25=6.26 mlp_w2:H=0.7520,top10E=0.29,eRank=153.3,q75/q25=9.20 vo_prod:H=0.2330,top10E=0.99,eRank=5.1,q75/q25=911.56 train_time:67287ms step_avg:84.11ms +[2025-08-22 09:07:59] [Rank 0] step:801/10000 train_time:67302ms step_avg:84.02ms +[2025-08-22 09:07:59] [Rank 0] step:801/10000 train_time:67302ms step_avg:84.02ms +[2025-08-22 09:08:01] [Rank 0] step:821/10000 train_time:68976ms step_avg:84.01ms +[2025-08-22 09:08:01] [Rank 0] step:821/10000 train_time:68976ms step_avg:84.01ms +[2025-08-22 09:08:02] [Rank 0] step:841/10000 train_time:70712ms step_avg:84.08ms +[2025-08-22 09:08:02] [Rank 0] step:841/10000 train_time:70712ms step_avg:84.08ms +[2025-08-22 09:08:04] [Rank 0] step:861/10000 train_time:72397ms step_avg:84.09ms +[2025-08-22 09:08:04] [Rank 0] step:861/10000 train_time:72397ms step_avg:84.09ms +[2025-08-22 09:08:06] [Rank 0] step:881/10000 train_time:74083ms step_avg:84.09ms +[2025-08-22 09:08:06] [Rank 0] step:881/10000 train_time:74083ms step_avg:84.09ms +[2025-08-22 09:08:08] [Rank 0] step:901/10000 train_time:75770ms step_avg:84.10ms +[2025-08-22 09:08:08] [Rank 0] step:901/10000 train_time:75770ms step_avg:84.10ms +[2025-08-22 09:08:09] [Rank 0] step:921/10000 train_time:77458ms step_avg:84.10ms +[2025-08-22 09:08:09] [Rank 0] step:921/10000 train_time:77458ms step_avg:84.10ms +[2025-08-22 09:08:11] [Rank 0] step:941/10000 train_time:79145ms 
step_avg:84.11ms +[2025-08-22 09:08:11] [Rank 0] step:941/10000 train_time:79145ms step_avg:84.11ms +[2025-08-22 09:08:13] [Rank 0] step:961/10000 train_time:80834ms step_avg:84.11ms +[2025-08-22 09:08:13] [Rank 0] step:961/10000 train_time:80834ms step_avg:84.11ms +[2025-08-22 09:08:14] [Rank 0] step:981/10000 train_time:82525ms step_avg:84.12ms +[2025-08-22 09:08:14] [Rank 0] step:981/10000 train_time:82525ms step_avg:84.12ms +[2025-08-22 09:08:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:08:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:08:30] [Rank 0] PRINT: step:1000/10000 val_loss:5.2743 svd_entropy: attn_qk:H=0.8218,top10E=0.14,eRank=253.0,q75/q25=53.60 attn_vo:H=0.3550,top10E=0.91,eRank=11.5,q75/q25=77.80 mlp_w1:H=0.6502,top10E=0.44,eRank=85.4,q75/q25=6.67 mlp_w2:H=0.7783,top10E=0.25,eRank=182.7,q75/q25=10.43 vo_prod:H=0.2605,top10E=0.98,eRank=6.1,q75/q25=959.87 train_time:84300ms step_avg:84.30ms +[2025-08-22 09:08:30] [Rank 0] PRINT: step:1000/10000 val_loss:5.2743 svd_entropy: attn_qk:H=0.8218,top10E=0.14,eRank=253.0,q75/q25=53.60 attn_vo:H=0.3550,top10E=0.91,eRank=11.5,q75/q25=77.80 mlp_w1:H=0.6502,top10E=0.44,eRank=85.4,q75/q25=6.67 mlp_w2:H=0.7783,top10E=0.25,eRank=182.7,q75/q25=10.43 vo_prod:H=0.2605,top10E=0.98,eRank=6.1,q75/q25=959.87 train_time:84300ms step_avg:84.30ms +[2025-08-22 09:08:30] [Rank 0] step:1001/10000 train_time:84315ms step_avg:84.23ms +[2025-08-22 09:08:30] [Rank 0] step:1001/10000 train_time:84315ms step_avg:84.23ms +[2025-08-22 09:08:32] [Rank 0] step:1021/10000 train_time:85922ms step_avg:84.15ms +[2025-08-22 09:08:32] [Rank 0] step:1021/10000 train_time:85922ms step_avg:84.15ms +[2025-08-22 09:08:33] [Rank 0] step:1041/10000 train_time:87611ms step_avg:84.16ms +[2025-08-22 09:08:33] [Rank 0] step:1041/10000 train_time:87611ms 
step_avg:84.16ms +[2025-08-22 09:08:35] [Rank 0] step:1061/10000 train_time:89301ms step_avg:84.17ms +[2025-08-22 09:08:35] [Rank 0] step:1061/10000 train_time:89301ms step_avg:84.17ms +[2025-08-22 09:08:37] [Rank 0] step:1081/10000 train_time:90990ms step_avg:84.17ms +[2025-08-22 09:08:37] [Rank 0] step:1081/10000 train_time:90990ms step_avg:84.17ms +[2025-08-22 09:08:38] [Rank 0] step:1101/10000 train_time:92681ms step_avg:84.18ms +[2025-08-22 09:08:38] [Rank 0] step:1101/10000 train_time:92681ms step_avg:84.18ms +[2025-08-22 09:08:40] [Rank 0] step:1121/10000 train_time:94372ms step_avg:84.19ms +[2025-08-22 09:08:40] [Rank 0] step:1121/10000 train_time:94372ms step_avg:84.19ms +[2025-08-22 09:08:42] [Rank 0] step:1141/10000 train_time:96065ms step_avg:84.19ms +[2025-08-22 09:08:42] [Rank 0] step:1141/10000 train_time:96065ms step_avg:84.19ms +[2025-08-22 09:08:43] [Rank 0] step:1161/10000 train_time:97759ms step_avg:84.20ms +[2025-08-22 09:08:43] [Rank 0] step:1161/10000 train_time:97759ms step_avg:84.20ms +[2025-08-22 09:08:45] [Rank 0] step:1181/10000 train_time:99454ms step_avg:84.21ms +[2025-08-22 09:08:45] [Rank 0] step:1181/10000 train_time:99454ms step_avg:84.21ms +[2025-08-22 09:08:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:08:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:09:01] [Rank 0] PRINT: step:1200/10000 val_loss:5.1162 svd_entropy: attn_qk:H=0.8283,top10E=0.13,eRank=263.2,q75/q25=59.75 attn_vo:H=0.3764,top10E=0.87,eRank=13.4,q75/q25=75.73 mlp_w1:H=0.6637,top10E=0.42,eRank=94.4,q75/q25=7.20 mlp_w2:H=0.7979,top10E=0.23,eRank=208.1,q75/q25=11.07 vo_prod:H=0.2831,top10E=0.96,eRank=7.2,q75/q25=1018.86 train_time:101234ms step_avg:84.36ms +[2025-08-22 09:09:01] [Rank 0] PRINT: step:1200/10000 val_loss:5.1162 svd_entropy: attn_qk:H=0.8283,top10E=0.13,eRank=263.2,q75/q25=59.75 attn_vo:H=0.3764,top10E=0.87,eRank=13.4,q75/q25=75.73 mlp_w1:H=0.6637,top10E=0.42,eRank=94.4,q75/q25=7.20 mlp_w2:H=0.7979,top10E=0.23,eRank=208.1,q75/q25=11.07 vo_prod:H=0.2831,top10E=0.96,eRank=7.2,q75/q25=1018.86 train_time:101234ms step_avg:84.36ms +[2025-08-22 09:09:01] [Rank 0] step:1201/10000 train_time:101249ms step_avg:84.30ms +[2025-08-22 09:09:01] [Rank 0] step:1201/10000 train_time:101249ms step_avg:84.30ms +[2025-08-22 09:09:02] [Rank 0] step:1221/10000 train_time:102866ms step_avg:84.25ms +[2025-08-22 09:09:02] [Rank 0] step:1221/10000 train_time:102866ms step_avg:84.25ms +[2025-08-22 09:09:04] [Rank 0] step:1241/10000 train_time:104610ms step_avg:84.30ms +[2025-08-22 09:09:04] [Rank 0] step:1241/10000 train_time:104610ms step_avg:84.30ms +[2025-08-22 09:09:06] [Rank 0] step:1261/10000 train_time:106371ms step_avg:84.35ms +[2025-08-22 09:09:06] [Rank 0] step:1261/10000 train_time:106371ms step_avg:84.35ms +[2025-08-22 09:09:08] [Rank 0] step:1281/10000 train_time:108060ms step_avg:84.36ms +[2025-08-22 09:09:08] [Rank 0] step:1281/10000 train_time:108060ms step_avg:84.36ms +[2025-08-22 09:09:09] [Rank 0] step:1301/10000 train_time:109747ms step_avg:84.36ms +[2025-08-22 09:09:09] [Rank 0] step:1301/10000 train_time:109747ms step_avg:84.36ms +[2025-08-22 09:09:11] [Rank 0] step:1321/10000 train_time:111436ms step_avg:84.36ms +[2025-08-22 09:09:11] [Rank 0] step:1321/10000 train_time:111436ms step_avg:84.36ms +[2025-08-22 09:09:13] 
[Rank 0] step:1341/10000 train_time:113127ms step_avg:84.36ms +[2025-08-22 09:09:13] [Rank 0] step:1341/10000 train_time:113127ms step_avg:84.36ms +[2025-08-22 09:09:14] [Rank 0] step:1361/10000 train_time:114817ms step_avg:84.36ms +[2025-08-22 09:09:14] [Rank 0] step:1361/10000 train_time:114817ms step_avg:84.36ms +[2025-08-22 09:09:16] [Rank 0] step:1381/10000 train_time:116508ms step_avg:84.36ms +[2025-08-22 09:09:16] [Rank 0] step:1381/10000 train_time:116508ms step_avg:84.36ms +[2025-08-22 09:09:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:09:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:09:31] [Rank 0] PRINT: step:1400/10000 val_loss:4.9956 svd_entropy: attn_qk:H=0.8335,top10E=0.13,eRank=271.7,q75/q25=64.08 attn_vo:H=0.3960,top10E=0.84,eRank=15.6,q75/q25=74.79 mlp_w1:H=0.6750,top10E=0.40,eRank=102.6,q75/q25=7.73 mlp_w2:H=0.8130,top10E=0.21,eRank=230.4,q75/q25=11.36 vo_prod:H=0.3021,top10E=0.95,eRank=8.3,q75/q25=1108.91 train_time:118283ms step_avg:84.49ms +[2025-08-22 09:09:31] [Rank 0] PRINT: step:1400/10000 val_loss:4.9956 svd_entropy: attn_qk:H=0.8335,top10E=0.13,eRank=271.7,q75/q25=64.08 attn_vo:H=0.3960,top10E=0.84,eRank=15.6,q75/q25=74.79 mlp_w1:H=0.6750,top10E=0.40,eRank=102.6,q75/q25=7.73 mlp_w2:H=0.8130,top10E=0.21,eRank=230.4,q75/q25=11.36 vo_prod:H=0.3021,top10E=0.95,eRank=8.3,q75/q25=1108.91 train_time:118283ms step_avg:84.49ms +[2025-08-22 09:09:31] [Rank 0] step:1401/10000 train_time:118298ms step_avg:84.44ms +[2025-08-22 09:09:31] [Rank 0] step:1401/10000 train_time:118298ms step_avg:84.44ms +[2025-08-22 09:09:33] [Rank 0] step:1421/10000 train_time:119921ms step_avg:84.39ms +[2025-08-22 09:09:33] [Rank 0] step:1421/10000 train_time:119921ms step_avg:84.39ms +[2025-08-22 09:09:35] [Rank 0] step:1441/10000 train_time:121608ms step_avg:84.39ms 
+[2025-08-22 09:09:35] [Rank 0] step:1441/10000 train_time:121608ms step_avg:84.39ms +[2025-08-22 09:09:36] [Rank 0] step:1461/10000 train_time:123296ms step_avg:84.39ms +[2025-08-22 09:09:36] [Rank 0] step:1461/10000 train_time:123296ms step_avg:84.39ms +[2025-08-22 09:09:38] [Rank 0] step:1481/10000 train_time:124983ms step_avg:84.39ms +[2025-08-22 09:09:38] [Rank 0] step:1481/10000 train_time:124983ms step_avg:84.39ms +[2025-08-22 09:09:40] [Rank 0] step:1501/10000 train_time:126682ms step_avg:84.40ms +[2025-08-22 09:09:40] [Rank 0] step:1501/10000 train_time:126682ms step_avg:84.40ms +[2025-08-22 09:09:41] [Rank 0] step:1521/10000 train_time:128382ms step_avg:84.41ms +[2025-08-22 09:09:41] [Rank 0] step:1521/10000 train_time:128382ms step_avg:84.41ms +[2025-08-22 09:09:43] [Rank 0] step:1541/10000 train_time:130084ms step_avg:84.42ms +[2025-08-22 09:09:43] [Rank 0] step:1541/10000 train_time:130084ms step_avg:84.42ms +[2025-08-22 09:09:45] [Rank 0] step:1561/10000 train_time:131786ms step_avg:84.42ms +[2025-08-22 09:09:45] [Rank 0] step:1561/10000 train_time:131786ms step_avg:84.42ms +[2025-08-22 09:09:47] [Rank 0] step:1581/10000 train_time:133489ms step_avg:84.43ms +[2025-08-22 09:09:47] [Rank 0] step:1581/10000 train_time:133489ms step_avg:84.43ms +[2025-08-22 09:09:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:09:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:10:02] [Rank 0] PRINT: step:1600/10000 val_loss:4.8516 svd_entropy: attn_qk:H=0.8377,top10E=0.13,eRank=278.7,q75/q25=68.06 attn_vo:H=0.4134,top10E=0.81,eRank=17.7,q75/q25=76.07 mlp_w1:H=0.6844,top10E=0.38,eRank=110.1,q75/q25=8.24 mlp_w2:H=0.8247,top10E=0.20,eRank=249.4,q75/q25=11.38 vo_prod:H=0.3198,top10E=0.93,eRank=9.4,q75/q25=1286.15 train_time:135277ms step_avg:84.55ms +[2025-08-22 09:10:02] [Rank 0] PRINT: step:1600/10000 val_loss:4.8516 svd_entropy: attn_qk:H=0.8377,top10E=0.13,eRank=278.7,q75/q25=68.06 attn_vo:H=0.4134,top10E=0.81,eRank=17.7,q75/q25=76.07 mlp_w1:H=0.6844,top10E=0.38,eRank=110.1,q75/q25=8.24 mlp_w2:H=0.8247,top10E=0.20,eRank=249.4,q75/q25=11.38 vo_prod:H=0.3198,top10E=0.93,eRank=9.4,q75/q25=1286.15 train_time:135277ms step_avg:84.55ms +[2025-08-22 09:10:02] [Rank 0] step:1601/10000 train_time:135291ms step_avg:84.50ms +[2025-08-22 09:10:02] [Rank 0] step:1601/10000 train_time:135291ms step_avg:84.50ms +[2025-08-22 09:10:04] [Rank 0] step:1621/10000 train_time:136908ms step_avg:84.46ms +[2025-08-22 09:10:04] [Rank 0] step:1621/10000 train_time:136908ms step_avg:84.46ms +[2025-08-22 09:10:05] [Rank 0] step:1641/10000 train_time:138608ms step_avg:84.47ms +[2025-08-22 09:10:05] [Rank 0] step:1641/10000 train_time:138608ms step_avg:84.47ms +[2025-08-22 09:10:07] [Rank 0] step:1661/10000 train_time:140353ms step_avg:84.50ms +[2025-08-22 09:10:07] [Rank 0] step:1661/10000 train_time:140353ms step_avg:84.50ms +[2025-08-22 09:10:09] [Rank 0] step:1681/10000 train_time:142124ms step_avg:84.55ms +[2025-08-22 09:10:09] [Rank 0] step:1681/10000 train_time:142124ms step_avg:84.55ms +[2025-08-22 09:10:11] [Rank 0] step:1701/10000 train_time:143827ms step_avg:84.55ms +[2025-08-22 09:10:11] [Rank 0] step:1701/10000 train_time:143827ms step_avg:84.55ms +[2025-08-22 09:10:12] [Rank 0] step:1721/10000 train_time:145532ms step_avg:84.56ms +[2025-08-22 09:10:12] [Rank 0] step:1721/10000 train_time:145532ms step_avg:84.56ms +[2025-08-22 09:10:14] 
[Rank 0] step:1741/10000 train_time:147236ms step_avg:84.57ms +[2025-08-22 09:10:14] [Rank 0] step:1741/10000 train_time:147236ms step_avg:84.57ms +[2025-08-22 09:10:16] [Rank 0] step:1761/10000 train_time:148941ms step_avg:84.58ms +[2025-08-22 09:10:16] [Rank 0] step:1761/10000 train_time:148941ms step_avg:84.58ms +[2025-08-22 09:10:17] [Rank 0] step:1781/10000 train_time:150649ms step_avg:84.59ms +[2025-08-22 09:10:17] [Rank 0] step:1781/10000 train_time:150649ms step_avg:84.59ms +[2025-08-22 09:10:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:10:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:10:33] [Rank 0] PRINT: step:1800/10000 val_loss:4.7373 svd_entropy: attn_qk:H=0.8411,top10E=0.12,eRank=284.5,q75/q25=71.25 attn_vo:H=0.4288,top10E=0.78,eRank=19.8,q75/q25=77.65 mlp_w1:H=0.6924,top10E=0.37,eRank=117.0,q75/q25=8.69 mlp_w2:H=0.8338,top10E=0.19,eRank=265.4,q75/q25=11.13 vo_prod:H=0.3348,top10E=0.91,eRank=10.5,q75/q25=1556.90 train_time:152438ms step_avg:84.69ms +[2025-08-22 09:10:33] [Rank 0] PRINT: step:1800/10000 val_loss:4.7373 svd_entropy: attn_qk:H=0.8411,top10E=0.12,eRank=284.5,q75/q25=71.25 attn_vo:H=0.4288,top10E=0.78,eRank=19.8,q75/q25=77.65 mlp_w1:H=0.6924,top10E=0.37,eRank=117.0,q75/q25=8.69 mlp_w2:H=0.8338,top10E=0.19,eRank=265.4,q75/q25=11.13 vo_prod:H=0.3348,top10E=0.91,eRank=10.5,q75/q25=1556.90 train_time:152438ms step_avg:84.69ms +[2025-08-22 09:10:33] [Rank 0] step:1801/10000 train_time:152454ms step_avg:84.65ms +[2025-08-22 09:10:33] [Rank 0] step:1801/10000 train_time:152454ms step_avg:84.65ms +[2025-08-22 09:10:34] [Rank 0] step:1821/10000 train_time:154083ms step_avg:84.61ms +[2025-08-22 09:10:34] [Rank 0] step:1821/10000 train_time:154083ms step_avg:84.61ms +[2025-08-22 09:10:36] [Rank 0] step:1841/10000 train_time:155780ms step_avg:84.62ms 
+[2025-08-22 09:10:36] [Rank 0] step:1841/10000 train_time:155780ms step_avg:84.62ms +[2025-08-22 09:10:38] [Rank 0] step:1861/10000 train_time:157479ms step_avg:84.62ms +[2025-08-22 09:10:38] [Rank 0] step:1861/10000 train_time:157479ms step_avg:84.62ms +[2025-08-22 09:10:40] [Rank 0] step:1881/10000 train_time:159178ms step_avg:84.62ms +[2025-08-22 09:10:40] [Rank 0] step:1881/10000 train_time:159178ms step_avg:84.62ms +[2025-08-22 09:10:41] [Rank 0] step:1901/10000 train_time:160878ms step_avg:84.63ms +[2025-08-22 09:10:41] [Rank 0] step:1901/10000 train_time:160878ms step_avg:84.63ms +[2025-08-22 09:10:43] [Rank 0] step:1921/10000 train_time:162580ms step_avg:84.63ms +[2025-08-22 09:10:43] [Rank 0] step:1921/10000 train_time:162580ms step_avg:84.63ms +[2025-08-22 09:10:45] [Rank 0] step:1941/10000 train_time:164279ms step_avg:84.64ms +[2025-08-22 09:10:45] [Rank 0] step:1941/10000 train_time:164279ms step_avg:84.64ms +[2025-08-22 09:10:46] [Rank 0] step:1961/10000 train_time:165981ms step_avg:84.64ms +[2025-08-22 09:10:46] [Rank 0] step:1961/10000 train_time:165981ms step_avg:84.64ms +[2025-08-22 09:10:48] [Rank 0] step:1981/10000 train_time:167684ms step_avg:84.65ms +[2025-08-22 09:10:48] [Rank 0] step:1981/10000 train_time:167684ms step_avg:84.65ms +[2025-08-22 09:10:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:10:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:11:03] [Rank 0] PRINT: step:2000/10000 val_loss:4.6833 svd_entropy: attn_qk:H=0.8441,top10E=0.12,eRank=289.8,q75/q25=73.49 attn_vo:H=0.4421,top10E=0.76,eRank=21.9,q75/q25=80.45 mlp_w1:H=0.6996,top10E=0.36,eRank=123.3,q75/q25=9.07 mlp_w2:H=0.8411,top10E=0.18,eRank=279.1,q75/q25=10.90 vo_prod:H=0.3484,top10E=0.90,eRank=11.6,q75/q25=1895.98 train_time:169472ms step_avg:84.74ms +[2025-08-22 09:11:03] [Rank 0] PRINT: step:2000/10000 val_loss:4.6833 svd_entropy: attn_qk:H=0.8441,top10E=0.12,eRank=289.8,q75/q25=73.49 attn_vo:H=0.4421,top10E=0.76,eRank=21.9,q75/q25=80.45 mlp_w1:H=0.6996,top10E=0.36,eRank=123.3,q75/q25=9.07 mlp_w2:H=0.8411,top10E=0.18,eRank=279.1,q75/q25=10.90 vo_prod:H=0.3484,top10E=0.90,eRank=11.6,q75/q25=1895.98 train_time:169472ms step_avg:84.74ms +[2025-08-22 09:11:03] [Rank 0] step:2001/10000 train_time:169487ms step_avg:84.70ms +[2025-08-22 09:11:03] [Rank 0] step:2001/10000 train_time:169487ms step_avg:84.70ms +[2025-08-22 09:11:05] [Rank 0] step:2021/10000 train_time:171120ms step_avg:84.67ms +[2025-08-22 09:11:05] [Rank 0] step:2021/10000 train_time:171120ms step_avg:84.67ms +[2025-08-22 09:11:07] [Rank 0] step:2041/10000 train_time:173401ms step_avg:84.96ms +[2025-08-22 09:11:07] [Rank 0] step:2041/10000 train_time:173401ms step_avg:84.96ms +[2025-08-22 09:11:09] [Rank 0] step:2061/10000 train_time:175242ms step_avg:85.03ms +[2025-08-22 09:11:09] [Rank 0] step:2061/10000 train_time:175242ms step_avg:85.03ms +[2025-08-22 09:11:11] [Rank 0] step:2081/10000 train_time:176814ms step_avg:84.97ms +[2025-08-22 09:11:11] [Rank 0] step:2081/10000 train_time:176814ms step_avg:84.97ms +[2025-08-22 09:11:12] [Rank 0] step:2101/10000 train_time:178580ms step_avg:85.00ms +[2025-08-22 09:11:12] [Rank 0] step:2101/10000 train_time:178580ms step_avg:85.00ms +[2025-08-22 09:11:14] [Rank 0] step:2121/10000 train_time:180281ms step_avg:85.00ms +[2025-08-22 09:11:14] [Rank 0] step:2121/10000 train_time:180281ms step_avg:85.00ms +[2025-08-22 09:11:16] 
[Rank 0] step:2141/10000 train_time:181981ms step_avg:85.00ms +[2025-08-22 09:11:16] [Rank 0] step:2141/10000 train_time:181981ms step_avg:85.00ms +[2025-08-22 09:11:18] [Rank 0] step:2161/10000 train_time:183682ms step_avg:85.00ms +[2025-08-22 09:11:18] [Rank 0] step:2161/10000 train_time:183682ms step_avg:85.00ms +[2025-08-22 09:11:19] [Rank 0] step:2181/10000 train_time:185384ms step_avg:85.00ms +[2025-08-22 09:11:19] [Rank 0] step:2181/10000 train_time:185384ms step_avg:85.00ms +[2025-08-22 09:11:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:11:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:11:35] [Rank 0] PRINT: step:2200/10000 val_loss:4.5995 svd_entropy: attn_qk:H=0.8470,top10E=0.12,eRank=295.6,q75/q25=76.38 attn_vo:H=0.4547,top10E=0.74,eRank=23.9,q75/q25=84.30 mlp_w1:H=0.7062,top10E=0.35,eRank=129.3,q75/q25=9.40 mlp_w2:H=0.8470,top10E=0.18,eRank=290.5,q75/q25=10.64 vo_prod:H=0.3606,top10E=0.88,eRank=12.6,q75/q25=2429.47 train_time:187170ms step_avg:85.08ms +[2025-08-22 09:11:35] [Rank 0] PRINT: step:2200/10000 val_loss:4.5995 svd_entropy: attn_qk:H=0.8470,top10E=0.12,eRank=295.6,q75/q25=76.38 attn_vo:H=0.4547,top10E=0.74,eRank=23.9,q75/q25=84.30 mlp_w1:H=0.7062,top10E=0.35,eRank=129.3,q75/q25=9.40 mlp_w2:H=0.8470,top10E=0.18,eRank=290.5,q75/q25=10.64 vo_prod:H=0.3606,top10E=0.88,eRank=12.6,q75/q25=2429.47 train_time:187170ms step_avg:85.08ms +[2025-08-22 09:11:35] [Rank 0] step:2201/10000 train_time:187184ms step_avg:85.05ms +[2025-08-22 09:11:35] [Rank 0] step:2201/10000 train_time:187184ms step_avg:85.05ms +[2025-08-22 09:11:36] [Rank 0] step:2221/10000 train_time:188812ms step_avg:85.01ms +[2025-08-22 09:11:36] [Rank 0] step:2221/10000 train_time:188812ms step_avg:85.01ms +[2025-08-22 09:11:38] [Rank 0] step:2241/10000 train_time:190546ms step_avg:85.03ms 
+[2025-08-22 09:11:38] [Rank 0] step:2241/10000 train_time:190546ms step_avg:85.03ms +[2025-08-22 09:11:40] [Rank 0] step:2261/10000 train_time:192289ms step_avg:85.05ms +[2025-08-22 09:11:40] [Rank 0] step:2261/10000 train_time:192289ms step_avg:85.05ms +[2025-08-22 09:11:42] [Rank 0] step:2281/10000 train_time:194032ms step_avg:85.06ms +[2025-08-22 09:11:42] [Rank 0] step:2281/10000 train_time:194032ms step_avg:85.06ms +[2025-08-22 09:11:43] [Rank 0] step:2301/10000 train_time:195776ms step_avg:85.08ms +[2025-08-22 09:11:43] [Rank 0] step:2301/10000 train_time:195776ms step_avg:85.08ms +[2025-08-22 09:11:45] [Rank 0] step:2321/10000 train_time:197521ms step_avg:85.10ms +[2025-08-22 09:11:45] [Rank 0] step:2321/10000 train_time:197521ms step_avg:85.10ms +[2025-08-22 09:11:47] [Rank 0] step:2341/10000 train_time:199268ms step_avg:85.12ms +[2025-08-22 09:11:47] [Rank 0] step:2341/10000 train_time:199268ms step_avg:85.12ms +[2025-08-22 09:11:49] [Rank 0] step:2361/10000 train_time:201014ms step_avg:85.14ms +[2025-08-22 09:11:49] [Rank 0] step:2361/10000 train_time:201014ms step_avg:85.14ms +[2025-08-22 09:11:50] [Rank 0] step:2381/10000 train_time:202761ms step_avg:85.16ms +[2025-08-22 09:11:50] [Rank 0] step:2381/10000 train_time:202761ms step_avg:85.16ms +[2025-08-22 09:11:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:11:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:12:06] [Rank 0] PRINT: step:2400/10000 val_loss:4.5261 svd_entropy: attn_qk:H=0.8492,top10E=0.11,eRank=299.4,q75/q25=77.23 attn_vo:H=0.4648,top10E=0.72,eRank=25.8,q75/q25=88.06 mlp_w1:H=0.7123,top10E=0.34,eRank=135.1,q75/q25=9.65 mlp_w2:H=0.8520,top10E=0.17,eRank=300.5,q75/q25=10.31 vo_prod:H=0.3699,top10E=0.87,eRank=13.5,q75/q25=3036.76 train_time:204595ms step_avg:85.25ms +[2025-08-22 09:12:06] [Rank 0] PRINT: step:2400/10000 val_loss:4.5261 svd_entropy: attn_qk:H=0.8492,top10E=0.11,eRank=299.4,q75/q25=77.23 attn_vo:H=0.4648,top10E=0.72,eRank=25.8,q75/q25=88.06 mlp_w1:H=0.7123,top10E=0.34,eRank=135.1,q75/q25=9.65 mlp_w2:H=0.8520,top10E=0.17,eRank=300.5,q75/q25=10.31 vo_prod:H=0.3699,top10E=0.87,eRank=13.5,q75/q25=3036.76 train_time:204595ms step_avg:85.25ms +[2025-08-22 09:12:06] [Rank 0] step:2401/10000 train_time:204611ms step_avg:85.22ms +[2025-08-22 09:12:06] [Rank 0] step:2401/10000 train_time:204611ms step_avg:85.22ms +[2025-08-22 09:12:07] [Rank 0] step:2421/10000 train_time:206279ms step_avg:85.20ms +[2025-08-22 09:12:07] [Rank 0] step:2421/10000 train_time:206279ms step_avg:85.20ms +[2025-08-22 09:12:09] [Rank 0] step:2441/10000 train_time:208022ms step_avg:85.22ms +[2025-08-22 09:12:09] [Rank 0] step:2441/10000 train_time:208022ms step_avg:85.22ms +[2025-08-22 09:12:11] [Rank 0] step:2461/10000 train_time:209769ms step_avg:85.24ms +[2025-08-22 09:12:11] [Rank 0] step:2461/10000 train_time:209769ms step_avg:85.24ms +[2025-08-22 09:12:13] [Rank 0] step:2481/10000 train_time:211571ms step_avg:85.28ms +[2025-08-22 09:12:13] [Rank 0] step:2481/10000 train_time:211571ms step_avg:85.28ms +[2025-08-22 09:12:15] [Rank 0] step:2501/10000 train_time:213379ms step_avg:85.32ms +[2025-08-22 09:12:15] [Rank 0] step:2501/10000 train_time:213379ms step_avg:85.32ms +[2025-08-22 09:12:16] [Rank 0] step:2521/10000 train_time:215129ms step_avg:85.33ms +[2025-08-22 09:12:16] [Rank 0] step:2521/10000 train_time:215129ms step_avg:85.33ms +[2025-08-22 09:12:18] 
[Rank 0] step:2541/10000 train_time:216878ms step_avg:85.35ms +[2025-08-22 09:12:18] [Rank 0] step:2541/10000 train_time:216878ms step_avg:85.35ms +[2025-08-22 09:12:20] [Rank 0] step:2561/10000 train_time:218629ms step_avg:85.37ms +[2025-08-22 09:12:20] [Rank 0] step:2561/10000 train_time:218629ms step_avg:85.37ms +[2025-08-22 09:12:22] [Rank 0] step:2581/10000 train_time:220382ms step_avg:85.39ms +[2025-08-22 09:12:22] [Rank 0] step:2581/10000 train_time:220382ms step_avg:85.39ms +[2025-08-22 09:12:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:12:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:12:37] [Rank 0] PRINT: step:2600/10000 val_loss:4.4577 svd_entropy: attn_qk:H=0.8511,top10E=0.11,eRank=302.8,q75/q25=78.31 attn_vo:H=0.4740,top10E=0.70,eRank=27.6,q75/q25=93.30 mlp_w1:H=0.7175,top10E=0.33,eRank=140.2,q75/q25=9.91 mlp_w2:H=0.8563,top10E=0.17,eRank=309.3,q75/q25=10.07 vo_prod:H=0.3786,top10E=0.86,eRank=14.4,q75/q25=3764.08 train_time:222222ms step_avg:85.47ms +[2025-08-22 09:12:37] [Rank 0] PRINT: step:2600/10000 val_loss:4.4577 svd_entropy: attn_qk:H=0.8511,top10E=0.11,eRank=302.8,q75/q25=78.31 attn_vo:H=0.4740,top10E=0.70,eRank=27.6,q75/q25=93.30 mlp_w1:H=0.7175,top10E=0.33,eRank=140.2,q75/q25=9.91 mlp_w2:H=0.8563,top10E=0.17,eRank=309.3,q75/q25=10.07 vo_prod:H=0.3786,top10E=0.86,eRank=14.4,q75/q25=3764.08 train_time:222222ms step_avg:85.47ms +[2025-08-22 09:12:37] [Rank 0] step:2601/10000 train_time:222236ms step_avg:85.44ms +[2025-08-22 09:12:37] [Rank 0] step:2601/10000 train_time:222236ms step_avg:85.44ms +[2025-08-22 09:12:39] [Rank 0] step:2621/10000 train_time:223913ms step_avg:85.43ms +[2025-08-22 09:12:39] [Rank 0] step:2621/10000 train_time:223913ms step_avg:85.43ms +[2025-08-22 09:12:40] [Rank 0] step:2641/10000 train_time:225657ms step_avg:85.44ms 
+[2025-08-22 09:12:40] [Rank 0] step:2641/10000 train_time:225657ms step_avg:85.44ms +[2025-08-22 09:12:42] [Rank 0] step:2661/10000 train_time:227402ms step_avg:85.46ms +[2025-08-22 09:12:42] [Rank 0] step:2661/10000 train_time:227402ms step_avg:85.46ms +[2025-08-22 09:12:44] [Rank 0] step:2681/10000 train_time:229149ms step_avg:85.47ms +[2025-08-22 09:12:44] [Rank 0] step:2681/10000 train_time:229149ms step_avg:85.47ms +[2025-08-22 09:12:46] [Rank 0] step:2701/10000 train_time:230897ms step_avg:85.49ms +[2025-08-22 09:12:46] [Rank 0] step:2701/10000 train_time:230897ms step_avg:85.49ms +[2025-08-22 09:12:47] [Rank 0] step:2721/10000 train_time:232642ms step_avg:85.50ms +[2025-08-22 09:12:47] [Rank 0] step:2721/10000 train_time:232642ms step_avg:85.50ms +[2025-08-22 09:12:49] [Rank 0] step:2741/10000 train_time:234390ms step_avg:85.51ms +[2025-08-22 09:12:49] [Rank 0] step:2741/10000 train_time:234390ms step_avg:85.51ms +[2025-08-22 09:12:51] [Rank 0] step:2761/10000 train_time:236137ms step_avg:85.53ms +[2025-08-22 09:12:51] [Rank 0] step:2761/10000 train_time:236137ms step_avg:85.53ms +[2025-08-22 09:12:53] [Rank 0] step:2781/10000 train_time:237885ms step_avg:85.54ms +[2025-08-22 09:12:53] [Rank 0] step:2781/10000 train_time:237885ms step_avg:85.54ms +[2025-08-22 09:12:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:12:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:13:08] [Rank 0] PRINT: step:2800/10000 val_loss:4.4269 svd_entropy: attn_qk:H=0.8531,top10E=0.11,eRank=306.3,q75/q25=78.90 attn_vo:H=0.4827,top10E=0.68,eRank=29.4,q75/q25=97.55 mlp_w1:H=0.7225,top10E=0.33,eRank=145.2,q75/q25=10.11 mlp_w2:H=0.8603,top10E=0.16,eRank=317.5,q75/q25=9.84 vo_prod:H=0.3870,top10E=0.85,eRank=15.3,q75/q25=4612.00 train_time:239720ms step_avg:85.61ms +[2025-08-22 09:13:08] [Rank 0] PRINT: step:2800/10000 val_loss:4.4269 svd_entropy: attn_qk:H=0.8531,top10E=0.11,eRank=306.3,q75/q25=78.90 attn_vo:H=0.4827,top10E=0.68,eRank=29.4,q75/q25=97.55 mlp_w1:H=0.7225,top10E=0.33,eRank=145.2,q75/q25=10.11 mlp_w2:H=0.8603,top10E=0.16,eRank=317.5,q75/q25=9.84 vo_prod:H=0.3870,top10E=0.85,eRank=15.3,q75/q25=4612.00 train_time:239720ms step_avg:85.61ms +[2025-08-22 09:13:08] [Rank 0] step:2801/10000 train_time:239735ms step_avg:85.59ms +[2025-08-22 09:13:08] [Rank 0] step:2801/10000 train_time:239735ms step_avg:85.59ms +[2025-08-22 09:13:10] [Rank 0] step:2821/10000 train_time:241401ms step_avg:85.57ms +[2025-08-22 09:13:10] [Rank 0] step:2821/10000 train_time:241401ms step_avg:85.57ms +[2025-08-22 09:13:12] [Rank 0] step:2841/10000 train_time:243143ms step_avg:85.58ms +[2025-08-22 09:13:12] [Rank 0] step:2841/10000 train_time:243143ms step_avg:85.58ms +[2025-08-22 09:13:13] [Rank 0] step:2861/10000 train_time:244888ms step_avg:85.60ms +[2025-08-22 09:13:13] [Rank 0] step:2861/10000 train_time:244888ms step_avg:85.60ms +[2025-08-22 09:13:15] [Rank 0] step:2881/10000 train_time:246633ms step_avg:85.61ms +[2025-08-22 09:13:15] [Rank 0] step:2881/10000 train_time:246633ms step_avg:85.61ms +[2025-08-22 09:13:17] [Rank 0] step:2901/10000 train_time:248422ms step_avg:85.63ms +[2025-08-22 09:13:17] [Rank 0] step:2901/10000 train_time:248422ms step_avg:85.63ms +[2025-08-22 09:13:19] [Rank 0] step:2921/10000 train_time:250220ms step_avg:85.66ms +[2025-08-22 09:13:19] [Rank 0] step:2921/10000 train_time:250220ms step_avg:85.66ms +[2025-08-22 09:13:20] 
[Rank 0] step:2941/10000 train_time:251966ms step_avg:85.67ms +[2025-08-22 09:13:20] [Rank 0] step:2941/10000 train_time:251966ms step_avg:85.67ms +[2025-08-22 09:13:22] [Rank 0] step:2961/10000 train_time:253713ms step_avg:85.68ms +[2025-08-22 09:13:22] [Rank 0] step:2961/10000 train_time:253713ms step_avg:85.68ms +[2025-08-22 09:13:24] [Rank 0] step:2981/10000 train_time:255467ms step_avg:85.70ms +[2025-08-22 09:13:24] [Rank 0] step:2981/10000 train_time:255467ms step_avg:85.70ms +[2025-08-22 09:13:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:13:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:13:39] [Rank 0] PRINT: step:3000/10000 val_loss:4.3816 svd_entropy: attn_qk:H=0.8549,top10E=0.11,eRank=309.4,q75/q25=79.70 attn_vo:H=0.4903,top10E=0.66,eRank=31.1,q75/q25=101.45 mlp_w1:H=0.7271,top10E=0.32,eRank=149.8,q75/q25=10.27 mlp_w2:H=0.8636,top10E=0.16,eRank=324.6,q75/q25=9.59 vo_prod:H=0.3944,top10E=0.84,eRank=16.1,q75/q25=5455.05 train_time:257310ms step_avg:85.77ms +[2025-08-22 09:13:39] [Rank 0] PRINT: step:3000/10000 val_loss:4.3816 svd_entropy: attn_qk:H=0.8549,top10E=0.11,eRank=309.4,q75/q25=79.70 attn_vo:H=0.4903,top10E=0.66,eRank=31.1,q75/q25=101.45 mlp_w1:H=0.7271,top10E=0.32,eRank=149.8,q75/q25=10.27 mlp_w2:H=0.8636,top10E=0.16,eRank=324.6,q75/q25=9.59 vo_prod:H=0.3944,top10E=0.84,eRank=16.1,q75/q25=5455.05 train_time:257310ms step_avg:85.77ms +[2025-08-22 09:13:39] [Rank 0] step:3001/10000 train_time:257325ms step_avg:85.75ms +[2025-08-22 09:13:39] [Rank 0] step:3001/10000 train_time:257325ms step_avg:85.75ms +[2025-08-22 09:13:41] [Rank 0] step:3021/10000 train_time:259015ms step_avg:85.74ms +[2025-08-22 09:13:41] [Rank 0] step:3021/10000 train_time:259015ms step_avg:85.74ms +[2025-08-22 09:13:43] [Rank 0] step:3041/10000 train_time:260768ms step_avg:85.75ms 
+[2025-08-22 09:13:43] [Rank 0] step:3041/10000 train_time:260768ms step_avg:85.75ms +[2025-08-22 09:13:45] [Rank 0] step:3061/10000 train_time:262523ms step_avg:85.76ms +[2025-08-22 09:13:45] [Rank 0] step:3061/10000 train_time:262523ms step_avg:85.76ms +[2025-08-22 09:13:46] [Rank 0] step:3081/10000 train_time:264280ms step_avg:85.78ms +[2025-08-22 09:13:46] [Rank 0] step:3081/10000 train_time:264280ms step_avg:85.78ms +[2025-08-22 09:13:48] [Rank 0] step:3101/10000 train_time:266037ms step_avg:85.79ms +[2025-08-22 09:13:48] [Rank 0] step:3101/10000 train_time:266037ms step_avg:85.79ms +[2025-08-22 09:13:50] [Rank 0] step:3121/10000 train_time:267794ms step_avg:85.80ms +[2025-08-22 09:13:50] [Rank 0] step:3121/10000 train_time:267794ms step_avg:85.80ms +[2025-08-22 09:13:52] [Rank 0] step:3141/10000 train_time:269553ms step_avg:85.82ms +[2025-08-22 09:13:52] [Rank 0] step:3141/10000 train_time:269553ms step_avg:85.82ms +[2025-08-22 09:13:53] [Rank 0] step:3161/10000 train_time:271313ms step_avg:85.83ms +[2025-08-22 09:13:53] [Rank 0] step:3161/10000 train_time:271313ms step_avg:85.83ms +[2025-08-22 09:13:55] [Rank 0] step:3181/10000 train_time:273074ms step_avg:85.85ms +[2025-08-22 09:13:55] [Rank 0] step:3181/10000 train_time:273074ms step_avg:85.85ms +[2025-08-22 09:13:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:13:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:14:10] [Rank 0] PRINT: step:3200/10000 val_loss:4.3523 svd_entropy: attn_qk:H=0.8564,top10E=0.11,eRank=312.1,q75/q25=80.71 attn_vo:H=0.4974,top10E=0.65,eRank=32.8,q75/q25=105.76 mlp_w1:H=0.7310,top10E=0.31,eRank=154.0,q75/q25=10.41 mlp_w2:H=0.8667,top10E=0.16,eRank=331.2,q75/q25=9.40 vo_prod:H=0.4011,top10E=0.82,eRank=17.0,q75/q25=6336.16 train_time:274921ms step_avg:85.91ms +[2025-08-22 09:14:10] [Rank 0] PRINT: step:3200/10000 val_loss:4.3523 svd_entropy: attn_qk:H=0.8564,top10E=0.11,eRank=312.1,q75/q25=80.71 attn_vo:H=0.4974,top10E=0.65,eRank=32.8,q75/q25=105.76 mlp_w1:H=0.7310,top10E=0.31,eRank=154.0,q75/q25=10.41 mlp_w2:H=0.8667,top10E=0.16,eRank=331.2,q75/q25=9.40 vo_prod:H=0.4011,top10E=0.82,eRank=17.0,q75/q25=6336.16 train_time:274921ms step_avg:85.91ms +[2025-08-22 09:14:11] [Rank 0] step:3201/10000 train_time:274936ms step_avg:85.89ms +[2025-08-22 09:14:11] [Rank 0] step:3201/10000 train_time:274936ms step_avg:85.89ms +[2025-08-22 09:14:12] [Rank 0] step:3221/10000 train_time:276601ms step_avg:85.87ms +[2025-08-22 09:14:12] [Rank 0] step:3221/10000 train_time:276601ms step_avg:85.87ms +[2025-08-22 09:14:14] [Rank 0] step:3241/10000 train_time:278351ms step_avg:85.88ms +[2025-08-22 09:14:14] [Rank 0] step:3241/10000 train_time:278351ms step_avg:85.88ms +[2025-08-22 09:14:16] [Rank 0] step:3261/10000 train_time:280102ms step_avg:85.89ms +[2025-08-22 09:14:16] [Rank 0] step:3261/10000 train_time:280102ms step_avg:85.89ms +[2025-08-22 09:14:18] [Rank 0] step:3281/10000 train_time:281856ms step_avg:85.91ms +[2025-08-22 09:14:18] [Rank 0] step:3281/10000 train_time:281856ms step_avg:85.91ms +[2025-08-22 09:14:19] [Rank 0] step:3301/10000 train_time:283609ms step_avg:85.92ms +[2025-08-22 09:14:19] [Rank 0] step:3301/10000 train_time:283609ms step_avg:85.92ms +[2025-08-22 09:14:21] [Rank 0] step:3321/10000 train_time:285414ms step_avg:85.94ms +[2025-08-22 09:14:21] [Rank 0] step:3321/10000 train_time:285414ms step_avg:85.94ms +[2025-08-22 
09:14:23] [Rank 0] step:3341/10000 train_time:287168ms step_avg:85.95ms +[2025-08-22 09:14:23] [Rank 0] step:3341/10000 train_time:287168ms step_avg:85.95ms +[2025-08-22 09:14:25] [Rank 0] step:3361/10000 train_time:288922ms step_avg:85.96ms +[2025-08-22 09:14:25] [Rank 0] step:3361/10000 train_time:288922ms step_avg:85.96ms +[2025-08-22 09:14:26] [Rank 0] step:3381/10000 train_time:290677ms step_avg:85.97ms +[2025-08-22 09:14:26] [Rank 0] step:3381/10000 train_time:290677ms step_avg:85.97ms +[2025-08-22 09:14:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:14:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:14:42] [Rank 0] PRINT: step:3400/10000 val_loss:4.3083 svd_entropy: attn_qk:H=0.8579,top10E=0.11,eRank=314.9,q75/q25=80.83 attn_vo:H=0.5043,top10E=0.63,eRank=34.5,q75/q25=109.76 mlp_w1:H=0.7347,top10E=0.31,eRank=158.1,q75/q25=10.53 mlp_w2:H=0.8695,top10E=0.16,eRank=337.3,q75/q25=9.18 vo_prod:H=0.4079,top10E=0.81,eRank=17.8,q75/q25=7347.26 train_time:292520ms step_avg:86.04ms +[2025-08-22 09:14:42] [Rank 0] PRINT: step:3400/10000 val_loss:4.3083 svd_entropy: attn_qk:H=0.8579,top10E=0.11,eRank=314.9,q75/q25=80.83 attn_vo:H=0.5043,top10E=0.63,eRank=34.5,q75/q25=109.76 mlp_w1:H=0.7347,top10E=0.31,eRank=158.1,q75/q25=10.53 mlp_w2:H=0.8695,top10E=0.16,eRank=337.3,q75/q25=9.18 vo_prod:H=0.4079,top10E=0.81,eRank=17.8,q75/q25=7347.26 train_time:292520ms step_avg:86.04ms +[2025-08-22 09:14:42] [Rank 0] step:3401/10000 train_time:292535ms step_avg:86.01ms +[2025-08-22 09:14:42] [Rank 0] step:3401/10000 train_time:292535ms step_avg:86.01ms +[2025-08-22 09:14:44] [Rank 0] step:3421/10000 train_time:294206ms step_avg:86.00ms +[2025-08-22 09:14:44] [Rank 0] step:3421/10000 train_time:294206ms step_avg:86.00ms +[2025-08-22 09:14:45] [Rank 0] step:3441/10000 train_time:295955ms 
step_avg:86.01ms +[2025-08-22 09:14:45] [Rank 0] step:3441/10000 train_time:295955ms step_avg:86.01ms +[2025-08-22 09:14:47] [Rank 0] step:3461/10000 train_time:297705ms step_avg:86.02ms +[2025-08-22 09:14:47] [Rank 0] step:3461/10000 train_time:297705ms step_avg:86.02ms +[2025-08-22 09:14:49] [Rank 0] step:3481/10000 train_time:299454ms step_avg:86.03ms +[2025-08-22 09:14:49] [Rank 0] step:3481/10000 train_time:299454ms step_avg:86.03ms +[2025-08-22 09:14:51] [Rank 0] step:3501/10000 train_time:301207ms step_avg:86.03ms +[2025-08-22 09:14:51] [Rank 0] step:3501/10000 train_time:301207ms step_avg:86.03ms +[2025-08-22 09:14:52] [Rank 0] step:3521/10000 train_time:302959ms step_avg:86.04ms +[2025-08-22 09:14:52] [Rank 0] step:3521/10000 train_time:302959ms step_avg:86.04ms +[2025-08-22 09:14:54] [Rank 0] step:3541/10000 train_time:304710ms step_avg:86.05ms +[2025-08-22 09:14:54] [Rank 0] step:3541/10000 train_time:304710ms step_avg:86.05ms +[2025-08-22 09:14:56] [Rank 0] step:3561/10000 train_time:306461ms step_avg:86.06ms +[2025-08-22 09:14:56] [Rank 0] step:3561/10000 train_time:306461ms step_avg:86.06ms +[2025-08-22 09:14:58] [Rank 0] step:3581/10000 train_time:308215ms step_avg:86.07ms +[2025-08-22 09:14:58] [Rank 0] step:3581/10000 train_time:308215ms step_avg:86.07ms +[2025-08-22 09:14:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:14:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:15:13] [Rank 0] PRINT: step:3600/10000 val_loss:4.3153 svd_entropy: attn_qk:H=0.8593,top10E=0.11,eRank=317.6,q75/q25=81.98 attn_vo:H=0.5104,top10E=0.62,eRank=36.1,q75/q25=112.65 mlp_w1:H=0.7379,top10E=0.30,eRank=161.8,q75/q25=10.62 mlp_w2:H=0.8719,top10E=0.15,eRank=342.8,q75/q25=9.01 vo_prod:H=0.4138,top10E=0.80,eRank=18.6,q75/q25=8212.54 train_time:310056ms step_avg:86.13ms +[2025-08-22 09:15:13] [Rank 0] PRINT: step:3600/10000 val_loss:4.3153 svd_entropy: attn_qk:H=0.8593,top10E=0.11,eRank=317.6,q75/q25=81.98 attn_vo:H=0.5104,top10E=0.62,eRank=36.1,q75/q25=112.65 mlp_w1:H=0.7379,top10E=0.30,eRank=161.8,q75/q25=10.62 mlp_w2:H=0.8719,top10E=0.15,eRank=342.8,q75/q25=9.01 vo_prod:H=0.4138,top10E=0.80,eRank=18.6,q75/q25=8212.54 train_time:310056ms step_avg:86.13ms +[2025-08-22 09:15:13] [Rank 0] step:3601/10000 train_time:310071ms step_avg:86.11ms +[2025-08-22 09:15:13] [Rank 0] step:3601/10000 train_time:310071ms step_avg:86.11ms +[2025-08-22 09:15:15] [Rank 0] step:3621/10000 train_time:311754ms step_avg:86.10ms +[2025-08-22 09:15:15] [Rank 0] step:3621/10000 train_time:311754ms step_avg:86.10ms +[2025-08-22 09:15:16] [Rank 0] step:3641/10000 train_time:313502ms step_avg:86.10ms +[2025-08-22 09:15:16] [Rank 0] step:3641/10000 train_time:313502ms step_avg:86.10ms +[2025-08-22 09:15:18] [Rank 0] step:3661/10000 train_time:315251ms step_avg:86.11ms +[2025-08-22 09:15:18] [Rank 0] step:3661/10000 train_time:315251ms step_avg:86.11ms +[2025-08-22 09:15:20] [Rank 0] step:3681/10000 train_time:317002ms step_avg:86.12ms +[2025-08-22 09:15:20] [Rank 0] step:3681/10000 train_time:317002ms step_avg:86.12ms +[2025-08-22 09:15:22] [Rank 0] step:3701/10000 train_time:318752ms step_avg:86.13ms +[2025-08-22 09:15:22] [Rank 0] step:3701/10000 train_time:318752ms step_avg:86.13ms +[2025-08-22 09:15:24] [Rank 0] step:3721/10000 train_time:320635ms step_avg:86.17ms +[2025-08-22 09:15:24] [Rank 0] step:3721/10000 train_time:320635ms step_avg:86.17ms +[2025-08-22 
09:15:25] [Rank 0] step:3741/10000 train_time:322423ms step_avg:86.19ms +[2025-08-22 09:15:25] [Rank 0] step:3741/10000 train_time:322423ms step_avg:86.19ms +[2025-08-22 09:15:27] [Rank 0] step:3761/10000 train_time:324214ms step_avg:86.20ms +[2025-08-22 09:15:27] [Rank 0] step:3761/10000 train_time:324214ms step_avg:86.20ms +[2025-08-22 09:15:29] [Rank 0] step:3781/10000 train_time:326006ms step_avg:86.22ms +[2025-08-22 09:15:29] [Rank 0] step:3781/10000 train_time:326006ms step_avg:86.22ms +[2025-08-22 09:15:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:15:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:15:44] [Rank 0] PRINT: step:3800/10000 val_loss:4.2424 svd_entropy: attn_qk:H=0.8606,top10E=0.10,eRank=319.9,q75/q25=81.99 attn_vo:H=0.5161,top10E=0.61,eRank=37.7,q75/q25=116.78 mlp_w1:H=0.7411,top10E=0.30,eRank=165.5,q75/q25=10.67 mlp_w2:H=0.8743,top10E=0.15,eRank=347.9,q75/q25=8.85 vo_prod:H=0.4194,top10E=0.79,eRank=19.4,q75/q25=9174.29 train_time:327887ms step_avg:86.29ms +[2025-08-22 09:15:44] [Rank 0] PRINT: step:3800/10000 val_loss:4.2424 svd_entropy: attn_qk:H=0.8606,top10E=0.10,eRank=319.9,q75/q25=81.99 attn_vo:H=0.5161,top10E=0.61,eRank=37.7,q75/q25=116.78 mlp_w1:H=0.7411,top10E=0.30,eRank=165.5,q75/q25=10.67 mlp_w2:H=0.8743,top10E=0.15,eRank=347.9,q75/q25=8.85 vo_prod:H=0.4194,top10E=0.79,eRank=19.4,q75/q25=9174.29 train_time:327887ms step_avg:86.29ms +[2025-08-22 09:15:44] [Rank 0] step:3801/10000 train_time:327903ms step_avg:86.27ms +[2025-08-22 09:15:44] [Rank 0] step:3801/10000 train_time:327903ms step_avg:86.27ms +[2025-08-22 09:15:46] [Rank 0] step:3821/10000 train_time:329623ms step_avg:86.27ms +[2025-08-22 09:15:46] [Rank 0] step:3821/10000 train_time:329623ms step_avg:86.27ms +[2025-08-22 09:15:48] [Rank 0] step:3841/10000 train_time:331416ms 
step_avg:86.28ms +[2025-08-22 09:15:48] [Rank 0] step:3841/10000 train_time:331416ms step_avg:86.28ms +[2025-08-22 09:15:50] [Rank 0] step:3861/10000 train_time:333205ms step_avg:86.30ms +[2025-08-22 09:15:50] [Rank 0] step:3861/10000 train_time:333205ms step_avg:86.30ms +[2025-08-22 09:15:52] [Rank 0] step:3881/10000 train_time:334994ms step_avg:86.32ms +[2025-08-22 09:15:52] [Rank 0] step:3881/10000 train_time:334994ms step_avg:86.32ms +[2025-08-22 09:15:53] [Rank 0] step:3901/10000 train_time:336784ms step_avg:86.33ms +[2025-08-22 09:15:53] [Rank 0] step:3901/10000 train_time:336784ms step_avg:86.33ms +[2025-08-22 09:15:55] [Rank 0] step:3921/10000 train_time:338576ms step_avg:86.35ms +[2025-08-22 09:15:55] [Rank 0] step:3921/10000 train_time:338576ms step_avg:86.35ms +[2025-08-22 09:15:57] [Rank 0] step:3941/10000 train_time:340370ms step_avg:86.37ms +[2025-08-22 09:15:57] [Rank 0] step:3941/10000 train_time:340370ms step_avg:86.37ms +[2025-08-22 09:15:59] [Rank 0] step:3961/10000 train_time:342162ms step_avg:86.38ms +[2025-08-22 09:15:59] [Rank 0] step:3961/10000 train_time:342162ms step_avg:86.38ms +[2025-08-22 09:16:01] [Rank 0] step:3981/10000 train_time:343956ms step_avg:86.40ms +[2025-08-22 09:16:01] [Rank 0] step:3981/10000 train_time:343956ms step_avg:86.40ms +[2025-08-22 09:16:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:16:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:16:16] [Rank 0] PRINT: step:4000/10000 val_loss:4.2138 svd_entropy: attn_qk:H=0.8619,top10E=0.10,eRank=322.3,q75/q25=81.94 attn_vo:H=0.5215,top10E=0.60,eRank=39.2,q75/q25=118.10 mlp_w1:H=0.7441,top10E=0.29,eRank=169.0,q75/q25=10.74 mlp_w2:H=0.8764,top10E=0.15,eRank=352.6,q75/q25=8.69 vo_prod:H=0.4247,top10E=0.78,eRank=20.1,q75/q25=10109.80 train_time:345840ms step_avg:86.46ms +[2025-08-22 09:16:16] [Rank 0] PRINT: step:4000/10000 val_loss:4.2138 svd_entropy: attn_qk:H=0.8619,top10E=0.10,eRank=322.3,q75/q25=81.94 attn_vo:H=0.5215,top10E=0.60,eRank=39.2,q75/q25=118.10 mlp_w1:H=0.7441,top10E=0.29,eRank=169.0,q75/q25=10.74 mlp_w2:H=0.8764,top10E=0.15,eRank=352.6,q75/q25=8.69 vo_prod:H=0.4247,top10E=0.78,eRank=20.1,q75/q25=10109.80 train_time:345840ms step_avg:86.46ms +[2025-08-22 09:16:16] [Rank 0] step:4001/10000 train_time:345854ms step_avg:86.44ms +[2025-08-22 09:16:16] [Rank 0] step:4001/10000 train_time:345854ms step_avg:86.44ms +[2025-08-22 09:16:18] [Rank 0] step:4021/10000 train_time:347552ms step_avg:86.43ms +[2025-08-22 09:16:18] [Rank 0] step:4021/10000 train_time:347552ms step_avg:86.43ms +[2025-08-22 09:16:20] [Rank 0] step:4041/10000 train_time:349339ms step_avg:86.45ms +[2025-08-22 09:16:20] [Rank 0] step:4041/10000 train_time:349339ms step_avg:86.45ms +[2025-08-22 09:16:21] [Rank 0] step:4061/10000 train_time:351129ms step_avg:86.46ms +[2025-08-22 09:16:21] [Rank 0] step:4061/10000 train_time:351129ms step_avg:86.46ms +[2025-08-22 09:16:23] [Rank 0] step:4081/10000 train_time:353085ms step_avg:86.52ms +[2025-08-22 09:16:23] [Rank 0] step:4081/10000 train_time:353085ms step_avg:86.52ms +[2025-08-22 09:16:25] [Rank 0] step:4101/10000 train_time:354946ms step_avg:86.55ms +[2025-08-22 09:16:25] [Rank 0] step:4101/10000 train_time:354946ms step_avg:86.55ms +[2025-08-22 09:16:27] [Rank 0] step:4121/10000 train_time:356758ms step_avg:86.57ms +[2025-08-22 09:16:27] [Rank 0] step:4121/10000 train_time:356758ms step_avg:86.57ms +[2025-08-22 
09:16:29] [Rank 0] step:4141/10000 train_time:358555ms step_avg:86.59ms +[2025-08-22 09:16:29] [Rank 0] step:4141/10000 train_time:358555ms step_avg:86.59ms +[2025-08-22 09:16:31] [Rank 0] step:4161/10000 train_time:360345ms step_avg:86.60ms +[2025-08-22 09:16:31] [Rank 0] step:4161/10000 train_time:360345ms step_avg:86.60ms +[2025-08-22 09:16:32] [Rank 0] step:4181/10000 train_time:362137ms step_avg:86.61ms +[2025-08-22 09:16:32] [Rank 0] step:4181/10000 train_time:362137ms step_avg:86.61ms +[2025-08-22 09:16:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:16:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:16:48] [Rank 0] PRINT: step:4200/10000 val_loss:4.1987 svd_entropy: attn_qk:H=0.8632,top10E=0.10,eRank=324.7,q75/q25=82.41 attn_vo:H=0.5264,top10E=0.59,eRank=40.7,q75/q25=119.19 mlp_w1:H=0.7469,top10E=0.29,eRank=172.3,q75/q25=10.75 mlp_w2:H=0.8783,top10E=0.15,eRank=357.0,q75/q25=8.57 vo_prod:H=0.4293,top10E=0.77,eRank=20.8,q75/q25=10708.67 train_time:364017ms step_avg:86.67ms +[2025-08-22 09:16:48] [Rank 0] PRINT: step:4200/10000 val_loss:4.1987 svd_entropy: attn_qk:H=0.8632,top10E=0.10,eRank=324.7,q75/q25=82.41 attn_vo:H=0.5264,top10E=0.59,eRank=40.7,q75/q25=119.19 mlp_w1:H=0.7469,top10E=0.29,eRank=172.3,q75/q25=10.75 mlp_w2:H=0.8783,top10E=0.15,eRank=357.0,q75/q25=8.57 vo_prod:H=0.4293,top10E=0.77,eRank=20.8,q75/q25=10708.67 train_time:364017ms step_avg:86.67ms +[2025-08-22 09:16:48] [Rank 0] step:4201/10000 train_time:364032ms step_avg:86.65ms +[2025-08-22 09:16:48] [Rank 0] step:4201/10000 train_time:364032ms step_avg:86.65ms +[2025-08-22 09:16:49] [Rank 0] step:4221/10000 train_time:365736ms step_avg:86.65ms +[2025-08-22 09:16:49] [Rank 0] step:4221/10000 train_time:365736ms step_avg:86.65ms +[2025-08-22 09:16:51] [Rank 0] step:4241/10000 train_time:367524ms 
step_avg:86.66ms +[2025-08-22 09:16:51] [Rank 0] step:4241/10000 train_time:367524ms step_avg:86.66ms +[2025-08-22 09:16:53] [Rank 0] step:4261/10000 train_time:369312ms step_avg:86.67ms +[2025-08-22 09:16:53] [Rank 0] step:4261/10000 train_time:369312ms step_avg:86.67ms +[2025-08-22 09:16:55] [Rank 0] step:4281/10000 train_time:371101ms step_avg:86.69ms +[2025-08-22 09:16:55] [Rank 0] step:4281/10000 train_time:371101ms step_avg:86.69ms +[2025-08-22 09:16:57] [Rank 0] step:4301/10000 train_time:372888ms step_avg:86.70ms +[2025-08-22 09:16:57] [Rank 0] step:4301/10000 train_time:372888ms step_avg:86.70ms +[2025-08-22 09:16:58] [Rank 0] step:4321/10000 train_time:374679ms step_avg:86.71ms +[2025-08-22 09:16:58] [Rank 0] step:4321/10000 train_time:374679ms step_avg:86.71ms +[2025-08-22 09:17:00] [Rank 0] step:4341/10000 train_time:376467ms step_avg:86.72ms +[2025-08-22 09:17:00] [Rank 0] step:4341/10000 train_time:376467ms step_avg:86.72ms +[2025-08-22 09:17:02] [Rank 0] step:4361/10000 train_time:378256ms step_avg:86.74ms +[2025-08-22 09:17:02] [Rank 0] step:4361/10000 train_time:378256ms step_avg:86.74ms +[2025-08-22 09:17:04] [Rank 0] step:4381/10000 train_time:380047ms step_avg:86.75ms +[2025-08-22 09:17:04] [Rank 0] step:4381/10000 train_time:380047ms step_avg:86.75ms +[2025-08-22 09:17:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:17:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:17:19] [Rank 0] PRINT: step:4400/10000 val_loss:4.1934 svd_entropy: attn_qk:H=0.8645,top10E=0.10,eRank=327.0,q75/q25=82.67 attn_vo:H=0.5311,top10E=0.58,eRank=42.1,q75/q25=121.34 mlp_w1:H=0.7495,top10E=0.29,eRank=175.4,q75/q25=10.81 mlp_w2:H=0.8802,top10E=0.15,eRank=361.3,q75/q25=8.45 vo_prod:H=0.4341,top10E=0.76,eRank=21.6,q75/q25=11538.75 train_time:381938ms step_avg:86.80ms +[2025-08-22 09:17:19] [Rank 0] PRINT: step:4400/10000 val_loss:4.1934 svd_entropy: attn_qk:H=0.8645,top10E=0.10,eRank=327.0,q75/q25=82.67 attn_vo:H=0.5311,top10E=0.58,eRank=42.1,q75/q25=121.34 mlp_w1:H=0.7495,top10E=0.29,eRank=175.4,q75/q25=10.81 mlp_w2:H=0.8802,top10E=0.15,eRank=361.3,q75/q25=8.45 vo_prod:H=0.4341,top10E=0.76,eRank=21.6,q75/q25=11538.75 train_time:381938ms step_avg:86.80ms +[2025-08-22 09:17:19] [Rank 0] step:4401/10000 train_time:381953ms step_avg:86.79ms +[2025-08-22 09:17:19] [Rank 0] step:4401/10000 train_time:381953ms step_avg:86.79ms +[2025-08-22 09:17:21] [Rank 0] step:4421/10000 train_time:383671ms step_avg:86.78ms +[2025-08-22 09:17:21] [Rank 0] step:4421/10000 train_time:383671ms step_avg:86.78ms +[2025-08-22 09:17:23] [Rank 0] step:4441/10000 train_time:385459ms step_avg:86.80ms +[2025-08-22 09:17:23] [Rank 0] step:4441/10000 train_time:385459ms step_avg:86.80ms +[2025-08-22 09:17:25] [Rank 0] step:4461/10000 train_time:387256ms step_avg:86.81ms +[2025-08-22 09:17:25] [Rank 0] step:4461/10000 train_time:387256ms step_avg:86.81ms +[2025-08-22 09:17:26] [Rank 0] step:4481/10000 train_time:389055ms step_avg:86.82ms +[2025-08-22 09:17:26] [Rank 0] step:4481/10000 train_time:389055ms step_avg:86.82ms +[2025-08-22 09:17:28] [Rank 0] step:4501/10000 train_time:390853ms step_avg:86.84ms +[2025-08-22 09:17:28] [Rank 0] step:4501/10000 train_time:390853ms step_avg:86.84ms +[2025-08-22 09:17:30] [Rank 0] step:4521/10000 train_time:392722ms step_avg:86.87ms +[2025-08-22 09:17:30] [Rank 0] step:4521/10000 train_time:392722ms step_avg:86.87ms +[2025-08-22 
09:17:32] [Rank 0] step:4541/10000 train_time:394574ms step_avg:86.89ms +[2025-08-22 09:17:32] [Rank 0] step:4541/10000 train_time:394574ms step_avg:86.89ms +[2025-08-22 09:17:34] [Rank 0] step:4561/10000 train_time:396377ms step_avg:86.91ms +[2025-08-22 09:17:34] [Rank 0] step:4561/10000 train_time:396377ms step_avg:86.91ms +[2025-08-22 09:17:36] [Rank 0] step:4581/10000 train_time:398180ms step_avg:86.92ms +[2025-08-22 09:17:36] [Rank 0] step:4581/10000 train_time:398180ms step_avg:86.92ms +[2025-08-22 09:17:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:17:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:17:51] [Rank 0] PRINT: step:4600/10000 val_loss:4.1423 svd_entropy: attn_qk:H=0.8657,top10E=0.10,eRank=329.4,q75/q25=82.60 attn_vo:H=0.5359,top10E=0.57,eRank=43.6,q75/q25=123.33 mlp_w1:H=0.7519,top10E=0.28,eRank=178.5,q75/q25=10.87 mlp_w2:H=0.8820,top10E=0.14,eRank=365.3,q75/q25=8.32 vo_prod:H=0.4385,top10E=0.75,eRank=22.3,q75/q25=12528.20 train_time:400073ms step_avg:86.97ms +[2025-08-22 09:17:51] [Rank 0] PRINT: step:4600/10000 val_loss:4.1423 svd_entropy: attn_qk:H=0.8657,top10E=0.10,eRank=329.4,q75/q25=82.60 attn_vo:H=0.5359,top10E=0.57,eRank=43.6,q75/q25=123.33 mlp_w1:H=0.7519,top10E=0.28,eRank=178.5,q75/q25=10.87 mlp_w2:H=0.8820,top10E=0.14,eRank=365.3,q75/q25=8.32 vo_prod:H=0.4385,top10E=0.75,eRank=22.3,q75/q25=12528.20 train_time:400073ms step_avg:86.97ms +[2025-08-22 09:17:51] [Rank 0] step:4601/10000 train_time:400087ms step_avg:86.96ms +[2025-08-22 09:17:51] [Rank 0] step:4601/10000 train_time:400087ms step_avg:86.96ms +[2025-08-22 09:17:53] [Rank 0] step:4621/10000 train_time:401794ms step_avg:86.95ms +[2025-08-22 09:17:53] [Rank 0] step:4621/10000 train_time:401794ms step_avg:86.95ms +[2025-08-22 09:17:55] [Rank 0] step:4641/10000 train_time:403590ms 
step_avg:86.96ms +[2025-08-22 09:17:55] [Rank 0] step:4641/10000 train_time:403590ms step_avg:86.96ms +[2025-08-22 09:17:56] [Rank 0] step:4661/10000 train_time:405386ms step_avg:86.97ms +[2025-08-22 09:17:56] [Rank 0] step:4661/10000 train_time:405386ms step_avg:86.97ms +[2025-08-22 09:17:58] [Rank 0] step:4681/10000 train_time:407182ms step_avg:86.99ms +[2025-08-22 09:17:58] [Rank 0] step:4681/10000 train_time:407182ms step_avg:86.99ms +[2025-08-22 09:18:00] [Rank 0] step:4701/10000 train_time:408978ms step_avg:87.00ms +[2025-08-22 09:18:00] [Rank 0] step:4701/10000 train_time:408978ms step_avg:87.00ms +[2025-08-22 09:18:02] [Rank 0] step:4721/10000 train_time:410773ms step_avg:87.01ms +[2025-08-22 09:18:02] [Rank 0] step:4721/10000 train_time:410773ms step_avg:87.01ms +[2025-08-22 09:18:04] [Rank 0] step:4741/10000 train_time:412569ms step_avg:87.02ms +[2025-08-22 09:18:04] [Rank 0] step:4741/10000 train_time:412569ms step_avg:87.02ms +[2025-08-22 09:18:05] [Rank 0] step:4761/10000 train_time:414368ms step_avg:87.03ms +[2025-08-22 09:18:05] [Rank 0] step:4761/10000 train_time:414368ms step_avg:87.03ms +[2025-08-22 09:18:07] [Rank 0] step:4781/10000 train_time:416165ms step_avg:87.05ms +[2025-08-22 09:18:07] [Rank 0] step:4781/10000 train_time:416165ms step_avg:87.05ms +[2025-08-22 09:18:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:18:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:18:23] [Rank 0] PRINT: step:4800/10000 val_loss:4.1322 svd_entropy: attn_qk:H=0.8668,top10E=0.10,eRank=331.4,q75/q25=82.59 attn_vo:H=0.5401,top10E=0.56,eRank=45.0,q75/q25=126.35 mlp_w1:H=0.7543,top10E=0.28,eRank=181.5,q75/q25=10.87 mlp_w2:H=0.8836,top10E=0.14,eRank=369.0,q75/q25=8.20 vo_prod:H=0.4423,top10E=0.75,eRank=23.0,q75/q25=13375.18 train_time:418054ms step_avg:87.09ms +[2025-08-22 09:18:23] [Rank 0] PRINT: step:4800/10000 val_loss:4.1322 svd_entropy: attn_qk:H=0.8668,top10E=0.10,eRank=331.4,q75/q25=82.59 attn_vo:H=0.5401,top10E=0.56,eRank=45.0,q75/q25=126.35 mlp_w1:H=0.7543,top10E=0.28,eRank=181.5,q75/q25=10.87 mlp_w2:H=0.8836,top10E=0.14,eRank=369.0,q75/q25=8.20 vo_prod:H=0.4423,top10E=0.75,eRank=23.0,q75/q25=13375.18 train_time:418054ms step_avg:87.09ms +[2025-08-22 09:18:23] [Rank 0] step:4801/10000 train_time:418069ms step_avg:87.08ms +[2025-08-22 09:18:23] [Rank 0] step:4801/10000 train_time:418069ms step_avg:87.08ms +[2025-08-22 09:18:24] [Rank 0] step:4821/10000 train_time:419786ms step_avg:87.07ms +[2025-08-22 09:18:24] [Rank 0] step:4821/10000 train_time:419786ms step_avg:87.07ms +[2025-08-22 09:18:26] [Rank 0] step:4841/10000 train_time:421580ms step_avg:87.09ms +[2025-08-22 09:18:26] [Rank 0] step:4841/10000 train_time:421580ms step_avg:87.09ms +[2025-08-22 09:18:28] [Rank 0] step:4861/10000 train_time:423376ms step_avg:87.10ms +[2025-08-22 09:18:28] [Rank 0] step:4861/10000 train_time:423376ms step_avg:87.10ms +[2025-08-22 09:18:30] [Rank 0] step:4881/10000 train_time:425168ms step_avg:87.11ms +[2025-08-22 09:18:30] [Rank 0] step:4881/10000 train_time:425168ms step_avg:87.11ms +[2025-08-22 09:18:32] [Rank 0] step:4901/10000 train_time:426963ms step_avg:87.12ms +[2025-08-22 09:18:32] [Rank 0] step:4901/10000 train_time:426963ms step_avg:87.12ms +[2025-08-22 09:18:33] [Rank 0] step:4921/10000 train_time:428809ms step_avg:87.14ms +[2025-08-22 09:18:33] [Rank 0] step:4921/10000 train_time:428809ms step_avg:87.14ms +[2025-08-22 
09:18:35] [Rank 0] step:4941/10000 train_time:430607ms step_avg:87.15ms +[2025-08-22 09:18:35] [Rank 0] step:4941/10000 train_time:430607ms step_avg:87.15ms +[2025-08-22 09:18:37] [Rank 0] step:4961/10000 train_time:432401ms step_avg:87.16ms +[2025-08-22 09:18:37] [Rank 0] step:4961/10000 train_time:432401ms step_avg:87.16ms +[2025-08-22 09:18:39] [Rank 0] step:4981/10000 train_time:434199ms step_avg:87.17ms +[2025-08-22 09:18:39] [Rank 0] step:4981/10000 train_time:434199ms step_avg:87.17ms +[2025-08-22 09:18:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:18:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:18:54] [Rank 0] PRINT: step:5000/10000 val_loss:4.1101 svd_entropy: attn_qk:H=0.8678,top10E=0.10,eRank=333.3,q75/q25=83.35 attn_vo:H=0.5442,top10E=0.55,eRank=46.4,q75/q25=126.30 mlp_w1:H=0.7564,top10E=0.28,eRank=184.3,q75/q25=10.90 mlp_w2:H=0.8851,top10E=0.14,eRank=372.4,q75/q25=8.10 vo_prod:H=0.4464,top10E=0.74,eRank=23.7,q75/q25=14112.01 train_time:436087ms step_avg:87.22ms +[2025-08-22 09:18:54] [Rank 0] PRINT: step:5000/10000 val_loss:4.1101 svd_entropy: attn_qk:H=0.8678,top10E=0.10,eRank=333.3,q75/q25=83.35 attn_vo:H=0.5442,top10E=0.55,eRank=46.4,q75/q25=126.30 mlp_w1:H=0.7564,top10E=0.28,eRank=184.3,q75/q25=10.90 mlp_w2:H=0.8851,top10E=0.14,eRank=372.4,q75/q25=8.10 vo_prod:H=0.4464,top10E=0.74,eRank=23.7,q75/q25=14112.01 train_time:436087ms step_avg:87.22ms +[2025-08-22 09:18:54] [Rank 0] step:5001/10000 train_time:436102ms step_avg:87.20ms +[2025-08-22 09:18:54] [Rank 0] step:5001/10000 train_time:436102ms step_avg:87.20ms +[2025-08-22 09:18:56] [Rank 0] step:5021/10000 train_time:437814ms step_avg:87.20ms +[2025-08-22 09:18:56] [Rank 0] step:5021/10000 train_time:437814ms step_avg:87.20ms +[2025-08-22 09:18:58] [Rank 0] step:5041/10000 train_time:439611ms 
step_avg:87.21ms +[2025-08-22 09:18:58] [Rank 0] step:5041/10000 train_time:439611ms step_avg:87.21ms +[2025-08-22 09:19:00] [Rank 0] step:5061/10000 train_time:441403ms step_avg:87.22ms +[2025-08-22 09:19:00] [Rank 0] step:5061/10000 train_time:441403ms step_avg:87.22ms +[2025-08-22 09:19:02] [Rank 0] step:5081/10000 train_time:443197ms step_avg:87.23ms +[2025-08-22 09:19:02] [Rank 0] step:5081/10000 train_time:443197ms step_avg:87.23ms +[2025-08-22 09:19:03] [Rank 0] step:5101/10000 train_time:444993ms step_avg:87.24ms +[2025-08-22 09:19:03] [Rank 0] step:5101/10000 train_time:444993ms step_avg:87.24ms +[2025-08-22 09:19:05] [Rank 0] step:5121/10000 train_time:446788ms step_avg:87.25ms +[2025-08-22 09:19:05] [Rank 0] step:5121/10000 train_time:446788ms step_avg:87.25ms +[2025-08-22 09:19:07] [Rank 0] step:5141/10000 train_time:448589ms step_avg:87.26ms +[2025-08-22 09:19:07] [Rank 0] step:5141/10000 train_time:448589ms step_avg:87.26ms +[2025-08-22 09:19:09] [Rank 0] step:5161/10000 train_time:450386ms step_avg:87.27ms +[2025-08-22 09:19:09] [Rank 0] step:5161/10000 train_time:450386ms step_avg:87.27ms +[2025-08-22 09:19:11] [Rank 0] step:5181/10000 train_time:452186ms step_avg:87.28ms +[2025-08-22 09:19:11] [Rank 0] step:5181/10000 train_time:452186ms step_avg:87.28ms +[2025-08-22 09:19:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:19:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:19:26] [Rank 0] PRINT: step:5200/10000 val_loss:4.0904 svd_entropy: attn_qk:H=0.8687,top10E=0.10,eRank=335.1,q75/q25=83.52 attn_vo:H=0.5482,top10E=0.54,eRank=47.8,q75/q25=126.72 mlp_w1:H=0.7586,top10E=0.27,eRank=187.1,q75/q25=10.92 mlp_w2:H=0.8864,top10E=0.14,eRank=375.6,q75/q25=8.01 vo_prod:H=0.4504,top10E=0.73,eRank=24.3,q75/q25=14574.75 train_time:454099ms step_avg:87.33ms +[2025-08-22 09:19:26] [Rank 0] PRINT: step:5200/10000 val_loss:4.0904 svd_entropy: attn_qk:H=0.8687,top10E=0.10,eRank=335.1,q75/q25=83.52 attn_vo:H=0.5482,top10E=0.54,eRank=47.8,q75/q25=126.72 mlp_w1:H=0.7586,top10E=0.27,eRank=187.1,q75/q25=10.92 mlp_w2:H=0.8864,top10E=0.14,eRank=375.6,q75/q25=8.01 vo_prod:H=0.4504,top10E=0.73,eRank=24.3,q75/q25=14574.75 train_time:454099ms step_avg:87.33ms +[2025-08-22 09:19:26] [Rank 0] step:5201/10000 train_time:454114ms step_avg:87.31ms +[2025-08-22 09:19:26] [Rank 0] step:5201/10000 train_time:454114ms step_avg:87.31ms +[2025-08-22 09:19:28] [Rank 0] step:5221/10000 train_time:455860ms step_avg:87.31ms +[2025-08-22 09:19:28] [Rank 0] step:5221/10000 train_time:455860ms step_avg:87.31ms +[2025-08-22 09:19:30] [Rank 0] step:5241/10000 train_time:457685ms step_avg:87.33ms +[2025-08-22 09:19:30] [Rank 0] step:5241/10000 train_time:457685ms step_avg:87.33ms +[2025-08-22 09:19:32] [Rank 0] step:5261/10000 train_time:459515ms step_avg:87.34ms +[2025-08-22 09:19:32] [Rank 0] step:5261/10000 train_time:459515ms step_avg:87.34ms +[2025-08-22 09:19:33] [Rank 0] step:5281/10000 train_time:461342ms step_avg:87.36ms +[2025-08-22 09:19:33] [Rank 0] step:5281/10000 train_time:461342ms step_avg:87.36ms +[2025-08-22 09:19:35] [Rank 0] step:5301/10000 train_time:463181ms step_avg:87.38ms +[2025-08-22 09:19:35] [Rank 0] step:5301/10000 train_time:463181ms step_avg:87.38ms +[2025-08-22 09:19:37] [Rank 0] step:5321/10000 train_time:465013ms step_avg:87.39ms +[2025-08-22 09:19:37] [Rank 0] step:5321/10000 train_time:465013ms step_avg:87.39ms +[2025-08-22 
09:19:39] [Rank 0] step:5341/10000 train_time:466846ms step_avg:87.41ms +[2025-08-22 09:19:39] [Rank 0] step:5341/10000 train_time:466846ms step_avg:87.41ms +[2025-08-22 09:19:41] [Rank 0] step:5361/10000 train_time:468681ms step_avg:87.42ms +[2025-08-22 09:19:41] [Rank 0] step:5361/10000 train_time:468681ms step_avg:87.42ms +[2025-08-22 09:19:43] [Rank 0] step:5381/10000 train_time:470515ms step_avg:87.44ms +[2025-08-22 09:19:43] [Rank 0] step:5381/10000 train_time:470515ms step_avg:87.44ms +[2025-08-22 09:19:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:19:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:19:58] [Rank 0] PRINT: step:5400/10000 val_loss:4.0709 svd_entropy: attn_qk:H=0.8696,top10E=0.10,eRank=336.7,q75/q25=83.22 attn_vo:H=0.5518,top10E=0.53,eRank=49.1,q75/q25=127.23 mlp_w1:H=0.7606,top10E=0.27,eRank=189.8,q75/q25=10.91 mlp_w2:H=0.8877,top10E=0.14,eRank=378.5,q75/q25=7.94 vo_prod:H=0.4540,top10E=0.72,eRank=25.0,q75/q25=15068.48 train_time:472439ms step_avg:87.49ms +[2025-08-22 09:19:58] [Rank 0] PRINT: step:5400/10000 val_loss:4.0709 svd_entropy: attn_qk:H=0.8696,top10E=0.10,eRank=336.7,q75/q25=83.22 attn_vo:H=0.5518,top10E=0.53,eRank=49.1,q75/q25=127.23 mlp_w1:H=0.7606,top10E=0.27,eRank=189.8,q75/q25=10.91 mlp_w2:H=0.8877,top10E=0.14,eRank=378.5,q75/q25=7.94 vo_prod:H=0.4540,top10E=0.72,eRank=25.0,q75/q25=15068.48 train_time:472439ms step_avg:87.49ms +[2025-08-22 09:19:58] [Rank 0] step:5401/10000 train_time:472454ms step_avg:87.48ms +[2025-08-22 09:19:58] [Rank 0] step:5401/10000 train_time:472454ms step_avg:87.48ms +[2025-08-22 09:20:00] [Rank 0] step:5421/10000 train_time:474200ms step_avg:87.47ms +[2025-08-22 09:20:00] [Rank 0] step:5421/10000 train_time:474200ms step_avg:87.47ms +[2025-08-22 09:20:02] [Rank 0] step:5441/10000 train_time:476024ms 
step_avg:87.49ms +[2025-08-22 09:20:02] [Rank 0] step:5441/10000 train_time:476024ms step_avg:87.49ms +[2025-08-22 09:20:04] [Rank 0] step:5461/10000 train_time:477856ms step_avg:87.50ms +[2025-08-22 09:20:04] [Rank 0] step:5461/10000 train_time:477856ms step_avg:87.50ms +[2025-08-22 09:20:05] [Rank 0] step:5481/10000 train_time:479684ms step_avg:87.52ms +[2025-08-22 09:20:05] [Rank 0] step:5481/10000 train_time:479684ms step_avg:87.52ms +[2025-08-22 09:20:07] [Rank 0] step:5501/10000 train_time:481518ms step_avg:87.53ms +[2025-08-22 09:20:07] [Rank 0] step:5501/10000 train_time:481518ms step_avg:87.53ms +[2025-08-22 09:20:09] [Rank 0] step:5521/10000 train_time:483351ms step_avg:87.55ms +[2025-08-22 09:20:09] [Rank 0] step:5521/10000 train_time:483351ms step_avg:87.55ms +[2025-08-22 09:20:11] [Rank 0] step:5541/10000 train_time:485180ms step_avg:87.56ms +[2025-08-22 09:20:11] [Rank 0] step:5541/10000 train_time:485180ms step_avg:87.56ms +[2025-08-22 09:20:13] [Rank 0] step:5561/10000 train_time:487011ms step_avg:87.58ms +[2025-08-22 09:20:13] [Rank 0] step:5561/10000 train_time:487011ms step_avg:87.58ms +[2025-08-22 09:20:15] [Rank 0] step:5581/10000 train_time:488842ms step_avg:87.59ms +[2025-08-22 09:20:15] [Rank 0] step:5581/10000 train_time:488842ms step_avg:87.59ms +[2025-08-22 09:20:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:20:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:20:30] [Rank 0] PRINT: step:5600/10000 val_loss:4.0602 svd_entropy: attn_qk:H=0.8704,top10E=0.10,eRank=338.3,q75/q25=83.47 attn_vo:H=0.5553,top10E=0.52,eRank=50.3,q75/q25=128.93 mlp_w1:H=0.7626,top10E=0.27,eRank=192.3,q75/q25=10.91 mlp_w2:H=0.8889,top10E=0.14,eRank=381.3,q75/q25=7.87 vo_prod:H=0.4573,top10E=0.71,eRank=25.6,q75/q25=15259.65 train_time:490769ms step_avg:87.64ms +[2025-08-22 09:20:30] [Rank 0] PRINT: step:5600/10000 val_loss:4.0602 svd_entropy: attn_qk:H=0.8704,top10E=0.10,eRank=338.3,q75/q25=83.47 attn_vo:H=0.5553,top10E=0.52,eRank=50.3,q75/q25=128.93 mlp_w1:H=0.7626,top10E=0.27,eRank=192.3,q75/q25=10.91 mlp_w2:H=0.8889,top10E=0.14,eRank=381.3,q75/q25=7.87 vo_prod:H=0.4573,top10E=0.71,eRank=25.6,q75/q25=15259.65 train_time:490769ms step_avg:87.64ms +[2025-08-22 09:20:30] [Rank 0] step:5601/10000 train_time:490784ms step_avg:87.62ms +[2025-08-22 09:20:30] [Rank 0] step:5601/10000 train_time:490784ms step_avg:87.62ms +[2025-08-22 09:20:32] [Rank 0] step:5621/10000 train_time:492533ms step_avg:87.62ms +[2025-08-22 09:20:32] [Rank 0] step:5621/10000 train_time:492533ms step_avg:87.62ms +[2025-08-22 09:20:34] [Rank 0] step:5641/10000 train_time:494360ms step_avg:87.64ms +[2025-08-22 09:20:34] [Rank 0] step:5641/10000 train_time:494360ms step_avg:87.64ms +[2025-08-22 09:20:35] [Rank 0] step:5661/10000 train_time:496186ms step_avg:87.65ms +[2025-08-22 09:20:35] [Rank 0] step:5661/10000 train_time:496186ms step_avg:87.65ms +[2025-08-22 09:20:37] [Rank 0] step:5681/10000 train_time:498016ms step_avg:87.66ms +[2025-08-22 09:20:37] [Rank 0] step:5681/10000 train_time:498016ms step_avg:87.66ms +[2025-08-22 09:20:39] [Rank 0] step:5701/10000 train_time:499844ms step_avg:87.68ms +[2025-08-22 09:20:39] [Rank 0] step:5701/10000 train_time:499844ms step_avg:87.68ms +[2025-08-22 09:20:41] [Rank 0] step:5721/10000 train_time:501674ms step_avg:87.69ms +[2025-08-22 09:20:41] [Rank 0] step:5721/10000 train_time:501674ms step_avg:87.69ms +[2025-08-22 
09:20:43] [Rank 0] step:5741/10000 train_time:503503ms step_avg:87.70ms +[2025-08-22 09:20:43] [Rank 0] step:5741/10000 train_time:503503ms step_avg:87.70ms +[2025-08-22 09:20:45] [Rank 0] step:5761/10000 train_time:505335ms step_avg:87.72ms +[2025-08-22 09:20:45] [Rank 0] step:5761/10000 train_time:505335ms step_avg:87.72ms +[2025-08-22 09:20:46] [Rank 0] step:5781/10000 train_time:507165ms step_avg:87.73ms +[2025-08-22 09:20:46] [Rank 0] step:5781/10000 train_time:507165ms step_avg:87.73ms +[2025-08-22 09:20:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:20:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:21:02] [Rank 0] PRINT: step:5800/10000 val_loss:4.0489 svd_entropy: attn_qk:H=0.8713,top10E=0.10,eRank=339.9,q75/q25=83.01 attn_vo:H=0.5587,top10E=0.52,eRank=51.6,q75/q25=129.32 mlp_w1:H=0.7644,top10E=0.27,eRank=194.8,q75/q25=10.92 mlp_w2:H=0.8901,top10E=0.14,eRank=384.1,q75/q25=7.80 vo_prod:H=0.4610,top10E=0.71,eRank=26.3,q75/q25=15853.68 train_time:509084ms step_avg:87.77ms +[2025-08-22 09:21:02] [Rank 0] PRINT: step:5800/10000 val_loss:4.0489 svd_entropy: attn_qk:H=0.8713,top10E=0.10,eRank=339.9,q75/q25=83.01 attn_vo:H=0.5587,top10E=0.52,eRank=51.6,q75/q25=129.32 mlp_w1:H=0.7644,top10E=0.27,eRank=194.8,q75/q25=10.92 mlp_w2:H=0.8901,top10E=0.14,eRank=384.1,q75/q25=7.80 vo_prod:H=0.4610,top10E=0.71,eRank=26.3,q75/q25=15853.68 train_time:509084ms step_avg:87.77ms +[2025-08-22 09:21:02] [Rank 0] step:5801/10000 train_time:509098ms step_avg:87.76ms +[2025-08-22 09:21:02] [Rank 0] step:5801/10000 train_time:509098ms step_avg:87.76ms +[2025-08-22 09:21:04] [Rank 0] step:5821/10000 train_time:510843ms step_avg:87.76ms +[2025-08-22 09:21:04] [Rank 0] step:5821/10000 train_time:510843ms step_avg:87.76ms +[2025-08-22 09:21:06] [Rank 0] step:5841/10000 train_time:512666ms 
step_avg:87.77ms +[2025-08-22 09:21:06] [Rank 0] step:5841/10000 train_time:512666ms step_avg:87.77ms +[2025-08-22 09:21:07] [Rank 0] step:5861/10000 train_time:514496ms step_avg:87.78ms +[2025-08-22 09:21:07] [Rank 0] step:5861/10000 train_time:514496ms step_avg:87.78ms +[2025-08-22 09:21:09] [Rank 0] step:5881/10000 train_time:516326ms step_avg:87.80ms +[2025-08-22 09:21:09] [Rank 0] step:5881/10000 train_time:516326ms step_avg:87.80ms +[2025-08-22 09:21:11] [Rank 0] step:5901/10000 train_time:518152ms step_avg:87.81ms +[2025-08-22 09:21:11] [Rank 0] step:5901/10000 train_time:518152ms step_avg:87.81ms +[2025-08-22 09:21:13] [Rank 0] step:5921/10000 train_time:519978ms step_avg:87.82ms +[2025-08-22 09:21:13] [Rank 0] step:5921/10000 train_time:519978ms step_avg:87.82ms +[2025-08-22 09:21:15] [Rank 0] step:5941/10000 train_time:521812ms step_avg:87.83ms +[2025-08-22 09:21:15] [Rank 0] step:5941/10000 train_time:521812ms step_avg:87.83ms +[2025-08-22 09:21:17] [Rank 0] step:5961/10000 train_time:523647ms step_avg:87.85ms +[2025-08-22 09:21:17] [Rank 0] step:5961/10000 train_time:523647ms step_avg:87.85ms +[2025-08-22 09:21:18] [Rank 0] step:5981/10000 train_time:525478ms step_avg:87.86ms +[2025-08-22 09:21:18] [Rank 0] step:5981/10000 train_time:525478ms step_avg:87.86ms +[2025-08-22 09:21:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:21:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:21:34] [Rank 0] PRINT: step:6000/10000 val_loss:4.0243 svd_entropy: attn_qk:H=0.8720,top10E=0.09,eRank=341.3,q75/q25=82.89 attn_vo:H=0.5619,top10E=0.51,eRank=52.8,q75/q25=128.97 mlp_w1:H=0.7662,top10E=0.26,eRank=197.2,q75/q25=10.90 mlp_w2:H=0.8912,top10E=0.13,eRank=386.7,q75/q25=7.74 vo_prod:H=0.4641,top10E=0.70,eRank=26.8,q75/q25=16553.64 train_time:527397ms step_avg:87.90ms +[2025-08-22 09:21:34] [Rank 0] PRINT: step:6000/10000 val_loss:4.0243 svd_entropy: attn_qk:H=0.8720,top10E=0.09,eRank=341.3,q75/q25=82.89 attn_vo:H=0.5619,top10E=0.51,eRank=52.8,q75/q25=128.97 mlp_w1:H=0.7662,top10E=0.26,eRank=197.2,q75/q25=10.90 mlp_w2:H=0.8912,top10E=0.13,eRank=386.7,q75/q25=7.74 vo_prod:H=0.4641,top10E=0.70,eRank=26.8,q75/q25=16553.64 train_time:527397ms step_avg:87.90ms +[2025-08-22 09:21:34] [Rank 0] step:6001/10000 train_time:527413ms step_avg:87.89ms +[2025-08-22 09:21:34] [Rank 0] step:6001/10000 train_time:527413ms step_avg:87.89ms +[2025-08-22 09:21:36] [Rank 0] step:6021/10000 train_time:529163ms step_avg:87.89ms +[2025-08-22 09:21:36] [Rank 0] step:6021/10000 train_time:529163ms step_avg:87.89ms +[2025-08-22 09:21:37] [Rank 0] step:6041/10000 train_time:530996ms step_avg:87.90ms +[2025-08-22 09:21:37] [Rank 0] step:6041/10000 train_time:530996ms step_avg:87.90ms +[2025-08-22 09:21:39] [Rank 0] step:6061/10000 train_time:532837ms step_avg:87.91ms +[2025-08-22 09:21:39] [Rank 0] step:6061/10000 train_time:532837ms step_avg:87.91ms +[2025-08-22 09:21:41] [Rank 0] step:6081/10000 train_time:534728ms step_avg:87.93ms +[2025-08-22 09:21:41] [Rank 0] step:6081/10000 train_time:534728ms step_avg:87.93ms +[2025-08-22 09:21:43] [Rank 0] step:6101/10000 train_time:536584ms step_avg:87.95ms +[2025-08-22 09:21:43] [Rank 0] step:6101/10000 train_time:536584ms step_avg:87.95ms +[2025-08-22 09:21:45] [Rank 0] step:6121/10000 train_time:538464ms step_avg:87.97ms +[2025-08-22 09:21:45] [Rank 0] step:6121/10000 train_time:538464ms step_avg:87.97ms +[2025-08-22 
09:21:47] [Rank 0] step:6141/10000 train_time:540312ms step_avg:87.98ms +[2025-08-22 09:21:47] [Rank 0] step:6141/10000 train_time:540312ms step_avg:87.98ms +[2025-08-22 09:21:49] [Rank 0] step:6161/10000 train_time:542151ms step_avg:88.00ms +[2025-08-22 09:21:49] [Rank 0] step:6161/10000 train_time:542151ms step_avg:88.00ms +[2025-08-22 09:21:50] [Rank 0] step:6181/10000 train_time:543987ms step_avg:88.01ms +[2025-08-22 09:21:50] [Rank 0] step:6181/10000 train_time:543987ms step_avg:88.01ms +[2025-08-22 09:21:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:21:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:22:06] [Rank 0] PRINT: step:6200/10000 val_loss:4.0094 svd_entropy: attn_qk:H=0.8727,top10E=0.09,eRank=342.6,q75/q25=82.84 attn_vo:H=0.5650,top10E=0.50,eRank=54.0,q75/q25=130.40 mlp_w1:H=0.7679,top10E=0.26,eRank=199.5,q75/q25=10.92 mlp_w2:H=0.8922,top10E=0.13,eRank=389.2,q75/q25=7.70 vo_prod:H=0.4672,top10E=0.69,eRank=27.4,q75/q25=16780.01 train_time:545918ms step_avg:88.05ms +[2025-08-22 09:22:06] [Rank 0] PRINT: step:6200/10000 val_loss:4.0094 svd_entropy: attn_qk:H=0.8727,top10E=0.09,eRank=342.6,q75/q25=82.84 attn_vo:H=0.5650,top10E=0.50,eRank=54.0,q75/q25=130.40 mlp_w1:H=0.7679,top10E=0.26,eRank=199.5,q75/q25=10.92 mlp_w2:H=0.8922,top10E=0.13,eRank=389.2,q75/q25=7.70 vo_prod:H=0.4672,top10E=0.69,eRank=27.4,q75/q25=16780.01 train_time:545918ms step_avg:88.05ms +[2025-08-22 09:22:06] [Rank 0] step:6201/10000 train_time:545933ms step_avg:88.04ms +[2025-08-22 09:22:06] [Rank 0] step:6201/10000 train_time:545933ms step_avg:88.04ms +[2025-08-22 09:22:08] [Rank 0] step:6221/10000 train_time:547677ms step_avg:88.04ms +[2025-08-22 09:22:08] [Rank 0] step:6221/10000 train_time:547677ms step_avg:88.04ms +[2025-08-22 09:22:10] [Rank 0] step:6241/10000 train_time:549506ms 
step_avg:88.05ms +[2025-08-22 09:22:10] [Rank 0] step:6241/10000 train_time:549506ms step_avg:88.05ms +[2025-08-22 09:22:11] [Rank 0] step:6261/10000 train_time:551341ms step_avg:88.06ms +[2025-08-22 09:22:11] [Rank 0] step:6261/10000 train_time:551341ms step_avg:88.06ms +[2025-08-22 09:22:13] [Rank 0] step:6281/10000 train_time:553179ms step_avg:88.07ms +[2025-08-22 09:22:13] [Rank 0] step:6281/10000 train_time:553179ms step_avg:88.07ms +[2025-08-22 09:22:15] [Rank 0] step:6301/10000 train_time:555014ms step_avg:88.08ms +[2025-08-22 09:22:15] [Rank 0] step:6301/10000 train_time:555014ms step_avg:88.08ms +[2025-08-22 09:22:17] [Rank 0] step:6321/10000 train_time:556849ms step_avg:88.10ms +[2025-08-22 09:22:17] [Rank 0] step:6321/10000 train_time:556849ms step_avg:88.10ms +[2025-08-22 09:22:19] [Rank 0] step:6341/10000 train_time:558686ms step_avg:88.11ms +[2025-08-22 09:22:19] [Rank 0] step:6341/10000 train_time:558686ms step_avg:88.11ms +[2025-08-22 09:22:21] [Rank 0] step:6361/10000 train_time:560528ms step_avg:88.12ms +[2025-08-22 09:22:21] [Rank 0] step:6361/10000 train_time:560528ms step_avg:88.12ms +[2025-08-22 09:22:22] [Rank 0] step:6381/10000 train_time:562367ms step_avg:88.13ms +[2025-08-22 09:22:22] [Rank 0] step:6381/10000 train_time:562367ms step_avg:88.13ms +[2025-08-22 09:22:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:22:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:22:38] [Rank 0] PRINT: step:6400/10000 val_loss:3.9886 svd_entropy: attn_qk:H=0.8733,top10E=0.09,eRank=343.8,q75/q25=83.03 attn_vo:H=0.5678,top10E=0.50,eRank=55.1,q75/q25=131.42 mlp_w1:H=0.7694,top10E=0.26,eRank=201.6,q75/q25=10.90 mlp_w2:H=0.8932,top10E=0.13,eRank=391.5,q75/q25=7.66 vo_prod:H=0.4701,top10E=0.69,eRank=28.0,q75/q25=17098.57 train_time:564292ms step_avg:88.17ms +[2025-08-22 09:22:38] [Rank 0] PRINT: step:6400/10000 val_loss:3.9886 svd_entropy: attn_qk:H=0.8733,top10E=0.09,eRank=343.8,q75/q25=83.03 attn_vo:H=0.5678,top10E=0.50,eRank=55.1,q75/q25=131.42 mlp_w1:H=0.7694,top10E=0.26,eRank=201.6,q75/q25=10.90 mlp_w2:H=0.8932,top10E=0.13,eRank=391.5,q75/q25=7.66 vo_prod:H=0.4701,top10E=0.69,eRank=28.0,q75/q25=17098.57 train_time:564292ms step_avg:88.17ms +[2025-08-22 09:22:38] [Rank 0] step:6401/10000 train_time:564307ms step_avg:88.16ms +[2025-08-22 09:22:38] [Rank 0] step:6401/10000 train_time:564307ms step_avg:88.16ms +[2025-08-22 09:22:40] [Rank 0] step:6421/10000 train_time:566062ms step_avg:88.16ms +[2025-08-22 09:22:40] [Rank 0] step:6421/10000 train_time:566062ms step_avg:88.16ms +[2025-08-22 09:22:42] [Rank 0] step:6441/10000 train_time:567895ms step_avg:88.17ms +[2025-08-22 09:22:42] [Rank 0] step:6441/10000 train_time:567895ms step_avg:88.17ms +[2025-08-22 09:22:43] [Rank 0] step:6461/10000 train_time:569732ms step_avg:88.18ms +[2025-08-22 09:22:43] [Rank 0] step:6461/10000 train_time:569732ms step_avg:88.18ms +[2025-08-22 09:22:45] [Rank 0] step:6481/10000 train_time:571623ms step_avg:88.20ms +[2025-08-22 09:22:45] [Rank 0] step:6481/10000 train_time:571623ms step_avg:88.20ms +[2025-08-22 09:22:47] [Rank 0] step:6501/10000 train_time:573454ms step_avg:88.21ms +[2025-08-22 09:22:47] [Rank 0] step:6501/10000 train_time:573454ms step_avg:88.21ms +[2025-08-22 09:22:49] [Rank 0] step:6521/10000 train_time:575285ms step_avg:88.22ms +[2025-08-22 09:22:49] [Rank 0] step:6521/10000 train_time:575285ms step_avg:88.22ms +[2025-08-22 
09:22:51] [Rank 0] step:6541/10000 train_time:577121ms step_avg:88.23ms +[2025-08-22 09:22:51] [Rank 0] step:6541/10000 train_time:577121ms step_avg:88.23ms +[2025-08-22 09:22:53] [Rank 0] step:6561/10000 train_time:578963ms step_avg:88.24ms +[2025-08-22 09:22:53] [Rank 0] step:6561/10000 train_time:578963ms step_avg:88.24ms +[2025-08-22 09:22:55] [Rank 0] step:6581/10000 train_time:580796ms step_avg:88.25ms +[2025-08-22 09:22:55] [Rank 0] step:6581/10000 train_time:580796ms step_avg:88.25ms +[2025-08-22 09:22:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:22:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:23:10] [Rank 0] PRINT: step:6600/10000 val_loss:3.9791 svd_entropy: attn_qk:H=0.8738,top10E=0.09,eRank=344.9,q75/q25=82.35 attn_vo:H=0.5704,top10E=0.49,eRank=56.1,q75/q25=131.79 mlp_w1:H=0.7708,top10E=0.26,eRank=203.5,q75/q25=10.89 mlp_w2:H=0.8940,top10E=0.13,eRank=393.6,q75/q25=7.62 vo_prod:H=0.4723,top10E=0.68,eRank=28.4,q75/q25=17387.31 train_time:582726ms step_avg:88.29ms +[2025-08-22 09:23:10] [Rank 0] PRINT: step:6600/10000 val_loss:3.9791 svd_entropy: attn_qk:H=0.8738,top10E=0.09,eRank=344.9,q75/q25=82.35 attn_vo:H=0.5704,top10E=0.49,eRank=56.1,q75/q25=131.79 mlp_w1:H=0.7708,top10E=0.26,eRank=203.5,q75/q25=10.89 mlp_w2:H=0.8940,top10E=0.13,eRank=393.6,q75/q25=7.62 vo_prod:H=0.4723,top10E=0.68,eRank=28.4,q75/q25=17387.31 train_time:582726ms step_avg:88.29ms +[2025-08-22 09:23:10] [Rank 0] step:6601/10000 train_time:582740ms step_avg:88.28ms +[2025-08-22 09:23:10] [Rank 0] step:6601/10000 train_time:582740ms step_avg:88.28ms +[2025-08-22 09:23:12] [Rank 0] step:6621/10000 train_time:584501ms step_avg:88.28ms +[2025-08-22 09:23:12] [Rank 0] step:6621/10000 train_time:584501ms step_avg:88.28ms +[2025-08-22 09:23:14] [Rank 0] step:6641/10000 train_time:586344ms 
step_avg:88.29ms +[2025-08-22 09:23:14] [Rank 0] step:6641/10000 train_time:586344ms step_avg:88.29ms +[2025-08-22 09:23:15] [Rank 0] step:6661/10000 train_time:588181ms step_avg:88.30ms +[2025-08-22 09:23:15] [Rank 0] step:6661/10000 train_time:588181ms step_avg:88.30ms +[2025-08-22 09:23:17] [Rank 0] step:6681/10000 train_time:590035ms step_avg:88.32ms +[2025-08-22 09:23:17] [Rank 0] step:6681/10000 train_time:590035ms step_avg:88.32ms +[2025-08-22 09:23:19] [Rank 0] step:6701/10000 train_time:591909ms step_avg:88.33ms +[2025-08-22 09:23:19] [Rank 0] step:6701/10000 train_time:591909ms step_avg:88.33ms +[2025-08-22 09:23:21] [Rank 0] step:6721/10000 train_time:593779ms step_avg:88.35ms +[2025-08-22 09:23:21] [Rank 0] step:6721/10000 train_time:593779ms step_avg:88.35ms +[2025-08-22 09:23:23] [Rank 0] step:6741/10000 train_time:595649ms step_avg:88.36ms +[2025-08-22 09:23:23] [Rank 0] step:6741/10000 train_time:595649ms step_avg:88.36ms +[2025-08-22 09:23:25] [Rank 0] step:6761/10000 train_time:597515ms step_avg:88.38ms +[2025-08-22 09:23:25] [Rank 0] step:6761/10000 train_time:597515ms step_avg:88.38ms +[2025-08-22 09:23:27] [Rank 0] step:6781/10000 train_time:599381ms step_avg:88.39ms +[2025-08-22 09:23:27] [Rank 0] step:6781/10000 train_time:599381ms step_avg:88.39ms +[2025-08-22 09:23:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:23:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:23:42] [Rank 0] PRINT: step:6800/10000 val_loss:3.9585 svd_entropy: attn_qk:H=0.8743,top10E=0.09,eRank=345.8,q75/q25=81.94 attn_vo:H=0.5727,top10E=0.49,eRank=57.1,q75/q25=132.56 mlp_w1:H=0.7721,top10E=0.26,eRank=205.3,q75/q25=10.88 mlp_w2:H=0.8948,top10E=0.13,eRank=395.4,q75/q25=7.57 vo_prod:H=0.4746,top10E=0.68,eRank=28.9,q75/q25=18006.10 train_time:601348ms step_avg:88.43ms +[2025-08-22 09:23:42] [Rank 0] PRINT: step:6800/10000 val_loss:3.9585 svd_entropy: attn_qk:H=0.8743,top10E=0.09,eRank=345.8,q75/q25=81.94 attn_vo:H=0.5727,top10E=0.49,eRank=57.1,q75/q25=132.56 mlp_w1:H=0.7721,top10E=0.26,eRank=205.3,q75/q25=10.88 mlp_w2:H=0.8948,top10E=0.13,eRank=395.4,q75/q25=7.57 vo_prod:H=0.4746,top10E=0.68,eRank=28.9,q75/q25=18006.10 train_time:601348ms step_avg:88.43ms +[2025-08-22 09:23:42] [Rank 0] step:6801/10000 train_time:601364ms step_avg:88.42ms +[2025-08-22 09:23:42] [Rank 0] step:6801/10000 train_time:601364ms step_avg:88.42ms +[2025-08-22 09:23:44] [Rank 0] step:6821/10000 train_time:603130ms step_avg:88.42ms +[2025-08-22 09:23:44] [Rank 0] step:6821/10000 train_time:603130ms step_avg:88.42ms +[2025-08-22 09:23:46] [Rank 0] step:6841/10000 train_time:604989ms step_avg:88.44ms +[2025-08-22 09:23:46] [Rank 0] step:6841/10000 train_time:604989ms step_avg:88.44ms +[2025-08-22 09:23:48] [Rank 0] step:6861/10000 train_time:606852ms step_avg:88.45ms +[2025-08-22 09:23:48] [Rank 0] step:6861/10000 train_time:606852ms step_avg:88.45ms +[2025-08-22 09:23:50] [Rank 0] step:6881/10000 train_time:608714ms step_avg:88.46ms +[2025-08-22 09:23:50] [Rank 0] step:6881/10000 train_time:608714ms step_avg:88.46ms +[2025-08-22 09:23:52] [Rank 0] step:6901/10000 train_time:610576ms step_avg:88.48ms +[2025-08-22 09:23:52] [Rank 0] step:6901/10000 train_time:610576ms step_avg:88.48ms +[2025-08-22 09:23:53] [Rank 0] step:6921/10000 train_time:612438ms step_avg:88.49ms +[2025-08-22 09:23:53] [Rank 0] step:6921/10000 train_time:612438ms step_avg:88.49ms +[2025-08-22 
09:23:55] [Rank 0] step:6941/10000 train_time:614303ms step_avg:88.50ms +[2025-08-22 09:23:55] [Rank 0] step:6941/10000 train_time:614303ms step_avg:88.50ms +[2025-08-22 09:23:57] [Rank 0] step:6961/10000 train_time:616183ms step_avg:88.52ms +[2025-08-22 09:23:57] [Rank 0] step:6961/10000 train_time:616183ms step_avg:88.52ms +[2025-08-22 09:23:59] [Rank 0] step:6981/10000 train_time:618052ms step_avg:88.53ms +[2025-08-22 09:23:59] [Rank 0] step:6981/10000 train_time:618052ms step_avg:88.53ms +[2025-08-22 09:24:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:24:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:24:14] [Rank 0] PRINT: step:7000/10000 val_loss:3.9415 svd_entropy: attn_qk:H=0.8747,top10E=0.09,eRank=346.6,q75/q25=82.02 attn_vo:H=0.5749,top10E=0.48,eRank=58.0,q75/q25=132.65 mlp_w1:H=0.7732,top10E=0.25,eRank=207.0,q75/q25=10.88 mlp_w2:H=0.8956,top10E=0.13,eRank=397.2,q75/q25=7.52 vo_prod:H=0.4769,top10E=0.67,eRank=29.4,q75/q25=18254.79 train_time:620013ms step_avg:88.57ms +[2025-08-22 09:24:14] [Rank 0] PRINT: step:7000/10000 val_loss:3.9415 svd_entropy: attn_qk:H=0.8747,top10E=0.09,eRank=346.6,q75/q25=82.02 attn_vo:H=0.5749,top10E=0.48,eRank=58.0,q75/q25=132.65 mlp_w1:H=0.7732,top10E=0.25,eRank=207.0,q75/q25=10.88 mlp_w2:H=0.8956,top10E=0.13,eRank=397.2,q75/q25=7.52 vo_prod:H=0.4769,top10E=0.67,eRank=29.4,q75/q25=18254.79 train_time:620013ms step_avg:88.57ms +[2025-08-22 09:24:14] [Rank 0] step:7001/10000 train_time:620028ms step_avg:88.56ms +[2025-08-22 09:24:14] [Rank 0] step:7001/10000 train_time:620028ms step_avg:88.56ms +[2025-08-22 09:24:16] [Rank 0] step:7021/10000 train_time:621802ms step_avg:88.56ms +[2025-08-22 09:24:16] [Rank 0] step:7021/10000 train_time:621802ms step_avg:88.56ms +[2025-08-22 09:24:18] [Rank 0] step:7041/10000 train_time:623660ms 
step_avg:88.58ms +[2025-08-22 09:24:18] [Rank 0] step:7041/10000 train_time:623660ms step_avg:88.58ms +[2025-08-22 09:24:20] [Rank 0] step:7061/10000 train_time:625521ms step_avg:88.59ms +[2025-08-22 09:24:20] [Rank 0] step:7061/10000 train_time:625521ms step_avg:88.59ms +[2025-08-22 09:24:22] [Rank 0] step:7081/10000 train_time:627380ms step_avg:88.60ms +[2025-08-22 09:24:22] [Rank 0] step:7081/10000 train_time:627380ms step_avg:88.60ms +[2025-08-22 09:24:24] [Rank 0] step:7101/10000 train_time:629244ms step_avg:88.61ms +[2025-08-22 09:24:24] [Rank 0] step:7101/10000 train_time:629244ms step_avg:88.61ms +[2025-08-22 09:24:26] [Rank 0] step:7121/10000 train_time:631105ms step_avg:88.63ms +[2025-08-22 09:24:26] [Rank 0] step:7121/10000 train_time:631105ms step_avg:88.63ms +[2025-08-22 09:24:27] [Rank 0] step:7141/10000 train_time:632967ms step_avg:88.64ms +[2025-08-22 09:24:27] [Rank 0] step:7141/10000 train_time:632967ms step_avg:88.64ms +[2025-08-22 09:24:29] [Rank 0] step:7161/10000 train_time:634834ms step_avg:88.65ms +[2025-08-22 09:24:29] [Rank 0] step:7161/10000 train_time:634834ms step_avg:88.65ms +[2025-08-22 09:24:31] [Rank 0] step:7181/10000 train_time:636698ms step_avg:88.66ms +[2025-08-22 09:24:31] [Rank 0] step:7181/10000 train_time:636698ms step_avg:88.66ms +[2025-08-22 09:24:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:24:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:24:47] [Rank 0] PRINT: step:7200/10000 val_loss:3.9273 svd_entropy: attn_qk:H=0.8751,top10E=0.09,eRank=347.3,q75/q25=82.28 attn_vo:H=0.5770,top10E=0.48,eRank=58.8,q75/q25=132.86 mlp_w1:H=0.7743,top10E=0.25,eRank=208.5,q75/q25=10.87 mlp_w2:H=0.8962,top10E=0.13,eRank=398.9,q75/q25=7.50 vo_prod:H=0.4789,top10E=0.67,eRank=29.8,q75/q25=18768.85 train_time:638657ms step_avg:88.70ms +[2025-08-22 09:24:47] [Rank 0] PRINT: step:7200/10000 val_loss:3.9273 svd_entropy: attn_qk:H=0.8751,top10E=0.09,eRank=347.3,q75/q25=82.28 attn_vo:H=0.5770,top10E=0.48,eRank=58.8,q75/q25=132.86 mlp_w1:H=0.7743,top10E=0.25,eRank=208.5,q75/q25=10.87 mlp_w2:H=0.8962,top10E=0.13,eRank=398.9,q75/q25=7.50 vo_prod:H=0.4789,top10E=0.67,eRank=29.8,q75/q25=18768.85 train_time:638657ms step_avg:88.70ms +[2025-08-22 09:24:47] [Rank 0] step:7201/10000 train_time:638672ms step_avg:88.69ms +[2025-08-22 09:24:47] [Rank 0] step:7201/10000 train_time:638672ms step_avg:88.69ms +[2025-08-22 09:24:49] [Rank 0] step:7221/10000 train_time:640460ms step_avg:88.69ms +[2025-08-22 09:24:49] [Rank 0] step:7221/10000 train_time:640460ms step_avg:88.69ms +[2025-08-22 09:24:50] [Rank 0] step:7241/10000 train_time:642318ms step_avg:88.71ms +[2025-08-22 09:24:50] [Rank 0] step:7241/10000 train_time:642318ms step_avg:88.71ms +[2025-08-22 09:24:52] [Rank 0] step:7261/10000 train_time:644175ms step_avg:88.72ms +[2025-08-22 09:24:52] [Rank 0] step:7261/10000 train_time:644175ms step_avg:88.72ms +[2025-08-22 09:24:54] [Rank 0] step:7281/10000 train_time:646044ms step_avg:88.73ms +[2025-08-22 09:24:54] [Rank 0] step:7281/10000 train_time:646044ms step_avg:88.73ms +[2025-08-22 09:24:56] [Rank 0] step:7301/10000 train_time:647905ms step_avg:88.74ms +[2025-08-22 09:24:56] [Rank 0] step:7301/10000 train_time:647905ms step_avg:88.74ms +[2025-08-22 09:24:58] [Rank 0] step:7321/10000 train_time:649780ms step_avg:88.76ms +[2025-08-22 09:24:58] [Rank 0] step:7321/10000 train_time:649780ms step_avg:88.76ms +[2025-08-22 
09:25:00] [Rank 0] step:7341/10000 train_time:651643ms step_avg:88.77ms +[2025-08-22 09:25:00] [Rank 0] step:7341/10000 train_time:651643ms step_avg:88.77ms +[2025-08-22 09:25:02] [Rank 0] step:7361/10000 train_time:653516ms step_avg:88.78ms +[2025-08-22 09:25:02] [Rank 0] step:7361/10000 train_time:653516ms step_avg:88.78ms +[2025-08-22 09:25:04] [Rank 0] step:7381/10000 train_time:655391ms step_avg:88.79ms +[2025-08-22 09:25:04] [Rank 0] step:7381/10000 train_time:655391ms step_avg:88.79ms +[2025-08-22 09:25:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:25:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:25:19] [Rank 0] PRINT: step:7400/10000 val_loss:3.9030 svd_entropy: attn_qk:H=0.8754,top10E=0.09,eRank=347.9,q75/q25=82.03 attn_vo:H=0.5787,top10E=0.48,eRank=59.6,q75/q25=132.69 mlp_w1:H=0.7752,top10E=0.25,eRank=209.9,q75/q25=10.87 mlp_w2:H=0.8968,top10E=0.13,eRank=400.4,q75/q25=7.47 vo_prod:H=0.4806,top10E=0.67,eRank=30.1,q75/q25=18678.12 train_time:657337ms step_avg:88.83ms +[2025-08-22 09:25:19] [Rank 0] PRINT: step:7400/10000 val_loss:3.9030 svd_entropy: attn_qk:H=0.8754,top10E=0.09,eRank=347.9,q75/q25=82.03 attn_vo:H=0.5787,top10E=0.48,eRank=59.6,q75/q25=132.69 mlp_w1:H=0.7752,top10E=0.25,eRank=209.9,q75/q25=10.87 mlp_w2:H=0.8968,top10E=0.13,eRank=400.4,q75/q25=7.47 vo_prod:H=0.4806,top10E=0.67,eRank=30.1,q75/q25=18678.12 train_time:657337ms step_avg:88.83ms +[2025-08-22 09:25:19] [Rank 0] step:7401/10000 train_time:657352ms step_avg:88.82ms +[2025-08-22 09:25:19] [Rank 0] step:7401/10000 train_time:657352ms step_avg:88.82ms +[2025-08-22 09:25:21] [Rank 0] step:7421/10000 train_time:659135ms step_avg:88.82ms +[2025-08-22 09:25:21] [Rank 0] step:7421/10000 train_time:659135ms step_avg:88.82ms +[2025-08-22 09:25:23] [Rank 0] step:7441/10000 train_time:661001ms 
step_avg:88.83ms +[2025-08-22 09:25:23] [Rank 0] step:7441/10000 train_time:661001ms step_avg:88.83ms +[2025-08-22 09:25:25] [Rank 0] step:7461/10000 train_time:662869ms step_avg:88.84ms +[2025-08-22 09:25:25] [Rank 0] step:7461/10000 train_time:662869ms step_avg:88.84ms +[2025-08-22 09:25:26] [Rank 0] step:7481/10000 train_time:664744ms step_avg:88.86ms +[2025-08-22 09:25:26] [Rank 0] step:7481/10000 train_time:664744ms step_avg:88.86ms +[2025-08-22 09:25:28] [Rank 0] step:7501/10000 train_time:666618ms step_avg:88.87ms +[2025-08-22 09:25:28] [Rank 0] step:7501/10000 train_time:666618ms step_avg:88.87ms +[2025-08-22 09:25:30] [Rank 0] step:7521/10000 train_time:668491ms step_avg:88.88ms +[2025-08-22 09:25:30] [Rank 0] step:7521/10000 train_time:668491ms step_avg:88.88ms +[2025-08-22 09:25:32] [Rank 0] step:7541/10000 train_time:670375ms step_avg:88.90ms +[2025-08-22 09:25:32] [Rank 0] step:7541/10000 train_time:670375ms step_avg:88.90ms +[2025-08-22 09:25:34] [Rank 0] step:7561/10000 train_time:672238ms step_avg:88.91ms +[2025-08-22 09:25:34] [Rank 0] step:7561/10000 train_time:672238ms step_avg:88.91ms +[2025-08-22 09:25:36] [Rank 0] step:7581/10000 train_time:674121ms step_avg:88.92ms +[2025-08-22 09:25:36] [Rank 0] step:7581/10000 train_time:674121ms step_avg:88.92ms +[2025-08-22 09:25:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:25:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:25:51] [Rank 0] PRINT: step:7600/10000 val_loss:3.9005 svd_entropy: attn_qk:H=0.8757,top10E=0.09,eRank=348.5,q75/q25=81.73 attn_vo:H=0.5803,top10E=0.47,eRank=60.3,q75/q25=131.43 mlp_w1:H=0.7761,top10E=0.25,eRank=211.2,q75/q25=10.84 mlp_w2:H=0.8974,top10E=0.13,eRank=401.8,q75/q25=7.43 vo_prod:H=0.4819,top10E=0.66,eRank=30.4,q75/q25=18457.70 train_time:676098ms step_avg:88.96ms +[2025-08-22 09:25:51] [Rank 0] PRINT: step:7600/10000 val_loss:3.9005 svd_entropy: attn_qk:H=0.8757,top10E=0.09,eRank=348.5,q75/q25=81.73 attn_vo:H=0.5803,top10E=0.47,eRank=60.3,q75/q25=131.43 mlp_w1:H=0.7761,top10E=0.25,eRank=211.2,q75/q25=10.84 mlp_w2:H=0.8974,top10E=0.13,eRank=401.8,q75/q25=7.43 vo_prod:H=0.4819,top10E=0.66,eRank=30.4,q75/q25=18457.70 train_time:676098ms step_avg:88.96ms +[2025-08-22 09:25:51] [Rank 0] step:7601/10000 train_time:676113ms step_avg:88.95ms +[2025-08-22 09:25:51] [Rank 0] step:7601/10000 train_time:676113ms step_avg:88.95ms +[2025-08-22 09:25:53] [Rank 0] step:7621/10000 train_time:677898ms step_avg:88.95ms +[2025-08-22 09:25:53] [Rank 0] step:7621/10000 train_time:677898ms step_avg:88.95ms +[2025-08-22 09:25:55] [Rank 0] step:7641/10000 train_time:679889ms step_avg:88.98ms +[2025-08-22 09:25:55] [Rank 0] step:7641/10000 train_time:679889ms step_avg:88.98ms +[2025-08-22 09:25:57] [Rank 0] step:7661/10000 train_time:681634ms step_avg:88.97ms +[2025-08-22 09:25:57] [Rank 0] step:7661/10000 train_time:681634ms step_avg:88.97ms +[2025-08-22 09:25:59] [Rank 0] step:7681/10000 train_time:683495ms step_avg:88.99ms +[2025-08-22 09:25:59] [Rank 0] step:7681/10000 train_time:683495ms step_avg:88.99ms +[2025-08-22 09:26:01] [Rank 0] step:7701/10000 train_time:685361ms step_avg:89.00ms +[2025-08-22 09:26:01] [Rank 0] step:7701/10000 train_time:685361ms step_avg:89.00ms +[2025-08-22 09:26:02] [Rank 0] step:7721/10000 train_time:687239ms step_avg:89.01ms +[2025-08-22 09:26:02] [Rank 0] step:7721/10000 train_time:687239ms step_avg:89.01ms +[2025-08-22 
09:26:04] [Rank 0] step:7741/10000 train_time:689109ms step_avg:89.02ms +[2025-08-22 09:26:04] [Rank 0] step:7741/10000 train_time:689109ms step_avg:89.02ms +[2025-08-22 09:26:06] [Rank 0] step:7761/10000 train_time:690985ms step_avg:89.03ms +[2025-08-22 09:26:06] [Rank 0] step:7761/10000 train_time:690985ms step_avg:89.03ms +[2025-08-22 09:26:08] [Rank 0] step:7781/10000 train_time:692854ms step_avg:89.04ms +[2025-08-22 09:26:08] [Rank 0] step:7781/10000 train_time:692854ms step_avg:89.04ms +[2025-08-22 09:26:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:26:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:26:24] [Rank 0] PRINT: step:7800/10000 val_loss:3.8812 svd_entropy: attn_qk:H=0.8760,top10E=0.09,eRank=349.1,q75/q25=81.94 attn_vo:H=0.5818,top10E=0.47,eRank=60.9,q75/q25=131.64 mlp_w1:H=0.7769,top10E=0.25,eRank=212.4,q75/q25=10.83 mlp_w2:H=0.8979,top10E=0.13,eRank=403.1,q75/q25=7.40 vo_prod:H=0.4835,top10E=0.66,eRank=30.8,q75/q25=18385.96 train_time:694829ms step_avg:89.08ms +[2025-08-22 09:26:24] [Rank 0] PRINT: step:7800/10000 val_loss:3.8812 svd_entropy: attn_qk:H=0.8760,top10E=0.09,eRank=349.1,q75/q25=81.94 attn_vo:H=0.5818,top10E=0.47,eRank=60.9,q75/q25=131.64 mlp_w1:H=0.7769,top10E=0.25,eRank=212.4,q75/q25=10.83 mlp_w2:H=0.8979,top10E=0.13,eRank=403.1,q75/q25=7.40 vo_prod:H=0.4835,top10E=0.66,eRank=30.8,q75/q25=18385.96 train_time:694829ms step_avg:89.08ms +[2025-08-22 09:26:24] [Rank 0] step:7801/10000 train_time:694845ms step_avg:89.07ms +[2025-08-22 09:26:24] [Rank 0] step:7801/10000 train_time:694845ms step_avg:89.07ms +[2025-08-22 09:26:25] [Rank 0] step:7821/10000 train_time:696611ms step_avg:89.07ms +[2025-08-22 09:26:25] [Rank 0] step:7821/10000 train_time:696611ms step_avg:89.07ms +[2025-08-22 09:26:27] [Rank 0] step:7841/10000 train_time:698472ms 
step_avg:89.08ms +[2025-08-22 09:26:27] [Rank 0] step:7841/10000 train_time:698472ms step_avg:89.08ms +[2025-08-22 09:26:29] [Rank 0] step:7861/10000 train_time:700344ms step_avg:89.09ms +[2025-08-22 09:26:29] [Rank 0] step:7861/10000 train_time:700344ms step_avg:89.09ms +[2025-08-22 09:26:31] [Rank 0] step:7881/10000 train_time:702219ms step_avg:89.10ms +[2025-08-22 09:26:31] [Rank 0] step:7881/10000 train_time:702219ms step_avg:89.10ms +[2025-08-22 09:26:33] [Rank 0] step:7901/10000 train_time:704086ms step_avg:89.11ms +[2025-08-22 09:26:33] [Rank 0] step:7901/10000 train_time:704086ms step_avg:89.11ms +[2025-08-22 09:26:35] [Rank 0] step:7921/10000 train_time:705960ms step_avg:89.13ms +[2025-08-22 09:26:35] [Rank 0] step:7921/10000 train_time:705960ms step_avg:89.13ms +[2025-08-22 09:26:37] [Rank 0] step:7941/10000 train_time:707835ms step_avg:89.14ms +[2025-08-22 09:26:37] [Rank 0] step:7941/10000 train_time:707835ms step_avg:89.14ms +[2025-08-22 09:26:39] [Rank 0] step:7961/10000 train_time:709708ms step_avg:89.15ms +[2025-08-22 09:26:39] [Rank 0] step:7961/10000 train_time:709708ms step_avg:89.15ms +[2025-08-22 09:26:40] [Rank 0] step:7981/10000 train_time:711570ms step_avg:89.16ms +[2025-08-22 09:26:40] [Rank 0] step:7981/10000 train_time:711570ms step_avg:89.16ms +[2025-08-22 09:26:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:26:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:26:56] [Rank 0] PRINT: step:8000/10000 val_loss:3.8605 svd_entropy: attn_qk:H=0.8762,top10E=0.09,eRank=349.6,q75/q25=81.14 attn_vo:H=0.5832,top10E=0.47,eRank=61.5,q75/q25=131.49 mlp_w1:H=0.7777,top10E=0.25,eRank=213.5,q75/q25=10.80 mlp_w2:H=0.8984,top10E=0.13,eRank=404.3,q75/q25=7.37 vo_prod:H=0.4848,top10E=0.66,eRank=31.0,q75/q25=18362.37 train_time:713538ms step_avg:89.19ms +[2025-08-22 09:26:56] [Rank 0] PRINT: step:8000/10000 val_loss:3.8605 svd_entropy: attn_qk:H=0.8762,top10E=0.09,eRank=349.6,q75/q25=81.14 attn_vo:H=0.5832,top10E=0.47,eRank=61.5,q75/q25=131.49 mlp_w1:H=0.7777,top10E=0.25,eRank=213.5,q75/q25=10.80 mlp_w2:H=0.8984,top10E=0.13,eRank=404.3,q75/q25=7.37 vo_prod:H=0.4848,top10E=0.66,eRank=31.0,q75/q25=18362.37 train_time:713538ms step_avg:89.19ms +[2025-08-22 09:26:56] [Rank 0] step:8001/10000 train_time:713553ms step_avg:89.18ms +[2025-08-22 09:26:56] [Rank 0] step:8001/10000 train_time:713553ms step_avg:89.18ms +[2025-08-22 09:26:58] [Rank 0] step:8021/10000 train_time:715416ms step_avg:89.19ms +[2025-08-22 09:26:58] [Rank 0] step:8021/10000 train_time:715416ms step_avg:89.19ms +[2025-08-22 09:27:00] [Rank 0] step:8041/10000 train_time:717307ms step_avg:89.21ms +[2025-08-22 09:27:00] [Rank 0] step:8041/10000 train_time:717307ms step_avg:89.21ms +[2025-08-22 09:27:02] [Rank 0] step:8061/10000 train_time:719175ms step_avg:89.22ms +[2025-08-22 09:27:02] [Rank 0] step:8061/10000 train_time:719175ms step_avg:89.22ms +[2025-08-22 09:27:04] [Rank 0] step:8081/10000 train_time:721036ms step_avg:89.23ms +[2025-08-22 09:27:04] [Rank 0] step:8081/10000 train_time:721036ms step_avg:89.23ms +[2025-08-22 09:27:05] [Rank 0] step:8101/10000 train_time:722914ms step_avg:89.24ms +[2025-08-22 09:27:05] [Rank 0] step:8101/10000 train_time:722914ms step_avg:89.24ms +[2025-08-22 09:27:07] [Rank 0] step:8121/10000 train_time:724780ms step_avg:89.25ms +[2025-08-22 09:27:07] [Rank 0] step:8121/10000 train_time:724780ms step_avg:89.25ms +[2025-08-22 
09:27:09] [Rank 0] step:8141/10000 train_time:726807ms step_avg:89.28ms +[2025-08-22 09:27:09] [Rank 0] step:8141/10000 train_time:726807ms step_avg:89.28ms +[2025-08-22 09:27:11] [Rank 0] step:8161/10000 train_time:728692ms step_avg:89.29ms +[2025-08-22 09:27:11] [Rank 0] step:8161/10000 train_time:728692ms step_avg:89.29ms +[2025-08-22 09:27:13] [Rank 0] step:8181/10000 train_time:730591ms step_avg:89.30ms +[2025-08-22 09:27:13] [Rank 0] step:8181/10000 train_time:730591ms step_avg:89.30ms +[2025-08-22 09:27:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:27:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:27:29] [Rank 0] PRINT: step:8200/10000 val_loss:3.8494 svd_entropy: attn_qk:H=0.8764,top10E=0.09,eRank=350.0,q75/q25=81.44 attn_vo:H=0.5845,top10E=0.47,eRank=62.0,q75/q25=132.02 mlp_w1:H=0.7784,top10E=0.25,eRank=214.5,q75/q25=10.81 mlp_w2:H=0.8989,top10E=0.13,eRank=405.4,q75/q25=7.35 vo_prod:H=0.4862,top10E=0.65,eRank=31.3,q75/q25=18445.11 train_time:732611ms step_avg:89.34ms +[2025-08-22 09:27:29] [Rank 0] PRINT: step:8200/10000 val_loss:3.8494 svd_entropy: attn_qk:H=0.8764,top10E=0.09,eRank=350.0,q75/q25=81.44 attn_vo:H=0.5845,top10E=0.47,eRank=62.0,q75/q25=132.02 mlp_w1:H=0.7784,top10E=0.25,eRank=214.5,q75/q25=10.81 mlp_w2:H=0.8989,top10E=0.13,eRank=405.4,q75/q25=7.35 vo_prod:H=0.4862,top10E=0.65,eRank=31.3,q75/q25=18445.11 train_time:732611ms step_avg:89.34ms +[2025-08-22 09:27:29] [Rank 0] step:8201/10000 train_time:732627ms step_avg:89.33ms +[2025-08-22 09:27:29] [Rank 0] step:8201/10000 train_time:732627ms step_avg:89.33ms +[2025-08-22 09:27:31] [Rank 0] step:8221/10000 train_time:734441ms step_avg:89.34ms +[2025-08-22 09:27:31] [Rank 0] step:8221/10000 train_time:734441ms step_avg:89.34ms +[2025-08-22 09:27:33] [Rank 0] step:8241/10000 train_time:736345ms 
step_avg:89.35ms +[2025-08-22 09:27:33] [Rank 0] step:8241/10000 train_time:736345ms step_avg:89.35ms +[2025-08-22 09:27:34] [Rank 0] step:8261/10000 train_time:738250ms step_avg:89.37ms +[2025-08-22 09:27:34] [Rank 0] step:8261/10000 train_time:738250ms step_avg:89.37ms +[2025-08-22 09:27:36] [Rank 0] step:8281/10000 train_time:740150ms step_avg:89.38ms +[2025-08-22 09:27:36] [Rank 0] step:8281/10000 train_time:740150ms step_avg:89.38ms +[2025-08-22 09:27:38] [Rank 0] step:8301/10000 train_time:742049ms step_avg:89.39ms +[2025-08-22 09:27:38] [Rank 0] step:8301/10000 train_time:742049ms step_avg:89.39ms +[2025-08-22 09:27:40] [Rank 0] step:8321/10000 train_time:743944ms step_avg:89.41ms +[2025-08-22 09:27:40] [Rank 0] step:8321/10000 train_time:743944ms step_avg:89.41ms +[2025-08-22 09:27:42] [Rank 0] step:8341/10000 train_time:745852ms step_avg:89.42ms +[2025-08-22 09:27:42] [Rank 0] step:8341/10000 train_time:745852ms step_avg:89.42ms +[2025-08-22 09:27:44] [Rank 0] step:8361/10000 train_time:747756ms step_avg:89.43ms +[2025-08-22 09:27:44] [Rank 0] step:8361/10000 train_time:747756ms step_avg:89.43ms +[2025-08-22 09:27:46] [Rank 0] step:8381/10000 train_time:749657ms step_avg:89.45ms +[2025-08-22 09:27:46] [Rank 0] step:8381/10000 train_time:749657ms step_avg:89.45ms +[2025-08-22 09:27:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:27:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:28:01] [Rank 0] PRINT: step:8400/10000 val_loss:3.8332 svd_entropy: attn_qk:H=0.8766,top10E=0.09,eRank=350.4,q75/q25=80.90 attn_vo:H=0.5856,top10E=0.46,eRank=62.5,q75/q25=132.48 mlp_w1:H=0.7790,top10E=0.25,eRank=215.4,q75/q25=10.80 mlp_w2:H=0.8993,top10E=0.13,eRank=406.4,q75/q25=7.33 vo_prod:H=0.4872,top10E=0.65,eRank=31.5,q75/q25=18487.64 train_time:751652ms step_avg:89.48ms +[2025-08-22 09:28:01] [Rank 0] PRINT: step:8400/10000 val_loss:3.8332 svd_entropy: attn_qk:H=0.8766,top10E=0.09,eRank=350.4,q75/q25=80.90 attn_vo:H=0.5856,top10E=0.46,eRank=62.5,q75/q25=132.48 mlp_w1:H=0.7790,top10E=0.25,eRank=215.4,q75/q25=10.80 mlp_w2:H=0.8993,top10E=0.13,eRank=406.4,q75/q25=7.33 vo_prod:H=0.4872,top10E=0.65,eRank=31.5,q75/q25=18487.64 train_time:751652ms step_avg:89.48ms +[2025-08-22 09:28:01] [Rank 0] step:8401/10000 train_time:751668ms step_avg:89.47ms +[2025-08-22 09:28:01] [Rank 0] step:8401/10000 train_time:751668ms step_avg:89.47ms +[2025-08-22 09:28:03] [Rank 0] step:8421/10000 train_time:753464ms step_avg:89.47ms +[2025-08-22 09:28:03] [Rank 0] step:8421/10000 train_time:753464ms step_avg:89.47ms +[2025-08-22 09:28:05] [Rank 0] step:8441/10000 train_time:755356ms step_avg:89.49ms +[2025-08-22 09:28:05] [Rank 0] step:8441/10000 train_time:755356ms step_avg:89.49ms +[2025-08-22 09:28:07] [Rank 0] step:8461/10000 train_time:757251ms step_avg:89.50ms +[2025-08-22 09:28:07] [Rank 0] step:8461/10000 train_time:757251ms step_avg:89.50ms +[2025-08-22 09:28:09] [Rank 0] step:8481/10000 train_time:759151ms step_avg:89.51ms +[2025-08-22 09:28:09] [Rank 0] step:8481/10000 train_time:759151ms step_avg:89.51ms +[2025-08-22 09:28:11] [Rank 0] step:8501/10000 train_time:761071ms step_avg:89.53ms +[2025-08-22 09:28:11] [Rank 0] step:8501/10000 train_time:761071ms step_avg:89.53ms +[2025-08-22 09:28:13] [Rank 0] step:8521/10000 train_time:762973ms step_avg:89.54ms +[2025-08-22 09:28:13] [Rank 0] step:8521/10000 train_time:762973ms step_avg:89.54ms +[2025-08-22 
09:28:15] [Rank 0] step:8541/10000 train_time:764886ms step_avg:89.55ms +[2025-08-22 09:28:15] [Rank 0] step:8541/10000 train_time:764886ms step_avg:89.55ms +[2025-08-22 09:28:17] [Rank 0] step:8561/10000 train_time:766789ms step_avg:89.57ms +[2025-08-22 09:28:17] [Rank 0] step:8561/10000 train_time:766789ms step_avg:89.57ms +[2025-08-22 09:28:19] [Rank 0] step:8581/10000 train_time:768692ms step_avg:89.58ms +[2025-08-22 09:28:19] [Rank 0] step:8581/10000 train_time:768692ms step_avg:89.58ms +[2025-08-22 09:28:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:28:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:28:34] [Rank 0] PRINT: step:8600/10000 val_loss:3.8227 svd_entropy: attn_qk:H=0.8768,top10E=0.09,eRank=350.6,q75/q25=80.78 attn_vo:H=0.5865,top10E=0.46,eRank=62.9,q75/q25=130.44 mlp_w1:H=0.7795,top10E=0.25,eRank=216.2,q75/q25=10.78 mlp_w2:H=0.8996,top10E=0.13,eRank=407.3,q75/q25=7.32 vo_prod:H=0.4882,top10E=0.65,eRank=31.7,q75/q25=18035.88 train_time:770679ms step_avg:89.61ms +[2025-08-22 09:28:34] [Rank 0] PRINT: step:8600/10000 val_loss:3.8227 svd_entropy: attn_qk:H=0.8768,top10E=0.09,eRank=350.6,q75/q25=80.78 attn_vo:H=0.5865,top10E=0.46,eRank=62.9,q75/q25=130.44 mlp_w1:H=0.7795,top10E=0.25,eRank=216.2,q75/q25=10.78 mlp_w2:H=0.8996,top10E=0.13,eRank=407.3,q75/q25=7.32 vo_prod:H=0.4882,top10E=0.65,eRank=31.7,q75/q25=18035.88 train_time:770679ms step_avg:89.61ms +[2025-08-22 09:28:34] [Rank 0] step:8601/10000 train_time:770694ms step_avg:89.61ms +[2025-08-22 09:28:34] [Rank 0] step:8601/10000 train_time:770694ms step_avg:89.61ms +[2025-08-22 09:28:36] [Rank 0] step:8621/10000 train_time:772511ms step_avg:89.61ms +[2025-08-22 09:28:36] [Rank 0] step:8621/10000 train_time:772511ms step_avg:89.61ms +[2025-08-22 09:28:38] [Rank 0] step:8641/10000 train_time:774400ms 
step_avg:89.62ms +[2025-08-22 09:28:38] [Rank 0] step:8641/10000 train_time:774400ms step_avg:89.62ms +[2025-08-22 09:28:40] [Rank 0] step:8661/10000 train_time:776299ms step_avg:89.63ms +[2025-08-22 09:28:40] [Rank 0] step:8661/10000 train_time:776299ms step_avg:89.63ms +[2025-08-22 09:28:42] [Rank 0] step:8681/10000 train_time:778193ms step_avg:89.64ms +[2025-08-22 09:28:42] [Rank 0] step:8681/10000 train_time:778193ms step_avg:89.64ms +[2025-08-22 09:28:44] [Rank 0] step:8701/10000 train_time:780085ms step_avg:89.65ms +[2025-08-22 09:28:44] [Rank 0] step:8701/10000 train_time:780085ms step_avg:89.65ms +[2025-08-22 09:28:46] [Rank 0] step:8721/10000 train_time:781986ms step_avg:89.67ms +[2025-08-22 09:28:46] [Rank 0] step:8721/10000 train_time:781986ms step_avg:89.67ms +[2025-08-22 09:28:47] [Rank 0] step:8741/10000 train_time:783874ms step_avg:89.68ms +[2025-08-22 09:28:47] [Rank 0] step:8741/10000 train_time:783874ms step_avg:89.68ms +[2025-08-22 09:28:49] [Rank 0] step:8761/10000 train_time:785772ms step_avg:89.69ms +[2025-08-22 09:28:49] [Rank 0] step:8761/10000 train_time:785772ms step_avg:89.69ms +[2025-08-22 09:28:51] [Rank 0] step:8781/10000 train_time:787673ms step_avg:89.70ms +[2025-08-22 09:28:51] [Rank 0] step:8781/10000 train_time:787673ms step_avg:89.70ms +[2025-08-22 09:28:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:28:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:29:07] [Rank 0] PRINT: step:8800/10000 val_loss:3.8088 svd_entropy: attn_qk:H=0.8769,top10E=0.09,eRank=350.9,q75/q25=80.76 attn_vo:H=0.5874,top10E=0.46,eRank=63.3,q75/q25=130.16 mlp_w1:H=0.7799,top10E=0.25,eRank=216.8,q75/q25=10.76 mlp_w2:H=0.9000,top10E=0.13,eRank=408.1,q75/q25=7.30 vo_prod:H=0.4890,top10E=0.65,eRank=31.9,q75/q25=18295.49 train_time:789668ms step_avg:89.73ms +[2025-08-22 09:29:07] [Rank 0] PRINT: step:8800/10000 val_loss:3.8088 svd_entropy: attn_qk:H=0.8769,top10E=0.09,eRank=350.9,q75/q25=80.76 attn_vo:H=0.5874,top10E=0.46,eRank=63.3,q75/q25=130.16 mlp_w1:H=0.7799,top10E=0.25,eRank=216.8,q75/q25=10.76 mlp_w2:H=0.9000,top10E=0.13,eRank=408.1,q75/q25=7.30 vo_prod:H=0.4890,top10E=0.65,eRank=31.9,q75/q25=18295.49 train_time:789668ms step_avg:89.73ms +[2025-08-22 09:29:07] [Rank 0] step:8801/10000 train_time:789683ms step_avg:89.73ms +[2025-08-22 09:29:07] [Rank 0] step:8801/10000 train_time:789683ms step_avg:89.73ms +[2025-08-22 09:29:09] [Rank 0] step:8821/10000 train_time:791480ms step_avg:89.73ms +[2025-08-22 09:29:09] [Rank 0] step:8821/10000 train_time:791480ms step_avg:89.73ms +[2025-08-22 09:29:11] [Rank 0] step:8841/10000 train_time:793399ms step_avg:89.74ms +[2025-08-22 09:29:11] [Rank 0] step:8841/10000 train_time:793399ms step_avg:89.74ms +[2025-08-22 09:29:13] [Rank 0] step:8861/10000 train_time:795289ms step_avg:89.75ms +[2025-08-22 09:29:13] [Rank 0] step:8861/10000 train_time:795289ms step_avg:89.75ms +[2025-08-22 09:29:14] [Rank 0] step:8881/10000 train_time:797187ms step_avg:89.76ms +[2025-08-22 09:29:14] [Rank 0] step:8881/10000 train_time:797187ms step_avg:89.76ms +[2025-08-22 09:29:16] [Rank 0] step:8901/10000 train_time:799089ms step_avg:89.78ms +[2025-08-22 09:29:16] [Rank 0] step:8901/10000 train_time:799089ms step_avg:89.78ms +[2025-08-22 09:29:18] [Rank 0] step:8921/10000 train_time:801004ms step_avg:89.79ms +[2025-08-22 09:29:18] [Rank 0] step:8921/10000 train_time:801004ms step_avg:89.79ms +[2025-08-22 
09:29:20] [Rank 0] step:8941/10000 train_time:802915ms step_avg:89.80ms +[2025-08-22 09:29:20] [Rank 0] step:8941/10000 train_time:802915ms step_avg:89.80ms +[2025-08-22 09:29:22] [Rank 0] step:8961/10000 train_time:804815ms step_avg:89.81ms +[2025-08-22 09:29:22] [Rank 0] step:8961/10000 train_time:804815ms step_avg:89.81ms +[2025-08-22 09:29:24] [Rank 0] step:8981/10000 train_time:806715ms step_avg:89.82ms +[2025-08-22 09:29:24] [Rank 0] step:8981/10000 train_time:806715ms step_avg:89.82ms +[2025-08-22 09:29:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:29:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:29:39] [Rank 0] PRINT: step:9000/10000 val_loss:3.7973 svd_entropy: attn_qk:H=0.8770,top10E=0.09,eRank=351.2,q75/q25=80.64 attn_vo:H=0.5882,top10E=0.46,eRank=63.7,q75/q25=130.98 mlp_w1:H=0.7803,top10E=0.25,eRank=217.5,q75/q25=10.73 mlp_w2:H=0.9003,top10E=0.12,eRank=408.9,q75/q25=7.29 vo_prod:H=0.4897,top10E=0.65,eRank=32.1,q75/q25=18472.14 train_time:808710ms step_avg:89.86ms +[2025-08-22 09:29:39] [Rank 0] PRINT: step:9000/10000 val_loss:3.7973 svd_entropy: attn_qk:H=0.8770,top10E=0.09,eRank=351.2,q75/q25=80.64 attn_vo:H=0.5882,top10E=0.46,eRank=63.7,q75/q25=130.98 mlp_w1:H=0.7803,top10E=0.25,eRank=217.5,q75/q25=10.73 mlp_w2:H=0.9003,top10E=0.12,eRank=408.9,q75/q25=7.29 vo_prod:H=0.4897,top10E=0.65,eRank=32.1,q75/q25=18472.14 train_time:808710ms step_avg:89.86ms +[2025-08-22 09:29:40] [Rank 0] step:9001/10000 train_time:808725ms step_avg:89.85ms +[2025-08-22 09:29:40] [Rank 0] step:9001/10000 train_time:808725ms step_avg:89.85ms +[2025-08-22 09:29:41] [Rank 0] step:9021/10000 train_time:810522ms step_avg:89.85ms +[2025-08-22 09:29:41] [Rank 0] step:9021/10000 train_time:810522ms step_avg:89.85ms +[2025-08-22 09:29:43] [Rank 0] step:9041/10000 train_time:812419ms 
step_avg:89.86ms +[2025-08-22 09:29:43] [Rank 0] step:9041/10000 train_time:812419ms step_avg:89.86ms +[2025-08-22 09:29:45] [Rank 0] step:9061/10000 train_time:814322ms step_avg:89.87ms +[2025-08-22 09:29:45] [Rank 0] step:9061/10000 train_time:814322ms step_avg:89.87ms +[2025-08-22 09:29:47] [Rank 0] step:9081/10000 train_time:816230ms step_avg:89.88ms +[2025-08-22 09:29:47] [Rank 0] step:9081/10000 train_time:816230ms step_avg:89.88ms +[2025-08-22 09:29:49] [Rank 0] step:9101/10000 train_time:818145ms step_avg:89.90ms +[2025-08-22 09:29:49] [Rank 0] step:9101/10000 train_time:818145ms step_avg:89.90ms +[2025-08-22 09:29:51] [Rank 0] step:9121/10000 train_time:820048ms step_avg:89.91ms +[2025-08-22 09:29:51] [Rank 0] step:9121/10000 train_time:820048ms step_avg:89.91ms +[2025-08-22 09:29:53] [Rank 0] step:9141/10000 train_time:821939ms step_avg:89.92ms +[2025-08-22 09:29:53] [Rank 0] step:9141/10000 train_time:821939ms step_avg:89.92ms +[2025-08-22 09:29:55] [Rank 0] step:9161/10000 train_time:823832ms step_avg:89.93ms +[2025-08-22 09:29:55] [Rank 0] step:9161/10000 train_time:823832ms step_avg:89.93ms +[2025-08-22 09:29:57] [Rank 0] step:9181/10000 train_time:825765ms step_avg:89.94ms +[2025-08-22 09:29:57] [Rank 0] step:9181/10000 train_time:825765ms step_avg:89.94ms +[2025-08-22 09:29:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:29:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:30:12] [Rank 0] PRINT: step:9200/10000 val_loss:3.7876 svd_entropy: attn_qk:H=0.8771,top10E=0.09,eRank=351.4,q75/q25=80.69 attn_vo:H=0.5889,top10E=0.46,eRank=64.0,q75/q25=130.98 mlp_w1:H=0.7807,top10E=0.24,eRank=218.0,q75/q25=10.72 mlp_w2:H=0.9005,top10E=0.12,eRank=409.5,q75/q25=7.27 vo_prod:H=0.4905,top10E=0.65,eRank=32.2,q75/q25=18560.30 train_time:827755ms step_avg:89.97ms +[2025-08-22 09:30:12] [Rank 0] PRINT: step:9200/10000 val_loss:3.7876 svd_entropy: attn_qk:H=0.8771,top10E=0.09,eRank=351.4,q75/q25=80.69 attn_vo:H=0.5889,top10E=0.46,eRank=64.0,q75/q25=130.98 mlp_w1:H=0.7807,top10E=0.24,eRank=218.0,q75/q25=10.72 mlp_w2:H=0.9005,top10E=0.12,eRank=409.5,q75/q25=7.27 vo_prod:H=0.4905,top10E=0.65,eRank=32.2,q75/q25=18560.30 train_time:827755ms step_avg:89.97ms +[2025-08-22 09:30:12] [Rank 0] step:9201/10000 train_time:827769ms step_avg:89.97ms +[2025-08-22 09:30:12] [Rank 0] step:9201/10000 train_time:827769ms step_avg:89.97ms +[2025-08-22 09:30:14] [Rank 0] step:9221/10000 train_time:829597ms step_avg:89.97ms +[2025-08-22 09:30:14] [Rank 0] step:9221/10000 train_time:829597ms step_avg:89.97ms +[2025-08-22 09:30:16] [Rank 0] step:9241/10000 train_time:831504ms step_avg:89.98ms +[2025-08-22 09:30:16] [Rank 0] step:9241/10000 train_time:831504ms step_avg:89.98ms +[2025-08-22 09:30:18] [Rank 0] step:9261/10000 train_time:833411ms step_avg:89.99ms +[2025-08-22 09:30:18] [Rank 0] step:9261/10000 train_time:833411ms step_avg:89.99ms +[2025-08-22 09:30:20] [Rank 0] step:9281/10000 train_time:835299ms step_avg:90.00ms +[2025-08-22 09:30:20] [Rank 0] step:9281/10000 train_time:835299ms step_avg:90.00ms +[2025-08-22 09:30:22] [Rank 0] step:9301/10000 train_time:837193ms step_avg:90.01ms +[2025-08-22 09:30:22] [Rank 0] step:9301/10000 train_time:837193ms step_avg:90.01ms +[2025-08-22 09:30:24] [Rank 0] step:9321/10000 train_time:839098ms step_avg:90.02ms +[2025-08-22 09:30:24] [Rank 0] step:9321/10000 train_time:839098ms step_avg:90.02ms +[2025-08-22 
09:30:26] [Rank 0] step:9341/10000 train_time:840995ms step_avg:90.03ms +[2025-08-22 09:30:26] [Rank 0] step:9341/10000 train_time:840995ms step_avg:90.03ms +[2025-08-22 09:30:28] [Rank 0] step:9361/10000 train_time:842898ms step_avg:90.04ms +[2025-08-22 09:30:28] [Rank 0] step:9361/10000 train_time:842898ms step_avg:90.04ms +[2025-08-22 09:30:29] [Rank 0] step:9381/10000 train_time:844813ms step_avg:90.06ms +[2025-08-22 09:30:29] [Rank 0] step:9381/10000 train_time:844813ms step_avg:90.06ms +[2025-08-22 09:30:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:30:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:30:45] [Rank 0] PRINT: step:9400/10000 val_loss:3.7788 svd_entropy: attn_qk:H=0.8772,top10E=0.09,eRank=351.5,q75/q25=80.40 attn_vo:H=0.5895,top10E=0.46,eRank=64.3,q75/q25=131.14 mlp_w1:H=0.7810,top10E=0.24,eRank=218.5,q75/q25=10.71 mlp_w2:H=0.9008,top10E=0.12,eRank=410.1,q75/q25=7.25 vo_prod:H=0.4912,top10E=0.64,eRank=32.4,q75/q25=18546.21 train_time:846813ms step_avg:90.09ms +[2025-08-22 09:30:45] [Rank 0] PRINT: step:9400/10000 val_loss:3.7788 svd_entropy: attn_qk:H=0.8772,top10E=0.09,eRank=351.5,q75/q25=80.40 attn_vo:H=0.5895,top10E=0.46,eRank=64.3,q75/q25=131.14 mlp_w1:H=0.7810,top10E=0.24,eRank=218.5,q75/q25=10.71 mlp_w2:H=0.9008,top10E=0.12,eRank=410.1,q75/q25=7.25 vo_prod:H=0.4912,top10E=0.64,eRank=32.4,q75/q25=18546.21 train_time:846813ms step_avg:90.09ms +[2025-08-22 09:30:45] [Rank 0] step:9401/10000 train_time:846827ms step_avg:90.08ms +[2025-08-22 09:30:45] [Rank 0] step:9401/10000 train_time:846827ms step_avg:90.08ms +[2025-08-22 09:30:47] [Rank 0] step:9421/10000 train_time:848651ms step_avg:90.08ms +[2025-08-22 09:30:47] [Rank 0] step:9421/10000 train_time:848651ms step_avg:90.08ms +[2025-08-22 09:30:49] [Rank 0] step:9441/10000 train_time:850549ms 
step_avg:90.09ms +[2025-08-22 09:30:49] [Rank 0] step:9441/10000 train_time:850549ms step_avg:90.09ms +[2025-08-22 09:30:51] [Rank 0] step:9461/10000 train_time:852451ms step_avg:90.10ms +[2025-08-22 09:30:51] [Rank 0] step:9461/10000 train_time:852451ms step_avg:90.10ms +[2025-08-22 09:30:53] [Rank 0] step:9481/10000 train_time:854350ms step_avg:90.11ms +[2025-08-22 09:30:53] [Rank 0] step:9481/10000 train_time:854350ms step_avg:90.11ms +[2025-08-22 09:30:55] [Rank 0] step:9501/10000 train_time:856265ms step_avg:90.12ms +[2025-08-22 09:30:55] [Rank 0] step:9501/10000 train_time:856265ms step_avg:90.12ms +[2025-08-22 09:30:56] [Rank 0] step:9521/10000 train_time:858157ms step_avg:90.13ms +[2025-08-22 09:30:56] [Rank 0] step:9521/10000 train_time:858157ms step_avg:90.13ms +[2025-08-22 09:30:58] [Rank 0] step:9541/10000 train_time:860056ms step_avg:90.14ms +[2025-08-22 09:30:58] [Rank 0] step:9541/10000 train_time:860056ms step_avg:90.14ms +[2025-08-22 09:31:00] [Rank 0] step:9561/10000 train_time:861951ms step_avg:90.15ms +[2025-08-22 09:31:00] [Rank 0] step:9561/10000 train_time:861951ms step_avg:90.15ms +[2025-08-22 09:31:02] [Rank 0] step:9581/10000 train_time:863852ms step_avg:90.16ms +[2025-08-22 09:31:02] [Rank 0] step:9581/10000 train_time:863852ms step_avg:90.16ms +[2025-08-22 09:31:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:31:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:31:17] [Rank 0] PRINT: step:9600/10000 val_loss:3.7693 svd_entropy: attn_qk:H=0.8773,top10E=0.09,eRank=351.6,q75/q25=80.49 attn_vo:H=0.5900,top10E=0.46,eRank=64.5,q75/q25=131.62 mlp_w1:H=0.7812,top10E=0.24,eRank=218.8,q75/q25=10.70 mlp_w2:H=0.9010,top10E=0.12,eRank=410.6,q75/q25=7.25 vo_prod:H=0.4916,top10E=0.64,eRank=32.5,q75/q25=18734.16 train_time:865864ms step_avg:90.19ms +[2025-08-22 09:31:17] [Rank 0] PRINT: step:9600/10000 val_loss:3.7693 svd_entropy: attn_qk:H=0.8773,top10E=0.09,eRank=351.6,q75/q25=80.49 attn_vo:H=0.5900,top10E=0.46,eRank=64.5,q75/q25=131.62 mlp_w1:H=0.7812,top10E=0.24,eRank=218.8,q75/q25=10.70 mlp_w2:H=0.9010,top10E=0.12,eRank=410.6,q75/q25=7.25 vo_prod:H=0.4916,top10E=0.64,eRank=32.5,q75/q25=18734.16 train_time:865864ms step_avg:90.19ms +[2025-08-22 09:31:17] [Rank 0] step:9601/10000 train_time:865879ms step_avg:90.19ms +[2025-08-22 09:31:17] [Rank 0] step:9601/10000 train_time:865879ms step_avg:90.19ms +[2025-08-22 09:31:19] [Rank 0] step:9621/10000 train_time:867704ms step_avg:90.19ms +[2025-08-22 09:31:19] [Rank 0] step:9621/10000 train_time:867704ms step_avg:90.19ms +[2025-08-22 09:31:21] [Rank 0] step:9641/10000 train_time:869607ms step_avg:90.20ms +[2025-08-22 09:31:21] [Rank 0] step:9641/10000 train_time:869607ms step_avg:90.20ms +[2025-08-22 09:31:23] [Rank 0] step:9661/10000 train_time:871541ms step_avg:90.21ms +[2025-08-22 09:31:23] [Rank 0] step:9661/10000 train_time:871541ms step_avg:90.21ms +[2025-08-22 09:31:25] [Rank 0] step:9681/10000 train_time:873467ms step_avg:90.22ms +[2025-08-22 09:31:25] [Rank 0] step:9681/10000 train_time:873467ms step_avg:90.22ms +[2025-08-22 09:31:27] [Rank 0] step:9701/10000 train_time:875406ms step_avg:90.24ms +[2025-08-22 09:31:27] [Rank 0] step:9701/10000 train_time:875406ms step_avg:90.24ms +[2025-08-22 09:31:29] [Rank 0] step:9721/10000 train_time:877327ms step_avg:90.25ms +[2025-08-22 09:31:29] [Rank 0] step:9721/10000 train_time:877327ms step_avg:90.25ms +[2025-08-22 
09:31:31] [Rank 0] step:9741/10000 train_time:879280ms step_avg:90.27ms +[2025-08-22 09:31:31] [Rank 0] step:9741/10000 train_time:879280ms step_avg:90.27ms +[2025-08-22 09:31:33] [Rank 0] step:9761/10000 train_time:881213ms step_avg:90.28ms +[2025-08-22 09:31:33] [Rank 0] step:9761/10000 train_time:881213ms step_avg:90.28ms +[2025-08-22 09:31:35] [Rank 0] step:9781/10000 train_time:883152ms step_avg:90.29ms +[2025-08-22 09:31:35] [Rank 0] step:9781/10000 train_time:883152ms step_avg:90.29ms +[2025-08-22 09:31:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:31:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:31:50] [Rank 0] PRINT: step:9800/10000 val_loss:3.7606 svd_entropy: attn_qk:H=0.8773,top10E=0.09,eRank=351.7,q75/q25=80.50 attn_vo:H=0.5903,top10E=0.45,eRank=64.6,q75/q25=131.78 mlp_w1:H=0.7814,top10E=0.24,eRank=219.1,q75/q25=10.69 mlp_w2:H=0.9011,top10E=0.12,eRank=411.0,q75/q25=7.24 vo_prod:H=0.4920,top10E=0.64,eRank=32.5,q75/q25=18773.96 train_time:885200ms step_avg:90.33ms +[2025-08-22 09:31:50] [Rank 0] PRINT: step:9800/10000 val_loss:3.7606 svd_entropy: attn_qk:H=0.8773,top10E=0.09,eRank=351.7,q75/q25=80.50 attn_vo:H=0.5903,top10E=0.45,eRank=64.6,q75/q25=131.78 mlp_w1:H=0.7814,top10E=0.24,eRank=219.1,q75/q25=10.69 mlp_w2:H=0.9011,top10E=0.12,eRank=411.0,q75/q25=7.24 vo_prod:H=0.4920,top10E=0.64,eRank=32.5,q75/q25=18773.96 train_time:885200ms step_avg:90.33ms +[2025-08-22 09:31:50] [Rank 0] step:9801/10000 train_time:885215ms step_avg:90.32ms +[2025-08-22 09:31:50] [Rank 0] step:9801/10000 train_time:885215ms step_avg:90.32ms +[2025-08-22 09:31:52] [Rank 0] step:9821/10000 train_time:887049ms step_avg:90.32ms +[2025-08-22 09:31:52] [Rank 0] step:9821/10000 train_time:887049ms step_avg:90.32ms +[2025-08-22 09:31:54] [Rank 0] step:9841/10000 train_time:888985ms 
step_avg:90.33ms +[2025-08-22 09:31:54] [Rank 0] step:9841/10000 train_time:888985ms step_avg:90.33ms +[2025-08-22 09:31:56] [Rank 0] step:9861/10000 train_time:890897ms step_avg:90.35ms +[2025-08-22 09:31:56] [Rank 0] step:9861/10000 train_time:890897ms step_avg:90.35ms +[2025-08-22 09:31:58] [Rank 0] step:9881/10000 train_time:892813ms step_avg:90.36ms +[2025-08-22 09:31:58] [Rank 0] step:9881/10000 train_time:892813ms step_avg:90.36ms +[2025-08-22 09:32:00] [Rank 0] step:9901/10000 train_time:894743ms step_avg:90.37ms +[2025-08-22 09:32:00] [Rank 0] step:9901/10000 train_time:894743ms step_avg:90.37ms +[2025-08-22 09:32:02] [Rank 0] step:9921/10000 train_time:896671ms step_avg:90.38ms +[2025-08-22 09:32:02] [Rank 0] step:9921/10000 train_time:896671ms step_avg:90.38ms +[2025-08-22 09:32:04] [Rank 0] step:9941/10000 train_time:898598ms step_avg:90.39ms +[2025-08-22 09:32:04] [Rank 0] step:9941/10000 train_time:898598ms step_avg:90.39ms +[2025-08-22 09:32:06] [Rank 0] step:9961/10000 train_time:900521ms step_avg:90.40ms +[2025-08-22 09:32:06] [Rank 0] step:9961/10000 train_time:900521ms step_avg:90.40ms +[2025-08-22 09:32:08] [Rank 0] step:9981/10000 train_time:902448ms step_avg:90.42ms +[2025-08-22 09:32:08] [Rank 0] step:9981/10000 train_time:902448ms step_avg:90.42ms +[2025-08-22 09:32:10] [Rank 0] step:10000/10000 train_time:904282ms step_avg:90.43ms +[2025-08-22 09:32:10] [Rank 0] step:10000/10000 train_time:904282ms step_avg:90.43ms +[2025-08-22 09:32:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:32:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:32:23] [Rank 0] PRINT: step:10000/10000 val_loss:3.7518 svd_entropy: attn_qk:H=0.8773,top10E=0.09,eRank=351.8,q75/q25=80.51 attn_vo:H=0.5906,top10E=0.45,eRank=64.7,q75/q25=131.75 mlp_w1:H=0.7815,top10E=0.24,eRank=219.3,q75/q25=10.69 mlp_w2:H=0.9012,top10E=0.12,eRank=411.3,q75/q25=7.23 vo_prod:H=0.4922,top10E=0.64,eRank=32.6,q75/q25=18789.15 train_time:904482ms step_avg:90.45ms +[2025-08-22 09:32:23] [Rank 0] PRINT: step:10000/10000 val_loss:3.7518 svd_entropy: attn_qk:H=0.8773,top10E=0.09,eRank=351.8,q75/q25=80.51 attn_vo:H=0.5906,top10E=0.45,eRank=64.7,q75/q25=131.75 mlp_w1:H=0.7815,top10E=0.24,eRank=219.3,q75/q25=10.69 mlp_w2:H=0.9012,top10E=0.12,eRank=411.3,q75/q25=7.23 vo_prod:H=0.4922,top10E=0.64,eRank=32.6,q75/q25=18789.15 train_time:904482ms step_avg:90.45ms +[2025-08-22 09:32:23] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 09:32:23 2025 --- +[2025-08-22 09:32:23] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 09:32:23 2025 --- +[2025-08-22 09:32:23] [Rank 0] PRINT: Peak memory allocated: 11530 MiB reserved: 15796 MiB +[2025-08-22 09:32:23] [Rank 0] PRINT: Peak memory allocated: 11530 MiB reserved: 15796 MiB diff --git a/logs_svd_gated/mode_1_param_gated_seed_42/config.json b/logs_svd_gated/mode_1_param_gated_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..27ea3c4d45cfb2bcfb7cb8ddd4ba79569df1abc2 --- /dev/null +++ b/logs_svd_gated/mode_1_param_gated_seed_42/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 1, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "0ed5fb85-5561-4619-89cb-de2ceb6d6d9f", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_1_param_gated_seed_42/training_log_0ed5fb85-5561-4619-89cb-de2ceb6d6d9f.txt b/logs_svd_gated/mode_1_param_gated_seed_42/training_log_0ed5fb85-5561-4619-89cb-de2ceb6d6d9f.txt new file mode 100644 index 0000000000000000000000000000000000000000..e4e8bde05a53770179012b43f98a5670f8890ae0 --- /dev/null +++ b/logs_svd_gated/mode_1_param_gated_seed_42/training_log_0ed5fb85-5561-4619-89cb-de2ceb6d6d9f.txt @@ -0,0 +1,2926 @@ +[2025-08-22 14:21:13] [Rank 0] PRINT: --- Script Start: Fri Aug 22 14:21:13 2025 --- +[2025-08-22 14:21:13] [Rank 0] PRINT: --- Script Start: Fri Aug 22 14:21:13 2025 --- +[2025-08-22 14:21:13] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=1, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 14:21:13] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=1, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 14:21:13] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 14:21:13] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 14:21:13] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 14:21:13] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 14:21:13] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_1_param_gated_seed_42 +[2025-08-22 14:21:13] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_1_param_gated_seed_42 +[2025-08-22 14:21:13] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 14:21:13] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 14:21:13] [Rank 0] PRINT: Constructing model... +[2025-08-22 14:21:13] [Rank 0] PRINT: Constructing model... +[2025-08-22 14:21:15] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 14:21:15] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 14:21:15] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 14:21:15] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 14:21:15] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 14:21:15] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 14:21:15] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 1 +[2025-08-22 14:21:15] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 1 +[2025-08-22 14:21:15] [Rank 0] PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: 0.05). +[2025-08-22 14:21:15] [Rank 0] PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: 0.05). +[2025-08-22 14:21:15] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 14:21:15] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 14:21:15] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-08-22 14:21:15] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-08-22 14:21:15] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 14:21:15] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 14:21:15] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 14:21:15] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 14:21:15] [Rank 0] PRINT: Starting warmup... +[2025-08-22 14:21:15] [Rank 0] PRINT: Starting warmup... +[2025-08-22 14:22:00] [Rank 0] PRINT: Warmup complete. +[2025-08-22 14:22:00] [Rank 0] PRINT: Warmup complete. +[2025-08-22 14:22:00] [Rank 0] PRINT: Starting training... +[2025-08-22 14:22:00] [Rank 0] PRINT: Starting training... 
+[2025-08-22 14:22:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:22:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:22:17] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 14:22:17] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 14:22:19] [Rank 0] step:21/10000 train_time:1643ms step_avg:78.23ms +[2025-08-22 14:22:19] [Rank 0] step:21/10000 train_time:1643ms step_avg:78.23ms +[2025-08-22 14:22:21] [Rank 0] step:41/10000 train_time:3303ms step_avg:80.55ms +[2025-08-22 14:22:21] [Rank 0] step:41/10000 train_time:3303ms step_avg:80.55ms +[2025-08-22 14:22:23] [Rank 0] step:61/10000 train_time:4966ms step_avg:81.41ms +[2025-08-22 14:22:23] [Rank 0] step:61/10000 train_time:4966ms step_avg:81.41ms +[2025-08-22 14:22:24] [Rank 0] step:81/10000 train_time:6633ms step_avg:81.89ms +[2025-08-22 14:22:24] [Rank 0] step:81/10000 train_time:6633ms step_avg:81.89ms +[2025-08-22 14:22:26] [Rank 0] step:101/10000 train_time:8304ms step_avg:82.22ms +[2025-08-22 14:22:26] [Rank 0] step:101/10000 train_time:8304ms step_avg:82.22ms +[2025-08-22 14:22:28] [Rank 0] step:121/10000 train_time:9976ms step_avg:82.45ms +[2025-08-22 14:22:28] [Rank 0] step:121/10000 
train_time:9976ms step_avg:82.45ms +[2025-08-22 14:22:29] [Rank 0] step:141/10000 train_time:11649ms step_avg:82.62ms +[2025-08-22 14:22:29] [Rank 0] step:141/10000 train_time:11649ms step_avg:82.62ms +[2025-08-22 14:22:31] [Rank 0] step:161/10000 train_time:13324ms step_avg:82.76ms +[2025-08-22 14:22:31] [Rank 0] step:161/10000 train_time:13324ms step_avg:82.76ms +[2025-08-22 14:22:33] [Rank 0] step:181/10000 train_time:14999ms step_avg:82.87ms +[2025-08-22 14:22:33] [Rank 0] step:181/10000 train_time:14999ms step_avg:82.87ms +[2025-08-22 14:22:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:22:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:22:48] [Rank 0] PRINT: step:200/10000 val_loss:6.5883 svd_entropy: attn_qk:H=0.8340,top10E=0.22,eRank=267.5,q75/q25=10.83 attn_vo:H=0.1302,top10E=0.98,eRank=2.9,q75/q25=2018.33 mlp_w1:H=0.3397,top10E=0.88,eRank=9.9,q75/q25=6.21 mlp_w2:H=0.4379,top10E=0.76,eRank=18.7,q75/q25=5.78 vo_prod:H=0.0288,top10E=1.00,eRank=1.5,q75/q25=12055.63 train_time:16757ms step_avg:83.78ms +[2025-08-22 14:22:48] [Rank 0] PRINT: step:200/10000 val_loss:6.5883 svd_entropy: attn_qk:H=0.8340,top10E=0.22,eRank=267.5,q75/q25=10.83 attn_vo:H=0.1302,top10E=0.98,eRank=2.9,q75/q25=2018.33 mlp_w1:H=0.3397,top10E=0.88,eRank=9.9,q75/q25=6.21 mlp_w2:H=0.4379,top10E=0.76,eRank=18.7,q75/q25=5.78 vo_prod:H=0.0288,top10E=1.00,eRank=1.5,q75/q25=12055.63 train_time:16757ms step_avg:83.78ms +[2025-08-22 14:22:48] [Rank 0] step:201/10000 train_time:16775ms step_avg:83.46ms +[2025-08-22 14:22:48] [Rank 0] step:201/10000 train_time:16775ms step_avg:83.46ms +[2025-08-22 14:22:50] [Rank 0] step:221/10000 train_time:18375ms step_avg:83.14ms +[2025-08-22 14:22:50] [Rank 0] step:221/10000 train_time:18375ms step_avg:83.14ms +[2025-08-22 14:22:51] [Rank 0] step:241/10000 
train_time:20047ms step_avg:83.18ms +[2025-08-22 14:22:51] [Rank 0] step:241/10000 train_time:20047ms step_avg:83.18ms +[2025-08-22 14:22:53] [Rank 0] step:261/10000 train_time:21719ms step_avg:83.22ms +[2025-08-22 14:22:53] [Rank 0] step:261/10000 train_time:21719ms step_avg:83.22ms +[2025-08-22 14:22:55] [Rank 0] step:281/10000 train_time:23392ms step_avg:83.25ms +[2025-08-22 14:22:55] [Rank 0] step:281/10000 train_time:23392ms step_avg:83.25ms +[2025-08-22 14:22:56] [Rank 0] step:301/10000 train_time:25065ms step_avg:83.27ms +[2025-08-22 14:22:56] [Rank 0] step:301/10000 train_time:25065ms step_avg:83.27ms +[2025-08-22 14:22:58] [Rank 0] step:321/10000 train_time:26739ms step_avg:83.30ms +[2025-08-22 14:22:58] [Rank 0] step:321/10000 train_time:26739ms step_avg:83.30ms +[2025-08-22 14:23:00] [Rank 0] step:341/10000 train_time:28412ms step_avg:83.32ms +[2025-08-22 14:23:00] [Rank 0] step:341/10000 train_time:28412ms step_avg:83.32ms +[2025-08-22 14:23:01] [Rank 0] step:361/10000 train_time:30087ms step_avg:83.34ms +[2025-08-22 14:23:01] [Rank 0] step:361/10000 train_time:30087ms step_avg:83.34ms +[2025-08-22 14:23:03] [Rank 0] step:381/10000 train_time:31762ms step_avg:83.37ms +[2025-08-22 14:23:03] [Rank 0] step:381/10000 train_time:31762ms step_avg:83.37ms +[2025-08-22 14:23:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:23:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:23:18] [Rank 0] PRINT: step:400/10000 val_loss:5.9712 svd_entropy: attn_qk:H=0.8076,top10E=0.17,eRank=231.6,q75/q25=15.01 attn_vo:H=0.2844,top10E=0.96,eRank=7.2,q75/q25=123.96 mlp_w1:H=0.5687,top10E=0.58,eRank=46.3,q75/q25=6.69 mlp_w2:H=0.6745,top10E=0.41,eRank=92.7,q75/q25=6.28 vo_prod:H=0.1664,top10E=1.00,eRank=3.4,q75/q25=1027.61 train_time:33521ms step_avg:83.80ms +[2025-08-22 14:23:18] [Rank 0] PRINT: step:400/10000 val_loss:5.9712 svd_entropy: attn_qk:H=0.8076,top10E=0.17,eRank=231.6,q75/q25=15.01 attn_vo:H=0.2844,top10E=0.96,eRank=7.2,q75/q25=123.96 mlp_w1:H=0.5687,top10E=0.58,eRank=46.3,q75/q25=6.69 mlp_w2:H=0.6745,top10E=0.41,eRank=92.7,q75/q25=6.28 vo_prod:H=0.1664,top10E=1.00,eRank=3.4,q75/q25=1027.61 train_time:33521ms step_avg:83.80ms +[2025-08-22 14:23:18] [Rank 0] step:401/10000 train_time:33540ms step_avg:83.64ms +[2025-08-22 14:23:18] [Rank 0] step:401/10000 train_time:33540ms step_avg:83.64ms +[2025-08-22 14:23:20] [Rank 0] step:421/10000 train_time:35153ms step_avg:83.50ms +[2025-08-22 14:23:20] [Rank 0] step:421/10000 train_time:35153ms step_avg:83.50ms +[2025-08-22 14:23:21] [Rank 0] step:441/10000 train_time:36825ms step_avg:83.50ms +[2025-08-22 14:23:21] [Rank 0] step:441/10000 train_time:36825ms step_avg:83.50ms +[2025-08-22 14:23:23] [Rank 0] step:461/10000 train_time:38498ms step_avg:83.51ms +[2025-08-22 14:23:23] [Rank 0] step:461/10000 train_time:38498ms step_avg:83.51ms +[2025-08-22 14:23:25] [Rank 0] step:481/10000 train_time:40173ms step_avg:83.52ms +[2025-08-22 14:23:25] [Rank 0] step:481/10000 train_time:40173ms step_avg:83.52ms +[2025-08-22 14:23:26] [Rank 0] step:501/10000 train_time:41846ms step_avg:83.53ms +[2025-08-22 14:23:26] [Rank 0] step:501/10000 train_time:41846ms step_avg:83.53ms +[2025-08-22 14:23:28] [Rank 0] step:521/10000 train_time:43522ms step_avg:83.53ms +[2025-08-22 14:23:28] [Rank 0] step:521/10000 train_time:43522ms step_avg:83.53ms +[2025-08-22 14:23:30] [Rank 0] step:541/10000 
train_time:45196ms step_avg:83.54ms +[2025-08-22 14:23:30] [Rank 0] step:541/10000 train_time:45196ms step_avg:83.54ms +[2025-08-22 14:23:32] [Rank 0] step:561/10000 train_time:46871ms step_avg:83.55ms +[2025-08-22 14:23:32] [Rank 0] step:561/10000 train_time:46871ms step_avg:83.55ms +[2025-08-22 14:23:33] [Rank 0] step:581/10000 train_time:48549ms step_avg:83.56ms +[2025-08-22 14:23:33] [Rank 0] step:581/10000 train_time:48549ms step_avg:83.56ms +[2025-08-22 14:23:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:23:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:23:48] [Rank 0] PRINT: step:600/10000 val_loss:5.6348 svd_entropy: attn_qk:H=0.8138,top10E=0.15,eRank=239.5,q75/q25=32.26 attn_vo:H=0.3290,top10E=0.94,eRank=9.6,q75/q25=100.13 mlp_w1:H=0.6297,top10E=0.48,eRank=71.6,q75/q25=6.17 mlp_w2:H=0.7401,top10E=0.30,eRank=142.8,q75/q25=7.66 vo_prod:H=0.2205,top10E=0.99,eRank=4.7,q75/q25=980.89 train_time:50308ms step_avg:83.85ms +[2025-08-22 14:23:48] [Rank 0] PRINT: step:600/10000 val_loss:5.6348 svd_entropy: attn_qk:H=0.8138,top10E=0.15,eRank=239.5,q75/q25=32.26 attn_vo:H=0.3290,top10E=0.94,eRank=9.6,q75/q25=100.13 mlp_w1:H=0.6297,top10E=0.48,eRank=71.6,q75/q25=6.17 mlp_w2:H=0.7401,top10E=0.30,eRank=142.8,q75/q25=7.66 vo_prod:H=0.2205,top10E=0.99,eRank=4.7,q75/q25=980.89 train_time:50308ms step_avg:83.85ms +[2025-08-22 14:23:48] [Rank 0] step:601/10000 train_time:50325ms step_avg:83.74ms +[2025-08-22 14:23:48] [Rank 0] step:601/10000 train_time:50325ms step_avg:83.74ms +[2025-08-22 14:23:50] [Rank 0] step:621/10000 train_time:51925ms step_avg:83.62ms +[2025-08-22 14:23:50] [Rank 0] step:621/10000 train_time:51925ms step_avg:83.62ms +[2025-08-22 14:23:52] [Rank 0] step:641/10000 train_time:53593ms step_avg:83.61ms +[2025-08-22 14:23:52] [Rank 0] step:641/10000 
train_time:53593ms step_avg:83.61ms +[2025-08-22 14:23:53] [Rank 0] step:661/10000 train_time:55263ms step_avg:83.61ms +[2025-08-22 14:23:53] [Rank 0] step:661/10000 train_time:55263ms step_avg:83.61ms +[2025-08-22 14:23:55] [Rank 0] step:681/10000 train_time:56934ms step_avg:83.60ms +[2025-08-22 14:23:55] [Rank 0] step:681/10000 train_time:56934ms step_avg:83.60ms +[2025-08-22 14:23:57] [Rank 0] step:701/10000 train_time:58604ms step_avg:83.60ms +[2025-08-22 14:23:57] [Rank 0] step:701/10000 train_time:58604ms step_avg:83.60ms +[2025-08-22 14:23:58] [Rank 0] step:721/10000 train_time:60275ms step_avg:83.60ms +[2025-08-22 14:23:58] [Rank 0] step:721/10000 train_time:60275ms step_avg:83.60ms +[2025-08-22 14:24:00] [Rank 0] step:741/10000 train_time:61947ms step_avg:83.60ms +[2025-08-22 14:24:00] [Rank 0] step:741/10000 train_time:61947ms step_avg:83.60ms +[2025-08-22 14:24:02] [Rank 0] step:761/10000 train_time:63631ms step_avg:83.62ms +[2025-08-22 14:24:02] [Rank 0] step:761/10000 train_time:63631ms step_avg:83.62ms +[2025-08-22 14:24:03] [Rank 0] step:781/10000 train_time:65319ms step_avg:83.64ms +[2025-08-22 14:24:03] [Rank 0] step:781/10000 train_time:65319ms step_avg:83.64ms +[2025-08-22 14:24:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:24:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:24:18] [Rank 0] PRINT: step:800/10000 val_loss:5.3839 svd_entropy: attn_qk:H=0.8238,top10E=0.13,eRank=253.5,q75/q25=49.75 attn_vo:H=0.3602,top10E=0.90,eRank=11.8,q75/q25=90.51 mlp_w1:H=0.6570,top10E=0.43,eRank=86.9,q75/q25=6.65 mlp_w2:H=0.7742,top10E=0.25,eRank=178.1,q75/q25=9.55 vo_prod:H=0.2575,top10E=0.98,eRank=6.0,q75/q25=1041.92 train_time:67090ms step_avg:83.86ms +[2025-08-22 14:24:18] [Rank 0] PRINT: step:800/10000 val_loss:5.3839 svd_entropy: attn_qk:H=0.8238,top10E=0.13,eRank=253.5,q75/q25=49.75 attn_vo:H=0.3602,top10E=0.90,eRank=11.8,q75/q25=90.51 mlp_w1:H=0.6570,top10E=0.43,eRank=86.9,q75/q25=6.65 mlp_w2:H=0.7742,top10E=0.25,eRank=178.1,q75/q25=9.55 vo_prod:H=0.2575,top10E=0.98,eRank=6.0,q75/q25=1041.92 train_time:67090ms step_avg:83.86ms +[2025-08-22 14:24:18] [Rank 0] step:801/10000 train_time:67106ms step_avg:83.78ms +[2025-08-22 14:24:18] [Rank 0] step:801/10000 train_time:67106ms step_avg:83.78ms +[2025-08-22 14:24:20] [Rank 0] step:821/10000 train_time:68702ms step_avg:83.68ms +[2025-08-22 14:24:20] [Rank 0] step:821/10000 train_time:68702ms step_avg:83.68ms +[2025-08-22 14:24:22] [Rank 0] step:841/10000 train_time:70384ms step_avg:83.69ms +[2025-08-22 14:24:22] [Rank 0] step:841/10000 train_time:70384ms step_avg:83.69ms +[2025-08-22 14:24:23] [Rank 0] step:861/10000 train_time:72066ms step_avg:83.70ms +[2025-08-22 14:24:23] [Rank 0] step:861/10000 train_time:72066ms step_avg:83.70ms +[2025-08-22 14:24:25] [Rank 0] step:881/10000 train_time:73750ms step_avg:83.71ms +[2025-08-22 14:24:25] [Rank 0] step:881/10000 train_time:73750ms step_avg:83.71ms +[2025-08-22 14:24:27] [Rank 0] step:901/10000 train_time:75433ms step_avg:83.72ms +[2025-08-22 14:24:27] [Rank 0] step:901/10000 train_time:75433ms step_avg:83.72ms +[2025-08-22 14:24:28] [Rank 0] step:921/10000 train_time:77117ms step_avg:83.73ms +[2025-08-22 14:24:28] [Rank 0] step:921/10000 train_time:77117ms step_avg:83.73ms +[2025-08-22 14:24:30] [Rank 0] step:941/10000 
train_time:78800ms step_avg:83.74ms +[2025-08-22 14:24:30] [Rank 0] step:941/10000 train_time:78800ms step_avg:83.74ms +[2025-08-22 14:24:32] [Rank 0] step:961/10000 train_time:80484ms step_avg:83.75ms +[2025-08-22 14:24:32] [Rank 0] step:961/10000 train_time:80484ms step_avg:83.75ms +[2025-08-22 14:24:34] [Rank 0] step:981/10000 train_time:82172ms step_avg:83.76ms +[2025-08-22 14:24:34] [Rank 0] step:981/10000 train_time:82172ms step_avg:83.76ms +[2025-08-22 14:24:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:24:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:24:49] [Rank 0] PRINT: step:1000/10000 val_loss:5.2212 svd_entropy: attn_qk:H=0.8328,top10E=0.12,eRank=267.3,q75/q25=60.03 attn_vo:H=0.3848,top10E=0.86,eRank=14.1,q75/q25=86.55 mlp_w1:H=0.6751,top10E=0.40,eRank=98.9,q75/q25=7.31 mlp_w2:H=0.7972,top10E=0.22,eRank=207.0,q75/q25=11.07 vo_prod:H=0.2819,top10E=0.97,eRank=7.1,q75/q25=1231.54 train_time:83942ms step_avg:83.94ms +[2025-08-22 14:24:49] [Rank 0] PRINT: step:1000/10000 val_loss:5.2212 svd_entropy: attn_qk:H=0.8328,top10E=0.12,eRank=267.3,q75/q25=60.03 attn_vo:H=0.3848,top10E=0.86,eRank=14.1,q75/q25=86.55 mlp_w1:H=0.6751,top10E=0.40,eRank=98.9,q75/q25=7.31 mlp_w2:H=0.7972,top10E=0.22,eRank=207.0,q75/q25=11.07 vo_prod:H=0.2819,top10E=0.97,eRank=7.1,q75/q25=1231.54 train_time:83942ms step_avg:83.94ms +[2025-08-22 14:24:49] [Rank 0] step:1001/10000 train_time:83958ms step_avg:83.87ms +[2025-08-22 14:24:49] [Rank 0] step:1001/10000 train_time:83958ms step_avg:83.87ms +[2025-08-22 14:24:51] [Rank 0] step:1021/10000 train_time:85575ms step_avg:83.81ms +[2025-08-22 14:24:51] [Rank 0] step:1021/10000 train_time:85575ms step_avg:83.81ms +[2025-08-22 14:24:52] [Rank 0] step:1041/10000 train_time:87259ms step_avg:83.82ms +[2025-08-22 14:24:52] [Rank 0] step:1041/10000 
train_time:87259ms step_avg:83.82ms +[2025-08-22 14:24:54] [Rank 0] step:1061/10000 train_time:88942ms step_avg:83.83ms +[2025-08-22 14:24:54] [Rank 0] step:1061/10000 train_time:88942ms step_avg:83.83ms +[2025-08-22 14:24:56] [Rank 0] step:1081/10000 train_time:90628ms step_avg:83.84ms +[2025-08-22 14:24:56] [Rank 0] step:1081/10000 train_time:90628ms step_avg:83.84ms +[2025-08-22 14:24:57] [Rank 0] step:1101/10000 train_time:92314ms step_avg:83.85ms +[2025-08-22 14:24:57] [Rank 0] step:1101/10000 train_time:92314ms step_avg:83.85ms +[2025-08-22 14:24:59] [Rank 0] step:1121/10000 train_time:94000ms step_avg:83.85ms +[2025-08-22 14:24:59] [Rank 0] step:1121/10000 train_time:94000ms step_avg:83.85ms +[2025-08-22 14:25:01] [Rank 0] step:1141/10000 train_time:95686ms step_avg:83.86ms +[2025-08-22 14:25:01] [Rank 0] step:1141/10000 train_time:95686ms step_avg:83.86ms +[2025-08-22 14:25:02] [Rank 0] step:1161/10000 train_time:97372ms step_avg:83.87ms +[2025-08-22 14:25:02] [Rank 0] step:1161/10000 train_time:97372ms step_avg:83.87ms +[2025-08-22 14:25:04] [Rank 0] step:1181/10000 train_time:99061ms step_avg:83.88ms +[2025-08-22 14:25:04] [Rank 0] step:1181/10000 train_time:99061ms step_avg:83.88ms +[2025-08-22 14:25:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:25:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:25:19] [Rank 0] PRINT: step:1200/10000 val_loss:5.0807 svd_entropy: attn_qk:H=0.8401,top10E=0.12,eRank=279.4,q75/q25=65.50 attn_vo:H=0.4071,top10E=0.82,eRank=16.7,q75/q25=86.52 mlp_w1:H=0.6880,top10E=0.38,eRank=108.6,q75/q25=8.08 mlp_w2:H=0.8142,top10E=0.20,eRank=231.3,q75/q25=12.09 vo_prod:H=0.3040,top10E=0.95,eRank=8.4,q75/q25=1477.82 train_time:100833ms step_avg:84.03ms +[2025-08-22 14:25:19] [Rank 0] PRINT: step:1200/10000 val_loss:5.0807 svd_entropy: attn_qk:H=0.8401,top10E=0.12,eRank=279.4,q75/q25=65.50 attn_vo:H=0.4071,top10E=0.82,eRank=16.7,q75/q25=86.52 mlp_w1:H=0.6880,top10E=0.38,eRank=108.6,q75/q25=8.08 mlp_w2:H=0.8142,top10E=0.20,eRank=231.3,q75/q25=12.09 vo_prod:H=0.3040,top10E=0.95,eRank=8.4,q75/q25=1477.82 train_time:100833ms step_avg:84.03ms +[2025-08-22 14:25:19] [Rank 0] step:1201/10000 train_time:100847ms step_avg:83.97ms +[2025-08-22 14:25:19] [Rank 0] step:1201/10000 train_time:100847ms step_avg:83.97ms +[2025-08-22 14:25:21] [Rank 0] step:1221/10000 train_time:102448ms step_avg:83.90ms +[2025-08-22 14:25:21] [Rank 0] step:1221/10000 train_time:102448ms step_avg:83.90ms +[2025-08-22 14:25:23] [Rank 0] step:1241/10000 train_time:104130ms step_avg:83.91ms +[2025-08-22 14:25:23] [Rank 0] step:1241/10000 train_time:104130ms step_avg:83.91ms +[2025-08-22 14:25:24] [Rank 0] step:1261/10000 train_time:105816ms step_avg:83.91ms +[2025-08-22 14:25:24] [Rank 0] step:1261/10000 train_time:105816ms step_avg:83.91ms +[2025-08-22 14:25:26] [Rank 0] step:1281/10000 train_time:107502ms step_avg:83.92ms +[2025-08-22 14:25:26] [Rank 0] step:1281/10000 train_time:107502ms step_avg:83.92ms +[2025-08-22 14:25:28] [Rank 0] step:1301/10000 train_time:109190ms step_avg:83.93ms +[2025-08-22 14:25:28] [Rank 0] step:1301/10000 train_time:109190ms step_avg:83.93ms +[2025-08-22 14:25:29] [Rank 0] step:1321/10000 train_time:110877ms step_avg:83.93ms +[2025-08-22 14:25:29] [Rank 0] step:1321/10000 train_time:110877ms step_avg:83.93ms +[2025-08-22 14:25:31] 
[Rank 0] step:1341/10000 train_time:112565ms step_avg:83.94ms +[2025-08-22 14:25:31] [Rank 0] step:1341/10000 train_time:112565ms step_avg:83.94ms +[2025-08-22 14:25:33] [Rank 0] step:1361/10000 train_time:114253ms step_avg:83.95ms +[2025-08-22 14:25:33] [Rank 0] step:1361/10000 train_time:114253ms step_avg:83.95ms +[2025-08-22 14:25:35] [Rank 0] step:1381/10000 train_time:115942ms step_avg:83.96ms +[2025-08-22 14:25:35] [Rank 0] step:1381/10000 train_time:115942ms step_avg:83.96ms +[2025-08-22 14:25:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:25:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:25:50] [Rank 0] PRINT: step:1400/10000 val_loss:4.9763 svd_entropy: attn_qk:H=0.8460,top10E=0.11,eRank=289.7,q75/q25=68.17 attn_vo:H=0.4268,top10E=0.78,eRank=19.4,q75/q25=87.98 mlp_w1:H=0.6988,top10E=0.36,eRank=117.4,q75/q25=8.85 mlp_w2:H=0.8274,top10E=0.19,eRank=252.1,q75/q25=12.54 vo_prod:H=0.3235,top10E=0.92,eRank=9.6,q75/q25=1904.47 train_time:117715ms step_avg:84.08ms +[2025-08-22 14:25:50] [Rank 0] PRINT: step:1400/10000 val_loss:4.9763 svd_entropy: attn_qk:H=0.8460,top10E=0.11,eRank=289.7,q75/q25=68.17 attn_vo:H=0.4268,top10E=0.78,eRank=19.4,q75/q25=87.98 mlp_w1:H=0.6988,top10E=0.36,eRank=117.4,q75/q25=8.85 mlp_w2:H=0.8274,top10E=0.19,eRank=252.1,q75/q25=12.54 vo_prod:H=0.3235,top10E=0.92,eRank=9.6,q75/q25=1904.47 train_time:117715ms step_avg:84.08ms +[2025-08-22 14:25:50] [Rank 0] step:1401/10000 train_time:117730ms step_avg:84.03ms +[2025-08-22 14:25:50] [Rank 0] step:1401/10000 train_time:117730ms step_avg:84.03ms +[2025-08-22 14:25:51] [Rank 0] step:1421/10000 train_time:119333ms step_avg:83.98ms +[2025-08-22 14:25:51] [Rank 0] step:1421/10000 train_time:119333ms step_avg:83.98ms +[2025-08-22 14:25:53] [Rank 0] step:1441/10000 train_time:121015ms step_avg:83.98ms 
+[2025-08-22 14:25:53] [Rank 0] step:1441/10000 train_time:121015ms step_avg:83.98ms +[2025-08-22 14:25:55] [Rank 0] step:1461/10000 train_time:122699ms step_avg:83.98ms +[2025-08-22 14:25:55] [Rank 0] step:1461/10000 train_time:122699ms step_avg:83.98ms +[2025-08-22 14:25:56] [Rank 0] step:1481/10000 train_time:124383ms step_avg:83.99ms +[2025-08-22 14:25:56] [Rank 0] step:1481/10000 train_time:124383ms step_avg:83.99ms +[2025-08-22 14:25:58] [Rank 0] step:1501/10000 train_time:126078ms step_avg:84.00ms +[2025-08-22 14:25:58] [Rank 0] step:1501/10000 train_time:126078ms step_avg:84.00ms +[2025-08-22 14:26:00] [Rank 0] step:1521/10000 train_time:127776ms step_avg:84.01ms +[2025-08-22 14:26:00] [Rank 0] step:1521/10000 train_time:127776ms step_avg:84.01ms +[2025-08-22 14:26:02] [Rank 0] step:1541/10000 train_time:129473ms step_avg:84.02ms +[2025-08-22 14:26:02] [Rank 0] step:1541/10000 train_time:129473ms step_avg:84.02ms +[2025-08-22 14:26:03] [Rank 0] step:1561/10000 train_time:131175ms step_avg:84.03ms +[2025-08-22 14:26:03] [Rank 0] step:1561/10000 train_time:131175ms step_avg:84.03ms +[2025-08-22 14:26:05] [Rank 0] step:1581/10000 train_time:132874ms step_avg:84.04ms +[2025-08-22 14:26:05] [Rank 0] step:1581/10000 train_time:132874ms step_avg:84.04ms +[2025-08-22 14:26:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:26:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:26:20] [Rank 0] PRINT: step:1600/10000 val_loss:4.8210 svd_entropy: attn_qk:H=0.8508,top10E=0.11,eRank=298.4,q75/q25=68.97 attn_vo:H=0.4444,top10E=0.75,eRank=22.2,q75/q25=89.84 mlp_w1:H=0.7080,top10E=0.34,eRank=125.5,q75/q25=9.60 mlp_w2:H=0.8375,top10E=0.18,eRank=269.6,q75/q25=12.66 vo_prod:H=0.3415,top10E=0.90,eRank=11.0,q75/q25=2516.71 train_time:134656ms step_avg:84.16ms +[2025-08-22 14:26:20] [Rank 0] PRINT: step:1600/10000 val_loss:4.8210 svd_entropy: attn_qk:H=0.8508,top10E=0.11,eRank=298.4,q75/q25=68.97 attn_vo:H=0.4444,top10E=0.75,eRank=22.2,q75/q25=89.84 mlp_w1:H=0.7080,top10E=0.34,eRank=125.5,q75/q25=9.60 mlp_w2:H=0.8375,top10E=0.18,eRank=269.6,q75/q25=12.66 vo_prod:H=0.3415,top10E=0.90,eRank=11.0,q75/q25=2516.71 train_time:134656ms step_avg:84.16ms +[2025-08-22 14:26:20] [Rank 0] step:1601/10000 train_time:134671ms step_avg:84.12ms +[2025-08-22 14:26:20] [Rank 0] step:1601/10000 train_time:134671ms step_avg:84.12ms +[2025-08-22 14:26:22] [Rank 0] step:1621/10000 train_time:136324ms step_avg:84.10ms +[2025-08-22 14:26:22] [Rank 0] step:1621/10000 train_time:136324ms step_avg:84.10ms +[2025-08-22 14:26:23] [Rank 0] step:1641/10000 train_time:138017ms step_avg:84.11ms +[2025-08-22 14:26:23] [Rank 0] step:1641/10000 train_time:138017ms step_avg:84.11ms +[2025-08-22 14:26:25] [Rank 0] step:1661/10000 train_time:139711ms step_avg:84.11ms +[2025-08-22 14:26:25] [Rank 0] step:1661/10000 train_time:139711ms step_avg:84.11ms +[2025-08-22 14:26:27] [Rank 0] step:1681/10000 train_time:141412ms step_avg:84.12ms +[2025-08-22 14:26:27] [Rank 0] step:1681/10000 train_time:141412ms step_avg:84.12ms +[2025-08-22 14:26:29] [Rank 0] step:1701/10000 train_time:143108ms step_avg:84.13ms +[2025-08-22 14:26:29] [Rank 0] step:1701/10000 train_time:143108ms step_avg:84.13ms +[2025-08-22 14:26:30] [Rank 0] step:1721/10000 train_time:144805ms step_avg:84.14ms +[2025-08-22 14:26:30] [Rank 0] step:1721/10000 train_time:144805ms step_avg:84.14ms +[2025-08-22 14:26:32] 
[Rank 0] step:1741/10000 train_time:146502ms step_avg:84.15ms +[2025-08-22 14:26:32] [Rank 0] step:1741/10000 train_time:146502ms step_avg:84.15ms +[2025-08-22 14:26:34] [Rank 0] step:1761/10000 train_time:148200ms step_avg:84.16ms +[2025-08-22 14:26:34] [Rank 0] step:1761/10000 train_time:148200ms step_avg:84.16ms +[2025-08-22 14:26:35] [Rank 0] step:1781/10000 train_time:149899ms step_avg:84.17ms +[2025-08-22 14:26:35] [Rank 0] step:1781/10000 train_time:149899ms step_avg:84.17ms +[2025-08-22 14:26:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:26:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:26:50] [Rank 0] PRINT: step:1800/10000 val_loss:4.6993 svd_entropy: attn_qk:H=0.8548,top10E=0.11,eRank=305.9,q75/q25=69.23 attn_vo:H=0.4599,top10E=0.72,eRank=25.0,q75/q25=93.56 mlp_w1:H=0.7156,top10E=0.33,eRank=132.9,q75/q25=10.29 mlp_w2:H=0.8456,top10E=0.17,eRank=284.4,q75/q25=12.53 vo_prod:H=0.3575,top10E=0.88,eRank=12.4,q75/q25=3327.33 train_time:151682ms step_avg:84.27ms +[2025-08-22 14:26:50] [Rank 0] PRINT: step:1800/10000 val_loss:4.6993 svd_entropy: attn_qk:H=0.8548,top10E=0.11,eRank=305.9,q75/q25=69.23 attn_vo:H=0.4599,top10E=0.72,eRank=25.0,q75/q25=93.56 mlp_w1:H=0.7156,top10E=0.33,eRank=132.9,q75/q25=10.29 mlp_w2:H=0.8456,top10E=0.17,eRank=284.4,q75/q25=12.53 vo_prod:H=0.3575,top10E=0.88,eRank=12.4,q75/q25=3327.33 train_time:151682ms step_avg:84.27ms +[2025-08-22 14:26:51] [Rank 0] step:1801/10000 train_time:151697ms step_avg:84.23ms +[2025-08-22 14:26:51] [Rank 0] step:1801/10000 train_time:151697ms step_avg:84.23ms +[2025-08-22 14:26:52] [Rank 0] step:1821/10000 train_time:153330ms step_avg:84.20ms +[2025-08-22 14:26:52] [Rank 0] step:1821/10000 train_time:153330ms step_avg:84.20ms +[2025-08-22 14:26:54] [Rank 0] step:1841/10000 train_time:155027ms step_avg:84.21ms 
+[2025-08-22 14:26:54] [Rank 0] step:1841/10000 train_time:155027ms step_avg:84.21ms +[2025-08-22 14:26:56] [Rank 0] step:1861/10000 train_time:156723ms step_avg:84.21ms +[2025-08-22 14:26:56] [Rank 0] step:1861/10000 train_time:156723ms step_avg:84.21ms +[2025-08-22 14:26:57] [Rank 0] step:1881/10000 train_time:158420ms step_avg:84.22ms +[2025-08-22 14:26:57] [Rank 0] step:1881/10000 train_time:158420ms step_avg:84.22ms +[2025-08-22 14:26:59] [Rank 0] step:1901/10000 train_time:160119ms step_avg:84.23ms +[2025-08-22 14:26:59] [Rank 0] step:1901/10000 train_time:160119ms step_avg:84.23ms +[2025-08-22 14:27:01] [Rank 0] step:1921/10000 train_time:161820ms step_avg:84.24ms +[2025-08-22 14:27:01] [Rank 0] step:1921/10000 train_time:161820ms step_avg:84.24ms +[2025-08-22 14:27:02] [Rank 0] step:1941/10000 train_time:163521ms step_avg:84.25ms +[2025-08-22 14:27:02] [Rank 0] step:1941/10000 train_time:163521ms step_avg:84.25ms +[2025-08-22 14:27:04] [Rank 0] step:1961/10000 train_time:165225ms step_avg:84.26ms +[2025-08-22 14:27:04] [Rank 0] step:1961/10000 train_time:165225ms step_avg:84.26ms +[2025-08-22 14:27:06] [Rank 0] step:1981/10000 train_time:166930ms step_avg:84.27ms +[2025-08-22 14:27:06] [Rank 0] step:1981/10000 train_time:166930ms step_avg:84.27ms +[2025-08-22 14:27:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:27:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:27:21] [Rank 0] PRINT: step:2000/10000 val_loss:4.6352 svd_entropy: attn_qk:H=0.8582,top10E=0.10,eRank=312.4,q75/q25=68.87 attn_vo:H=0.4735,top10E=0.69,eRank=27.7,q75/q25=97.13 mlp_w1:H=0.7223,top10E=0.32,eRank=139.7,q75/q25=10.93 mlp_w2:H=0.8521,top10E=0.16,eRank=296.9,q75/q25=12.29 vo_prod:H=0.3725,top10E=0.86,eRank=13.8,q75/q25=4312.47 train_time:168718ms step_avg:84.36ms +[2025-08-22 14:27:21] [Rank 0] PRINT: step:2000/10000 val_loss:4.6352 svd_entropy: attn_qk:H=0.8582,top10E=0.10,eRank=312.4,q75/q25=68.87 attn_vo:H=0.4735,top10E=0.69,eRank=27.7,q75/q25=97.13 mlp_w1:H=0.7223,top10E=0.32,eRank=139.7,q75/q25=10.93 mlp_w2:H=0.8521,top10E=0.16,eRank=296.9,q75/q25=12.29 vo_prod:H=0.3725,top10E=0.86,eRank=13.8,q75/q25=4312.47 train_time:168718ms step_avg:84.36ms +[2025-08-22 14:27:21] [Rank 0] step:2001/10000 train_time:168734ms step_avg:84.32ms +[2025-08-22 14:27:21] [Rank 0] step:2001/10000 train_time:168734ms step_avg:84.32ms +[2025-08-22 14:27:23] [Rank 0] step:2021/10000 train_time:170403ms step_avg:84.32ms +[2025-08-22 14:27:23] [Rank 0] step:2021/10000 train_time:170403ms step_avg:84.32ms +[2025-08-22 14:27:25] [Rank 0] step:2041/10000 train_time:172275ms step_avg:84.41ms +[2025-08-22 14:27:25] [Rank 0] step:2041/10000 train_time:172275ms step_avg:84.41ms +[2025-08-22 14:27:27] [Rank 0] step:2061/10000 train_time:174039ms step_avg:84.44ms +[2025-08-22 14:27:27] [Rank 0] step:2061/10000 train_time:174039ms step_avg:84.44ms +[2025-08-22 14:27:28] [Rank 0] step:2081/10000 train_time:175736ms step_avg:84.45ms +[2025-08-22 14:27:28] [Rank 0] step:2081/10000 train_time:175736ms step_avg:84.45ms +[2025-08-22 14:27:30] [Rank 0] step:2101/10000 train_time:177434ms step_avg:84.45ms +[2025-08-22 14:27:30] [Rank 0] step:2101/10000 train_time:177434ms step_avg:84.45ms +[2025-08-22 14:27:32] [Rank 0] step:2121/10000 train_time:179132ms step_avg:84.46ms +[2025-08-22 14:27:32] [Rank 0] step:2121/10000 train_time:179132ms step_avg:84.46ms +[2025-08-22 
14:27:33] [Rank 0] step:2141/10000 train_time:180830ms step_avg:84.46ms +[2025-08-22 14:27:33] [Rank 0] step:2141/10000 train_time:180830ms step_avg:84.46ms +[2025-08-22 14:27:35] [Rank 0] step:2161/10000 train_time:182529ms step_avg:84.47ms +[2025-08-22 14:27:35] [Rank 0] step:2161/10000 train_time:182529ms step_avg:84.47ms +[2025-08-22 14:27:37] [Rank 0] step:2181/10000 train_time:184228ms step_avg:84.47ms +[2025-08-22 14:27:37] [Rank 0] step:2181/10000 train_time:184228ms step_avg:84.47ms +[2025-08-22 14:27:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:27:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:27:52] [Rank 0] PRINT: step:2200/10000 val_loss:4.5469 svd_entropy: attn_qk:H=0.8609,top10E=0.10,eRank=317.6,q75/q25=68.34 attn_vo:H=0.4847,top10E=0.67,eRank=30.1,q75/q25=98.34 mlp_w1:H=0.7280,top10E=0.31,eRank=145.7,q75/q25=11.43 mlp_w2:H=0.8574,top10E=0.16,eRank=307.5,q75/q25=11.95 vo_prod:H=0.3841,top10E=0.84,eRank=15.0,q75/q25=5329.78 train_time:186010ms step_avg:84.55ms +[2025-08-22 14:27:52] [Rank 0] PRINT: step:2200/10000 val_loss:4.5469 svd_entropy: attn_qk:H=0.8609,top10E=0.10,eRank=317.6,q75/q25=68.34 attn_vo:H=0.4847,top10E=0.67,eRank=30.1,q75/q25=98.34 mlp_w1:H=0.7280,top10E=0.31,eRank=145.7,q75/q25=11.43 mlp_w2:H=0.8574,top10E=0.16,eRank=307.5,q75/q25=11.95 vo_prod:H=0.3841,top10E=0.84,eRank=15.0,q75/q25=5329.78 train_time:186010ms step_avg:84.55ms +[2025-08-22 14:27:52] [Rank 0] step:2201/10000 train_time:186025ms step_avg:84.52ms +[2025-08-22 14:27:52] [Rank 0] step:2201/10000 train_time:186025ms step_avg:84.52ms +[2025-08-22 14:27:54] [Rank 0] step:2221/10000 train_time:187665ms step_avg:84.50ms +[2025-08-22 14:27:54] [Rank 0] step:2221/10000 train_time:187665ms step_avg:84.50ms +[2025-08-22 14:27:55] [Rank 0] step:2241/10000 train_time:189394ms 
step_avg:84.51ms +[2025-08-22 14:27:55] [Rank 0] step:2241/10000 train_time:189394ms step_avg:84.51ms +[2025-08-22 14:27:57] [Rank 0] step:2261/10000 train_time:191134ms step_avg:84.54ms +[2025-08-22 14:27:57] [Rank 0] step:2261/10000 train_time:191134ms step_avg:84.54ms +[2025-08-22 14:27:59] [Rank 0] step:2281/10000 train_time:192875ms step_avg:84.56ms +[2025-08-22 14:27:59] [Rank 0] step:2281/10000 train_time:192875ms step_avg:84.56ms +[2025-08-22 14:28:01] [Rank 0] step:2301/10000 train_time:194616ms step_avg:84.58ms +[2025-08-22 14:28:01] [Rank 0] step:2301/10000 train_time:194616ms step_avg:84.58ms +[2025-08-22 14:28:02] [Rank 0] step:2321/10000 train_time:196358ms step_avg:84.60ms +[2025-08-22 14:28:02] [Rank 0] step:2321/10000 train_time:196358ms step_avg:84.60ms +[2025-08-22 14:28:04] [Rank 0] step:2341/10000 train_time:198098ms step_avg:84.62ms +[2025-08-22 14:28:04] [Rank 0] step:2341/10000 train_time:198098ms step_avg:84.62ms +[2025-08-22 14:28:06] [Rank 0] step:2361/10000 train_time:199840ms step_avg:84.64ms +[2025-08-22 14:28:06] [Rank 0] step:2361/10000 train_time:199840ms step_avg:84.64ms +[2025-08-22 14:28:08] [Rank 0] step:2381/10000 train_time:201586ms step_avg:84.66ms +[2025-08-22 14:28:08] [Rank 0] step:2381/10000 train_time:201586ms step_avg:84.66ms +[2025-08-22 14:28:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:28:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:28:23] [Rank 0] PRINT: step:2400/10000 val_loss:4.4725 svd_entropy: attn_qk:H=0.8632,top10E=0.10,eRank=322.3,q75/q25=68.61 attn_vo:H=0.4945,top10E=0.65,eRank=32.4,q75/q25=102.12 mlp_w1:H=0.7333,top10E=0.30,eRank=151.6,q75/q25=11.81 mlp_w2:H=0.8619,top10E=0.15,eRank=316.7,q75/q25=11.61 vo_prod:H=0.3952,top10E=0.82,eRank=16.3,q75/q25=6470.97 train_time:203418ms step_avg:84.76ms +[2025-08-22 14:28:23] [Rank 0] PRINT: step:2400/10000 val_loss:4.4725 svd_entropy: attn_qk:H=0.8632,top10E=0.10,eRank=322.3,q75/q25=68.61 attn_vo:H=0.4945,top10E=0.65,eRank=32.4,q75/q25=102.12 mlp_w1:H=0.7333,top10E=0.30,eRank=151.6,q75/q25=11.81 mlp_w2:H=0.8619,top10E=0.15,eRank=316.7,q75/q25=11.61 vo_prod:H=0.3952,top10E=0.82,eRank=16.3,q75/q25=6470.97 train_time:203418ms step_avg:84.76ms +[2025-08-22 14:28:23] [Rank 0] step:2401/10000 train_time:203433ms step_avg:84.73ms +[2025-08-22 14:28:23] [Rank 0] step:2401/10000 train_time:203433ms step_avg:84.73ms +[2025-08-22 14:28:25] [Rank 0] step:2421/10000 train_time:205093ms step_avg:84.71ms +[2025-08-22 14:28:25] [Rank 0] step:2421/10000 train_time:205093ms step_avg:84.71ms +[2025-08-22 14:28:26] [Rank 0] step:2441/10000 train_time:206882ms step_avg:84.75ms +[2025-08-22 14:28:26] [Rank 0] step:2441/10000 train_time:206882ms step_avg:84.75ms +[2025-08-22 14:28:28] [Rank 0] step:2461/10000 train_time:208667ms step_avg:84.79ms +[2025-08-22 14:28:28] [Rank 0] step:2461/10000 train_time:208667ms step_avg:84.79ms +[2025-08-22 14:28:30] [Rank 0] step:2481/10000 train_time:210410ms step_avg:84.81ms +[2025-08-22 14:28:30] [Rank 0] step:2481/10000 train_time:210410ms step_avg:84.81ms +[2025-08-22 14:28:32] [Rank 0] step:2501/10000 train_time:212157ms step_avg:84.83ms +[2025-08-22 14:28:32] [Rank 0] step:2501/10000 train_time:212157ms step_avg:84.83ms +[2025-08-22 14:28:33] [Rank 0] step:2521/10000 train_time:213903ms step_avg:84.85ms +[2025-08-22 14:28:33] [Rank 0] step:2521/10000 train_time:213903ms step_avg:84.85ms +[2025-08-22 
14:28:35] [Rank 0] step:2541/10000 train_time:215648ms step_avg:84.87ms +[2025-08-22 14:28:35] [Rank 0] step:2541/10000 train_time:215648ms step_avg:84.87ms +[2025-08-22 14:28:37] [Rank 0] step:2561/10000 train_time:217396ms step_avg:84.89ms +[2025-08-22 14:28:37] [Rank 0] step:2561/10000 train_time:217396ms step_avg:84.89ms +[2025-08-22 14:28:39] [Rank 0] step:2581/10000 train_time:219144ms step_avg:84.91ms +[2025-08-22 14:28:39] [Rank 0] step:2581/10000 train_time:219144ms step_avg:84.91ms +[2025-08-22 14:28:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:28:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:28:54] [Rank 0] PRINT: step:2600/10000 val_loss:4.4083 svd_entropy: attn_qk:H=0.8654,top10E=0.10,eRank=326.5,q75/q25=66.71 attn_vo:H=0.5035,top10E=0.63,eRank=34.7,q75/q25=105.89 mlp_w1:H=0.7379,top10E=0.30,eRank=156.8,q75/q25=12.17 mlp_w2:H=0.8659,top10E=0.15,eRank=325.0,q75/q25=11.28 vo_prod:H=0.4050,top10E=0.80,eRank=17.5,q75/q25=7560.20 train_time:220978ms step_avg:84.99ms +[2025-08-22 14:28:54] [Rank 0] PRINT: step:2600/10000 val_loss:4.4083 svd_entropy: attn_qk:H=0.8654,top10E=0.10,eRank=326.5,q75/q25=66.71 attn_vo:H=0.5035,top10E=0.63,eRank=34.7,q75/q25=105.89 mlp_w1:H=0.7379,top10E=0.30,eRank=156.8,q75/q25=12.17 mlp_w2:H=0.8659,top10E=0.15,eRank=325.0,q75/q25=11.28 vo_prod:H=0.4050,top10E=0.80,eRank=17.5,q75/q25=7560.20 train_time:220978ms step_avg:84.99ms +[2025-08-22 14:28:54] [Rank 0] step:2601/10000 train_time:220996ms step_avg:84.97ms +[2025-08-22 14:28:54] [Rank 0] step:2601/10000 train_time:220996ms step_avg:84.97ms +[2025-08-22 14:28:56] [Rank 0] step:2621/10000 train_time:222651ms step_avg:84.95ms +[2025-08-22 14:28:56] [Rank 0] step:2621/10000 train_time:222651ms step_avg:84.95ms +[2025-08-22 14:28:57] [Rank 0] step:2641/10000 train_time:224393ms 
step_avg:84.97ms +[2025-08-22 14:28:57] [Rank 0] step:2641/10000 train_time:224393ms step_avg:84.97ms +[2025-08-22 14:28:59] [Rank 0] step:2661/10000 train_time:226135ms step_avg:84.98ms +[2025-08-22 14:28:59] [Rank 0] step:2661/10000 train_time:226135ms step_avg:84.98ms +[2025-08-22 14:29:01] [Rank 0] step:2681/10000 train_time:227877ms step_avg:85.00ms +[2025-08-22 14:29:01] [Rank 0] step:2681/10000 train_time:227877ms step_avg:85.00ms +[2025-08-22 14:29:03] [Rank 0] step:2701/10000 train_time:229620ms step_avg:85.01ms +[2025-08-22 14:29:03] [Rank 0] step:2701/10000 train_time:229620ms step_avg:85.01ms +[2025-08-22 14:29:04] [Rank 0] step:2721/10000 train_time:231364ms step_avg:85.03ms +[2025-08-22 14:29:04] [Rank 0] step:2721/10000 train_time:231364ms step_avg:85.03ms +[2025-08-22 14:29:06] [Rank 0] step:2741/10000 train_time:233108ms step_avg:85.04ms +[2025-08-22 14:29:06] [Rank 0] step:2741/10000 train_time:233108ms step_avg:85.04ms +[2025-08-22 14:29:08] [Rank 0] step:2761/10000 train_time:234852ms step_avg:85.06ms +[2025-08-22 14:29:08] [Rank 0] step:2761/10000 train_time:234852ms step_avg:85.06ms +[2025-08-22 14:29:10] [Rank 0] step:2781/10000 train_time:236595ms step_avg:85.08ms +[2025-08-22 14:29:10] [Rank 0] step:2781/10000 train_time:236595ms step_avg:85.08ms +[2025-08-22 14:29:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:29:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:29:25] [Rank 0] PRINT: step:2800/10000 val_loss:4.3735 svd_entropy: attn_qk:H=0.8673,top10E=0.10,eRank=330.5,q75/q25=65.74 attn_vo:H=0.5116,top10E=0.61,eRank=36.8,q75/q25=108.16 mlp_w1:H=0.7423,top10E=0.29,eRank=162.1,q75/q25=12.45 mlp_w2:H=0.8695,top10E=0.15,eRank=332.7,q75/q25=10.91 vo_prod:H=0.4136,top10E=0.79,eRank=18.6,q75/q25=8486.50 train_time:238426ms step_avg:85.15ms +[2025-08-22 14:29:25] [Rank 0] PRINT: step:2800/10000 val_loss:4.3735 svd_entropy: attn_qk:H=0.8673,top10E=0.10,eRank=330.5,q75/q25=65.74 attn_vo:H=0.5116,top10E=0.61,eRank=36.8,q75/q25=108.16 mlp_w1:H=0.7423,top10E=0.29,eRank=162.1,q75/q25=12.45 mlp_w2:H=0.8695,top10E=0.15,eRank=332.7,q75/q25=10.91 vo_prod:H=0.4136,top10E=0.79,eRank=18.6,q75/q25=8486.50 train_time:238426ms step_avg:85.15ms +[2025-08-22 14:29:25] [Rank 0] step:2801/10000 train_time:238441ms step_avg:85.13ms +[2025-08-22 14:29:25] [Rank 0] step:2801/10000 train_time:238441ms step_avg:85.13ms +[2025-08-22 14:29:27] [Rank 0] step:2821/10000 train_time:240106ms step_avg:85.11ms +[2025-08-22 14:29:27] [Rank 0] step:2821/10000 train_time:240106ms step_avg:85.11ms +[2025-08-22 14:29:28] [Rank 0] step:2841/10000 train_time:241845ms step_avg:85.13ms +[2025-08-22 14:29:28] [Rank 0] step:2841/10000 train_time:241845ms step_avg:85.13ms +[2025-08-22 14:29:30] [Rank 0] step:2861/10000 train_time:243586ms step_avg:85.14ms +[2025-08-22 14:29:30] [Rank 0] step:2861/10000 train_time:243586ms step_avg:85.14ms +[2025-08-22 14:29:32] [Rank 0] step:2881/10000 train_time:245383ms step_avg:85.17ms +[2025-08-22 14:29:32] [Rank 0] step:2881/10000 train_time:245383ms step_avg:85.17ms +[2025-08-22 14:29:34] [Rank 0] step:2901/10000 train_time:247126ms step_avg:85.19ms +[2025-08-22 14:29:34] [Rank 0] step:2901/10000 train_time:247126ms step_avg:85.19ms +[2025-08-22 14:29:35] [Rank 0] step:2921/10000 train_time:248867ms step_avg:85.20ms +[2025-08-22 14:29:35] [Rank 0] step:2921/10000 train_time:248867ms step_avg:85.20ms +[2025-08-22 
14:29:37] [Rank 0] step:2941/10000 train_time:250610ms step_avg:85.21ms +[2025-08-22 14:29:37] [Rank 0] step:2941/10000 train_time:250610ms step_avg:85.21ms +[2025-08-22 14:29:39] [Rank 0] step:2961/10000 train_time:252354ms step_avg:85.23ms +[2025-08-22 14:29:39] [Rank 0] step:2961/10000 train_time:252354ms step_avg:85.23ms +[2025-08-22 14:29:41] [Rank 0] step:2981/10000 train_time:254104ms step_avg:85.24ms +[2025-08-22 14:29:41] [Rank 0] step:2981/10000 train_time:254104ms step_avg:85.24ms +[2025-08-22 14:29:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:29:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:29:56] [Rank 0] PRINT: step:3000/10000 val_loss:4.3337 svd_entropy: attn_qk:H=0.8690,top10E=0.10,eRank=334.0,q75/q25=64.53 attn_vo:H=0.5188,top10E=0.59,eRank=38.9,q75/q25=111.13 mlp_w1:H=0.7462,top10E=0.28,eRank=166.8,q75/q25=12.68 mlp_w2:H=0.8725,top10E=0.14,eRank=339.3,q75/q25=10.61 vo_prod:H=0.4213,top10E=0.77,eRank=19.7,q75/q25=9424.20 train_time:255944ms step_avg:85.31ms +[2025-08-22 14:29:56] [Rank 0] PRINT: step:3000/10000 val_loss:4.3337 svd_entropy: attn_qk:H=0.8690,top10E=0.10,eRank=334.0,q75/q25=64.53 attn_vo:H=0.5188,top10E=0.59,eRank=38.9,q75/q25=111.13 mlp_w1:H=0.7462,top10E=0.28,eRank=166.8,q75/q25=12.68 mlp_w2:H=0.8725,top10E=0.14,eRank=339.3,q75/q25=10.61 vo_prod:H=0.4213,top10E=0.77,eRank=19.7,q75/q25=9424.20 train_time:255944ms step_avg:85.31ms +[2025-08-22 14:29:56] [Rank 0] step:3001/10000 train_time:255958ms step_avg:85.29ms +[2025-08-22 14:29:56] [Rank 0] step:3001/10000 train_time:255958ms step_avg:85.29ms +[2025-08-22 14:29:58] [Rank 0] step:3021/10000 train_time:257647ms step_avg:85.29ms +[2025-08-22 14:29:58] [Rank 0] step:3021/10000 train_time:257647ms step_avg:85.29ms +[2025-08-22 14:30:00] [Rank 0] step:3041/10000 train_time:259392ms 
step_avg:85.30ms +[2025-08-22 14:30:00] [Rank 0] step:3041/10000 train_time:259392ms step_avg:85.30ms +[2025-08-22 14:30:01] [Rank 0] step:3061/10000 train_time:261139ms step_avg:85.31ms +[2025-08-22 14:30:01] [Rank 0] step:3061/10000 train_time:261139ms step_avg:85.31ms +[2025-08-22 14:30:03] [Rank 0] step:3081/10000 train_time:262891ms step_avg:85.33ms +[2025-08-22 14:30:03] [Rank 0] step:3081/10000 train_time:262891ms step_avg:85.33ms +[2025-08-22 14:30:05] [Rank 0] step:3101/10000 train_time:264642ms step_avg:85.34ms +[2025-08-22 14:30:05] [Rank 0] step:3101/10000 train_time:264642ms step_avg:85.34ms +[2025-08-22 14:30:07] [Rank 0] step:3121/10000 train_time:266393ms step_avg:85.35ms +[2025-08-22 14:30:07] [Rank 0] step:3121/10000 train_time:266393ms step_avg:85.35ms +[2025-08-22 14:30:08] [Rank 0] step:3141/10000 train_time:268143ms step_avg:85.37ms +[2025-08-22 14:30:08] [Rank 0] step:3141/10000 train_time:268143ms step_avg:85.37ms +[2025-08-22 14:30:10] [Rank 0] step:3161/10000 train_time:269894ms step_avg:85.38ms +[2025-08-22 14:30:10] [Rank 0] step:3161/10000 train_time:269894ms step_avg:85.38ms +[2025-08-22 14:30:12] [Rank 0] step:3181/10000 train_time:271647ms step_avg:85.40ms +[2025-08-22 14:30:12] [Rank 0] step:3181/10000 train_time:271647ms step_avg:85.40ms +[2025-08-22 14:30:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:30:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:30:27] [Rank 0] PRINT: step:3200/10000 val_loss:4.3095 svd_entropy: attn_qk:H=0.8706,top10E=0.09,eRank=337.1,q75/q25=63.81 attn_vo:H=0.5254,top10E=0.58,eRank=40.8,q75/q25=112.60 mlp_w1:H=0.7497,top10E=0.28,eRank=171.1,q75/q25=12.89 mlp_w2:H=0.8752,top10E=0.14,eRank=345.1,q75/q25=10.31 vo_prod:H=0.4280,top10E=0.76,eRank=20.7,q75/q25=10226.58 train_time:273487ms step_avg:85.46ms +[2025-08-22 14:30:27] [Rank 0] PRINT: step:3200/10000 val_loss:4.3095 svd_entropy: attn_qk:H=0.8706,top10E=0.09,eRank=337.1,q75/q25=63.81 attn_vo:H=0.5254,top10E=0.58,eRank=40.8,q75/q25=112.60 mlp_w1:H=0.7497,top10E=0.28,eRank=171.1,q75/q25=12.89 mlp_w2:H=0.8752,top10E=0.14,eRank=345.1,q75/q25=10.31 vo_prod:H=0.4280,top10E=0.76,eRank=20.7,q75/q25=10226.58 train_time:273487ms step_avg:85.46ms +[2025-08-22 14:30:27] [Rank 0] step:3201/10000 train_time:273503ms step_avg:85.44ms +[2025-08-22 14:30:27] [Rank 0] step:3201/10000 train_time:273503ms step_avg:85.44ms +[2025-08-22 14:30:29] [Rank 0] step:3221/10000 train_time:275180ms step_avg:85.43ms +[2025-08-22 14:30:29] [Rank 0] step:3221/10000 train_time:275180ms step_avg:85.43ms +[2025-08-22 14:30:31] [Rank 0] step:3241/10000 train_time:276928ms step_avg:85.45ms +[2025-08-22 14:30:31] [Rank 0] step:3241/10000 train_time:276928ms step_avg:85.45ms +[2025-08-22 14:30:32] [Rank 0] step:3261/10000 train_time:278752ms step_avg:85.48ms +[2025-08-22 14:30:32] [Rank 0] step:3261/10000 train_time:278752ms step_avg:85.48ms +[2025-08-22 14:30:34] [Rank 0] step:3281/10000 train_time:280483ms step_avg:85.49ms +[2025-08-22 14:30:34] [Rank 0] step:3281/10000 train_time:280483ms step_avg:85.49ms +[2025-08-22 14:30:36] [Rank 0] step:3301/10000 train_time:282287ms step_avg:85.52ms +[2025-08-22 14:30:36] [Rank 0] step:3301/10000 train_time:282287ms step_avg:85.52ms +[2025-08-22 14:30:38] [Rank 0] step:3321/10000 train_time:284040ms step_avg:85.53ms +[2025-08-22 14:30:38] [Rank 0] step:3321/10000 train_time:284040ms step_avg:85.53ms +[2025-08-22 
14:30:39] [Rank 0] step:3341/10000 train_time:285793ms step_avg:85.54ms +[2025-08-22 14:30:39] [Rank 0] step:3341/10000 train_time:285793ms step_avg:85.54ms +[2025-08-22 14:30:41] [Rank 0] step:3361/10000 train_time:287544ms step_avg:85.55ms +[2025-08-22 14:30:41] [Rank 0] step:3361/10000 train_time:287544ms step_avg:85.55ms +[2025-08-22 14:30:43] [Rank 0] step:3381/10000 train_time:289298ms step_avg:85.57ms +[2025-08-22 14:30:43] [Rank 0] step:3381/10000 train_time:289298ms step_avg:85.57ms +[2025-08-22 14:30:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:30:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:30:58] [Rank 0] PRINT: step:3400/10000 val_loss:4.2663 svd_entropy: attn_qk:H=0.8720,top10E=0.09,eRank=340.2,q75/q25=62.91 attn_vo:H=0.5319,top10E=0.57,eRank=42.8,q75/q25=115.14 mlp_w1:H=0.7531,top10E=0.28,eRank=175.4,q75/q25=13.02 mlp_w2:H=0.8776,top10E=0.14,eRank=350.6,q75/q25=10.03 vo_prod:H=0.4348,top10E=0.75,eRank=21.8,q75/q25=10943.12 train_time:291139ms step_avg:85.63ms +[2025-08-22 14:30:58] [Rank 0] PRINT: step:3400/10000 val_loss:4.2663 svd_entropy: attn_qk:H=0.8720,top10E=0.09,eRank=340.2,q75/q25=62.91 attn_vo:H=0.5319,top10E=0.57,eRank=42.8,q75/q25=115.14 mlp_w1:H=0.7531,top10E=0.28,eRank=175.4,q75/q25=13.02 mlp_w2:H=0.8776,top10E=0.14,eRank=350.6,q75/q25=10.03 vo_prod:H=0.4348,top10E=0.75,eRank=21.8,q75/q25=10943.12 train_time:291139ms step_avg:85.63ms +[2025-08-22 14:30:58] [Rank 0] step:3401/10000 train_time:291153ms step_avg:85.61ms +[2025-08-22 14:30:58] [Rank 0] step:3401/10000 train_time:291153ms step_avg:85.61ms +[2025-08-22 14:31:00] [Rank 0] step:3421/10000 train_time:292825ms step_avg:85.60ms +[2025-08-22 14:31:00] [Rank 0] step:3421/10000 train_time:292825ms step_avg:85.60ms +[2025-08-22 14:31:02] [Rank 0] step:3441/10000 train_time:294575ms 
step_avg:85.61ms +[2025-08-22 14:31:02] [Rank 0] step:3441/10000 train_time:294575ms step_avg:85.61ms +[2025-08-22 14:31:04] [Rank 0] step:3461/10000 train_time:296324ms step_avg:85.62ms +[2025-08-22 14:31:04] [Rank 0] step:3461/10000 train_time:296324ms step_avg:85.62ms +[2025-08-22 14:31:05] [Rank 0] step:3481/10000 train_time:298074ms step_avg:85.63ms +[2025-08-22 14:31:05] [Rank 0] step:3481/10000 train_time:298074ms step_avg:85.63ms +[2025-08-22 14:31:07] [Rank 0] step:3501/10000 train_time:299827ms step_avg:85.64ms +[2025-08-22 14:31:07] [Rank 0] step:3501/10000 train_time:299827ms step_avg:85.64ms +[2025-08-22 14:31:09] [Rank 0] step:3521/10000 train_time:301580ms step_avg:85.65ms +[2025-08-22 14:31:09] [Rank 0] step:3521/10000 train_time:301580ms step_avg:85.65ms +[2025-08-22 14:31:11] [Rank 0] step:3541/10000 train_time:303329ms step_avg:85.66ms +[2025-08-22 14:31:11] [Rank 0] step:3541/10000 train_time:303329ms step_avg:85.66ms +[2025-08-22 14:31:12] [Rank 0] step:3561/10000 train_time:305079ms step_avg:85.67ms +[2025-08-22 14:31:12] [Rank 0] step:3561/10000 train_time:305079ms step_avg:85.67ms +[2025-08-22 14:31:14] [Rank 0] step:3581/10000 train_time:306830ms step_avg:85.68ms +[2025-08-22 14:31:14] [Rank 0] step:3581/10000 train_time:306830ms step_avg:85.68ms +[2025-08-22 14:31:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:31:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:31:29] [Rank 0] PRINT: step:3600/10000 val_loss:4.2633 svd_entropy: attn_qk:H=0.8734,top10E=0.09,eRank=342.9,q75/q25=62.01 attn_vo:H=0.5378,top10E=0.56,eRank=44.7,q75/q25=116.68 mlp_w1:H=0.7561,top10E=0.27,eRank=179.3,q75/q25=13.11 mlp_w2:H=0.8797,top10E=0.14,eRank=355.4,q75/q25=9.81 vo_prod:H=0.4408,top10E=0.73,eRank=22.8,q75/q25=11537.23 train_time:308675ms step_avg:85.74ms +[2025-08-22 14:31:29] [Rank 0] PRINT: step:3600/10000 val_loss:4.2633 svd_entropy: attn_qk:H=0.8734,top10E=0.09,eRank=342.9,q75/q25=62.01 attn_vo:H=0.5378,top10E=0.56,eRank=44.7,q75/q25=116.68 mlp_w1:H=0.7561,top10E=0.27,eRank=179.3,q75/q25=13.11 mlp_w2:H=0.8797,top10E=0.14,eRank=355.4,q75/q25=9.81 vo_prod:H=0.4408,top10E=0.73,eRank=22.8,q75/q25=11537.23 train_time:308675ms step_avg:85.74ms +[2025-08-22 14:31:29] [Rank 0] step:3601/10000 train_time:308689ms step_avg:85.72ms +[2025-08-22 14:31:29] [Rank 0] step:3601/10000 train_time:308689ms step_avg:85.72ms +[2025-08-22 14:31:31] [Rank 0] step:3621/10000 train_time:310361ms step_avg:85.71ms +[2025-08-22 14:31:31] [Rank 0] step:3621/10000 train_time:310361ms step_avg:85.71ms +[2025-08-22 14:31:33] [Rank 0] step:3641/10000 train_time:312110ms step_avg:85.72ms +[2025-08-22 14:31:33] [Rank 0] step:3641/10000 train_time:312110ms step_avg:85.72ms +[2025-08-22 14:31:35] [Rank 0] step:3661/10000 train_time:313857ms step_avg:85.73ms +[2025-08-22 14:31:35] [Rank 0] step:3661/10000 train_time:313857ms step_avg:85.73ms +[2025-08-22 14:31:36] [Rank 0] step:3681/10000 train_time:315667ms step_avg:85.76ms +[2025-08-22 14:31:36] [Rank 0] step:3681/10000 train_time:315667ms step_avg:85.76ms +[2025-08-22 14:31:38] [Rank 0] step:3701/10000 train_time:317467ms step_avg:85.78ms +[2025-08-22 14:31:38] [Rank 0] step:3701/10000 train_time:317467ms step_avg:85.78ms +[2025-08-22 14:31:40] [Rank 0] step:3721/10000 train_time:319244ms step_avg:85.80ms +[2025-08-22 14:31:40] [Rank 0] step:3721/10000 train_time:319244ms step_avg:85.80ms +[2025-08-22 
14:31:42] [Rank 0] step:3741/10000 train_time:321031ms step_avg:85.81ms +[2025-08-22 14:31:42] [Rank 0] step:3741/10000 train_time:321031ms step_avg:85.81ms +[2025-08-22 14:31:44] [Rank 0] step:3761/10000 train_time:322818ms step_avg:85.83ms +[2025-08-22 14:31:44] [Rank 0] step:3761/10000 train_time:322818ms step_avg:85.83ms +[2025-08-22 14:31:45] [Rank 0] step:3781/10000 train_time:324607ms step_avg:85.85ms +[2025-08-22 14:31:45] [Rank 0] step:3781/10000 train_time:324607ms step_avg:85.85ms +[2025-08-22 14:31:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:31:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:32:01] [Rank 0] PRINT: step:3800/10000 val_loss:4.1981 svd_entropy: attn_qk:H=0.8746,top10E=0.09,eRank=345.4,q75/q25=61.13 attn_vo:H=0.5430,top10E=0.54,eRank=46.5,q75/q25=117.54 mlp_w1:H=0.7590,top10E=0.27,eRank=183.0,q75/q25=13.24 mlp_w2:H=0.8816,top10E=0.13,eRank=359.6,q75/q25=9.58 vo_prod:H=0.4463,top10E=0.72,eRank=23.7,q75/q25=11998.30 train_time:326488ms step_avg:85.92ms +[2025-08-22 14:32:01] [Rank 0] PRINT: step:3800/10000 val_loss:4.1981 svd_entropy: attn_qk:H=0.8746,top10E=0.09,eRank=345.4,q75/q25=61.13 attn_vo:H=0.5430,top10E=0.54,eRank=46.5,q75/q25=117.54 mlp_w1:H=0.7590,top10E=0.27,eRank=183.0,q75/q25=13.24 mlp_w2:H=0.8816,top10E=0.13,eRank=359.6,q75/q25=9.58 vo_prod:H=0.4463,top10E=0.72,eRank=23.7,q75/q25=11998.30 train_time:326488ms step_avg:85.92ms +[2025-08-22 14:32:01] [Rank 0] step:3801/10000 train_time:326504ms step_avg:85.90ms +[2025-08-22 14:32:01] [Rank 0] step:3801/10000 train_time:326504ms step_avg:85.90ms +[2025-08-22 14:32:02] [Rank 0] step:3821/10000 train_time:328216ms step_avg:85.90ms +[2025-08-22 14:32:02] [Rank 0] step:3821/10000 train_time:328216ms step_avg:85.90ms +[2025-08-22 14:32:04] [Rank 0] step:3841/10000 train_time:330007ms 
step_avg:85.92ms +[2025-08-22 14:32:04] [Rank 0] step:3841/10000 train_time:330007ms step_avg:85.92ms +[2025-08-22 14:32:06] [Rank 0] step:3861/10000 train_time:331800ms step_avg:85.94ms +[2025-08-22 14:32:06] [Rank 0] step:3861/10000 train_time:331800ms step_avg:85.94ms +[2025-08-22 14:32:08] [Rank 0] step:3881/10000 train_time:333586ms step_avg:85.95ms +[2025-08-22 14:32:08] [Rank 0] step:3881/10000 train_time:333586ms step_avg:85.95ms +[2025-08-22 14:32:10] [Rank 0] step:3901/10000 train_time:335375ms step_avg:85.97ms +[2025-08-22 14:32:10] [Rank 0] step:3901/10000 train_time:335375ms step_avg:85.97ms +[2025-08-22 14:32:11] [Rank 0] step:3921/10000 train_time:337163ms step_avg:85.99ms +[2025-08-22 14:32:11] [Rank 0] step:3921/10000 train_time:337163ms step_avg:85.99ms +[2025-08-22 14:32:13] [Rank 0] step:3941/10000 train_time:338956ms step_avg:86.01ms +[2025-08-22 14:32:13] [Rank 0] step:3941/10000 train_time:338956ms step_avg:86.01ms +[2025-08-22 14:32:15] [Rank 0] step:3961/10000 train_time:340745ms step_avg:86.03ms +[2025-08-22 14:32:15] [Rank 0] step:3961/10000 train_time:340745ms step_avg:86.03ms +[2025-08-22 14:32:17] [Rank 0] step:3981/10000 train_time:342537ms step_avg:86.04ms +[2025-08-22 14:32:17] [Rank 0] step:3981/10000 train_time:342537ms step_avg:86.04ms +[2025-08-22 14:32:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:32:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:32:32] [Rank 0] PRINT: step:4000/10000 val_loss:4.1699 svd_entropy: attn_qk:H=0.8757,top10E=0.09,eRank=347.9,q75/q25=60.11 attn_vo:H=0.5481,top10E=0.53,eRank=48.2,q75/q25=116.07 mlp_w1:H=0.7617,top10E=0.26,eRank=186.6,q75/q25=13.27 mlp_w2:H=0.8834,top10E=0.13,eRank=363.7,q75/q25=9.40 vo_prod:H=0.4516,top10E=0.71,eRank=24.6,q75/q25=12026.79 train_time:344416ms step_avg:86.10ms +[2025-08-22 14:32:32] [Rank 0] PRINT: step:4000/10000 val_loss:4.1699 svd_entropy: attn_qk:H=0.8757,top10E=0.09,eRank=347.9,q75/q25=60.11 attn_vo:H=0.5481,top10E=0.53,eRank=48.2,q75/q25=116.07 mlp_w1:H=0.7617,top10E=0.26,eRank=186.6,q75/q25=13.27 mlp_w2:H=0.8834,top10E=0.13,eRank=363.7,q75/q25=9.40 vo_prod:H=0.4516,top10E=0.71,eRank=24.6,q75/q25=12026.79 train_time:344416ms step_avg:86.10ms +[2025-08-22 14:32:32] [Rank 0] step:4001/10000 train_time:344430ms step_avg:86.09ms +[2025-08-22 14:32:32] [Rank 0] step:4001/10000 train_time:344430ms step_avg:86.09ms +[2025-08-22 14:32:34] [Rank 0] step:4021/10000 train_time:346145ms step_avg:86.08ms +[2025-08-22 14:32:34] [Rank 0] step:4021/10000 train_time:346145ms step_avg:86.08ms +[2025-08-22 14:32:36] [Rank 0] step:4041/10000 train_time:347930ms step_avg:86.10ms +[2025-08-22 14:32:36] [Rank 0] step:4041/10000 train_time:347930ms step_avg:86.10ms +[2025-08-22 14:32:38] [Rank 0] step:4061/10000 train_time:349716ms step_avg:86.12ms +[2025-08-22 14:32:38] [Rank 0] step:4061/10000 train_time:349716ms step_avg:86.12ms +[2025-08-22 14:32:40] [Rank 0] step:4081/10000 train_time:351675ms step_avg:86.17ms +[2025-08-22 14:32:40] [Rank 0] step:4081/10000 train_time:351675ms step_avg:86.17ms +[2025-08-22 14:32:41] [Rank 0] step:4101/10000 train_time:353463ms step_avg:86.19ms +[2025-08-22 14:32:41] [Rank 0] step:4101/10000 train_time:353463ms step_avg:86.19ms +[2025-08-22 14:32:43] [Rank 0] step:4121/10000 train_time:355251ms step_avg:86.20ms +[2025-08-22 14:32:43] [Rank 0] step:4121/10000 train_time:355251ms step_avg:86.20ms +[2025-08-22 
14:32:45] [Rank 0] step:4141/10000 train_time:357038ms step_avg:86.22ms +[2025-08-22 14:32:45] [Rank 0] step:4141/10000 train_time:357038ms step_avg:86.22ms +[2025-08-22 14:32:47] [Rank 0] step:4161/10000 train_time:358825ms step_avg:86.24ms +[2025-08-22 14:32:47] [Rank 0] step:4161/10000 train_time:358825ms step_avg:86.24ms +[2025-08-22 14:32:48] [Rank 0] step:4181/10000 train_time:360616ms step_avg:86.25ms +[2025-08-22 14:32:48] [Rank 0] step:4181/10000 train_time:360616ms step_avg:86.25ms +[2025-08-22 14:32:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:32:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:33:04] [Rank 0] PRINT: step:4200/10000 val_loss:4.1571 svd_entropy: attn_qk:H=0.8768,top10E=0.09,eRank=350.2,q75/q25=59.39 attn_vo:H=0.5527,top10E=0.53,eRank=49.9,q75/q25=116.05 mlp_w1:H=0.7642,top10E=0.26,eRank=189.9,q75/q25=13.31 mlp_w2:H=0.8850,top10E=0.13,eRank=367.5,q75/q25=9.21 vo_prod:H=0.4560,top10E=0.70,eRank=25.5,q75/q25=12352.17 train_time:362492ms step_avg:86.31ms +[2025-08-22 14:33:04] [Rank 0] PRINT: step:4200/10000 val_loss:4.1571 svd_entropy: attn_qk:H=0.8768,top10E=0.09,eRank=350.2,q75/q25=59.39 attn_vo:H=0.5527,top10E=0.53,eRank=49.9,q75/q25=116.05 mlp_w1:H=0.7642,top10E=0.26,eRank=189.9,q75/q25=13.31 mlp_w2:H=0.8850,top10E=0.13,eRank=367.5,q75/q25=9.21 vo_prod:H=0.4560,top10E=0.70,eRank=25.5,q75/q25=12352.17 train_time:362492ms step_avg:86.31ms +[2025-08-22 14:33:04] [Rank 0] step:4201/10000 train_time:362506ms step_avg:86.29ms +[2025-08-22 14:33:04] [Rank 0] step:4201/10000 train_time:362506ms step_avg:86.29ms +[2025-08-22 14:33:06] [Rank 0] step:4221/10000 train_time:364229ms step_avg:86.29ms +[2025-08-22 14:33:06] [Rank 0] step:4221/10000 train_time:364229ms step_avg:86.29ms +[2025-08-22 14:33:07] [Rank 0] step:4241/10000 train_time:366015ms 
step_avg:86.30ms +[2025-08-22 14:33:07] [Rank 0] step:4241/10000 train_time:366015ms step_avg:86.30ms +[2025-08-22 14:33:09] [Rank 0] step:4261/10000 train_time:367799ms step_avg:86.32ms +[2025-08-22 14:33:09] [Rank 0] step:4261/10000 train_time:367799ms step_avg:86.32ms +[2025-08-22 14:33:11] [Rank 0] step:4281/10000 train_time:369585ms step_avg:86.33ms +[2025-08-22 14:33:11] [Rank 0] step:4281/10000 train_time:369585ms step_avg:86.33ms +[2025-08-22 14:33:13] [Rank 0] step:4301/10000 train_time:371371ms step_avg:86.35ms +[2025-08-22 14:33:13] [Rank 0] step:4301/10000 train_time:371371ms step_avg:86.35ms +[2025-08-22 14:33:15] [Rank 0] step:4321/10000 train_time:373159ms step_avg:86.36ms +[2025-08-22 14:33:15] [Rank 0] step:4321/10000 train_time:373159ms step_avg:86.36ms +[2025-08-22 14:33:16] [Rank 0] step:4341/10000 train_time:374945ms step_avg:86.37ms +[2025-08-22 14:33:16] [Rank 0] step:4341/10000 train_time:374945ms step_avg:86.37ms +[2025-08-22 14:33:18] [Rank 0] step:4361/10000 train_time:376733ms step_avg:86.39ms +[2025-08-22 14:33:18] [Rank 0] step:4361/10000 train_time:376733ms step_avg:86.39ms +[2025-08-22 14:33:20] [Rank 0] step:4381/10000 train_time:378521ms step_avg:86.40ms +[2025-08-22 14:33:20] [Rank 0] step:4381/10000 train_time:378521ms step_avg:86.40ms +[2025-08-22 14:33:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:33:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:33:35] [Rank 0] PRINT: step:4400/10000 val_loss:4.1529 svd_entropy: attn_qk:H=0.8778,top10E=0.09,eRank=352.3,q75/q25=58.73 attn_vo:H=0.5571,top10E=0.52,eRank=51.5,q75/q25=116.81 mlp_w1:H=0.7666,top10E=0.26,eRank=193.1,q75/q25=13.29 mlp_w2:H=0.8865,top10E=0.13,eRank=371.0,q75/q25=9.04 vo_prod:H=0.4606,top10E=0.70,eRank=26.3,q75/q25=12808.24 train_time:380397ms step_avg:86.45ms +[2025-08-22 14:33:35] [Rank 0] PRINT: step:4400/10000 val_loss:4.1529 svd_entropy: attn_qk:H=0.8778,top10E=0.09,eRank=352.3,q75/q25=58.73 attn_vo:H=0.5571,top10E=0.52,eRank=51.5,q75/q25=116.81 mlp_w1:H=0.7666,top10E=0.26,eRank=193.1,q75/q25=13.29 mlp_w2:H=0.8865,top10E=0.13,eRank=371.0,q75/q25=9.04 vo_prod:H=0.4606,top10E=0.70,eRank=26.3,q75/q25=12808.24 train_time:380397ms step_avg:86.45ms +[2025-08-22 14:33:35] [Rank 0] step:4401/10000 train_time:380413ms step_avg:86.44ms +[2025-08-22 14:33:35] [Rank 0] step:4401/10000 train_time:380413ms step_avg:86.44ms +[2025-08-22 14:33:37] [Rank 0] step:4421/10000 train_time:382105ms step_avg:86.43ms +[2025-08-22 14:33:37] [Rank 0] step:4421/10000 train_time:382105ms step_avg:86.43ms +[2025-08-22 14:33:39] [Rank 0] step:4441/10000 train_time:383888ms step_avg:86.44ms +[2025-08-22 14:33:39] [Rank 0] step:4441/10000 train_time:383888ms step_avg:86.44ms +[2025-08-22 14:33:41] [Rank 0] step:4461/10000 train_time:385678ms step_avg:86.46ms +[2025-08-22 14:33:41] [Rank 0] step:4461/10000 train_time:385678ms step_avg:86.46ms +[2025-08-22 14:33:43] [Rank 0] step:4481/10000 train_time:387527ms step_avg:86.48ms +[2025-08-22 14:33:43] [Rank 0] step:4481/10000 train_time:387527ms step_avg:86.48ms +[2025-08-22 14:33:44] [Rank 0] step:4501/10000 train_time:389320ms step_avg:86.50ms +[2025-08-22 14:33:44] [Rank 0] step:4501/10000 train_time:389320ms step_avg:86.50ms +[2025-08-22 14:33:46] [Rank 0] step:4521/10000 train_time:391114ms step_avg:86.51ms +[2025-08-22 14:33:46] [Rank 0] step:4521/10000 train_time:391114ms step_avg:86.51ms +[2025-08-22 
14:33:48] [Rank 0] step:4541/10000 train_time:392908ms step_avg:86.52ms +[2025-08-22 14:33:48] [Rank 0] step:4541/10000 train_time:392908ms step_avg:86.52ms +[2025-08-22 14:33:50] [Rank 0] step:4561/10000 train_time:394704ms step_avg:86.54ms +[2025-08-22 14:33:50] [Rank 0] step:4561/10000 train_time:394704ms step_avg:86.54ms +[2025-08-22 14:33:51] [Rank 0] step:4581/10000 train_time:396501ms step_avg:86.55ms +[2025-08-22 14:33:51] [Rank 0] step:4581/10000 train_time:396501ms step_avg:86.55ms +[2025-08-22 14:33:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:33:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:34:07] [Rank 0] PRINT: step:4600/10000 val_loss:4.1057 svd_entropy: attn_qk:H=0.8788,top10E=0.09,eRank=354.3,q75/q25=57.84 attn_vo:H=0.5614,top10E=0.51,eRank=53.2,q75/q25=116.88 mlp_w1:H=0.7689,top10E=0.25,eRank=196.3,q75/q25=13.34 mlp_w2:H=0.8880,top10E=0.13,eRank=374.4,q75/q25=8.91 vo_prod:H=0.4652,top10E=0.69,eRank=27.2,q75/q25=13202.60 train_time:398388ms step_avg:86.61ms +[2025-08-22 14:34:07] [Rank 0] PRINT: step:4600/10000 val_loss:4.1057 svd_entropy: attn_qk:H=0.8788,top10E=0.09,eRank=354.3,q75/q25=57.84 attn_vo:H=0.5614,top10E=0.51,eRank=53.2,q75/q25=116.88 mlp_w1:H=0.7689,top10E=0.25,eRank=196.3,q75/q25=13.34 mlp_w2:H=0.8880,top10E=0.13,eRank=374.4,q75/q25=8.91 vo_prod:H=0.4652,top10E=0.69,eRank=27.2,q75/q25=13202.60 train_time:398388ms step_avg:86.61ms +[2025-08-22 14:34:07] [Rank 0] step:4601/10000 train_time:398402ms step_avg:86.59ms +[2025-08-22 14:34:07] [Rank 0] step:4601/10000 train_time:398402ms step_avg:86.59ms +[2025-08-22 14:34:09] [Rank 0] step:4621/10000 train_time:400126ms step_avg:86.59ms +[2025-08-22 14:34:09] [Rank 0] step:4621/10000 train_time:400126ms step_avg:86.59ms +[2025-08-22 14:34:10] [Rank 0] step:4641/10000 train_time:401918ms 
step_avg:86.60ms +[2025-08-22 14:34:10] [Rank 0] step:4641/10000 train_time:401918ms step_avg:86.60ms +[2025-08-22 14:34:12] [Rank 0] step:4661/10000 train_time:403710ms step_avg:86.61ms +[2025-08-22 14:34:12] [Rank 0] step:4661/10000 train_time:403710ms step_avg:86.61ms +[2025-08-22 14:34:14] [Rank 0] step:4681/10000 train_time:405503ms step_avg:86.63ms +[2025-08-22 14:34:14] [Rank 0] step:4681/10000 train_time:405503ms step_avg:86.63ms +[2025-08-22 14:34:16] [Rank 0] step:4701/10000 train_time:407300ms step_avg:86.64ms +[2025-08-22 14:34:16] [Rank 0] step:4701/10000 train_time:407300ms step_avg:86.64ms +[2025-08-22 14:34:18] [Rank 0] step:4721/10000 train_time:409094ms step_avg:86.65ms +[2025-08-22 14:34:18] [Rank 0] step:4721/10000 train_time:409094ms step_avg:86.65ms +[2025-08-22 14:34:19] [Rank 0] step:4741/10000 train_time:410890ms step_avg:86.67ms +[2025-08-22 14:34:19] [Rank 0] step:4741/10000 train_time:410890ms step_avg:86.67ms +[2025-08-22 14:34:21] [Rank 0] step:4761/10000 train_time:412688ms step_avg:86.68ms +[2025-08-22 14:34:21] [Rank 0] step:4761/10000 train_time:412688ms step_avg:86.68ms +[2025-08-22 14:34:23] [Rank 0] step:4781/10000 train_time:414484ms step_avg:86.69ms +[2025-08-22 14:34:23] [Rank 0] step:4781/10000 train_time:414484ms step_avg:86.69ms +[2025-08-22 14:34:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:34:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:34:38] [Rank 0] PRINT: step:4800/10000 val_loss:4.1009 svd_entropy: attn_qk:H=0.8797,top10E=0.09,eRank=356.3,q75/q25=57.32 attn_vo:H=0.5655,top10E=0.50,eRank=54.8,q75/q25=117.92 mlp_w1:H=0.7711,top10E=0.25,eRank=199.3,q75/q25=13.34 mlp_w2:H=0.8893,top10E=0.13,eRank=377.4,q75/q25=8.77 vo_prod:H=0.4694,top10E=0.68,eRank=28.0,q75/q25=13398.82 train_time:416372ms step_avg:86.74ms +[2025-08-22 14:34:38] [Rank 0] PRINT: step:4800/10000 val_loss:4.1009 svd_entropy: attn_qk:H=0.8797,top10E=0.09,eRank=356.3,q75/q25=57.32 attn_vo:H=0.5655,top10E=0.50,eRank=54.8,q75/q25=117.92 mlp_w1:H=0.7711,top10E=0.25,eRank=199.3,q75/q25=13.34 mlp_w2:H=0.8893,top10E=0.13,eRank=377.4,q75/q25=8.77 vo_prod:H=0.4694,top10E=0.68,eRank=28.0,q75/q25=13398.82 train_time:416372ms step_avg:86.74ms +[2025-08-22 14:34:39] [Rank 0] step:4801/10000 train_time:416386ms step_avg:86.73ms +[2025-08-22 14:34:39] [Rank 0] step:4801/10000 train_time:416386ms step_avg:86.73ms +[2025-08-22 14:34:40] [Rank 0] step:4821/10000 train_time:418111ms step_avg:86.73ms +[2025-08-22 14:34:40] [Rank 0] step:4821/10000 train_time:418111ms step_avg:86.73ms +[2025-08-22 14:34:42] [Rank 0] step:4841/10000 train_time:419902ms step_avg:86.74ms +[2025-08-22 14:34:42] [Rank 0] step:4841/10000 train_time:419902ms step_avg:86.74ms +[2025-08-22 14:34:44] [Rank 0] step:4861/10000 train_time:421695ms step_avg:86.75ms +[2025-08-22 14:34:44] [Rank 0] step:4861/10000 train_time:421695ms step_avg:86.75ms +[2025-08-22 14:34:46] [Rank 0] step:4881/10000 train_time:423538ms step_avg:86.77ms +[2025-08-22 14:34:46] [Rank 0] step:4881/10000 train_time:423538ms step_avg:86.77ms +[2025-08-22 14:34:48] [Rank 0] step:4901/10000 train_time:425376ms step_avg:86.79ms +[2025-08-22 14:34:48] [Rank 0] step:4901/10000 train_time:425376ms step_avg:86.79ms +[2025-08-22 14:34:49] [Rank 0] step:4921/10000 train_time:427169ms step_avg:86.81ms +[2025-08-22 14:34:49] [Rank 0] step:4921/10000 train_time:427169ms step_avg:86.81ms +[2025-08-22 
14:34:51] [Rank 0] step:4941/10000 train_time:428964ms step_avg:86.82ms +[2025-08-22 14:34:51] [Rank 0] step:4941/10000 train_time:428964ms step_avg:86.82ms +[2025-08-22 14:34:53] [Rank 0] step:4961/10000 train_time:430759ms step_avg:86.83ms +[2025-08-22 14:34:53] [Rank 0] step:4961/10000 train_time:430759ms step_avg:86.83ms +[2025-08-22 14:34:55] [Rank 0] step:4981/10000 train_time:432556ms step_avg:86.84ms +[2025-08-22 14:34:55] [Rank 0] step:4981/10000 train_time:432556ms step_avg:86.84ms +[2025-08-22 14:34:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:34:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:35:10] [Rank 0] PRINT: step:5000/10000 val_loss:4.0792 svd_entropy: attn_qk:H=0.8805,top10E=0.09,eRank=358.1,q75/q25=56.83 attn_vo:H=0.5694,top10E=0.49,eRank=56.3,q75/q25=117.77 mlp_w1:H=0.7730,top10E=0.25,eRank=202.0,q75/q25=13.33 mlp_w2:H=0.8905,top10E=0.13,eRank=380.2,q75/q25=8.63 vo_prod:H=0.4732,top10E=0.67,eRank=28.8,q75/q25=13687.41 train_time:434440ms step_avg:86.89ms +[2025-08-22 14:35:10] [Rank 0] PRINT: step:5000/10000 val_loss:4.0792 svd_entropy: attn_qk:H=0.8805,top10E=0.09,eRank=358.1,q75/q25=56.83 attn_vo:H=0.5694,top10E=0.49,eRank=56.3,q75/q25=117.77 mlp_w1:H=0.7730,top10E=0.25,eRank=202.0,q75/q25=13.33 mlp_w2:H=0.8905,top10E=0.13,eRank=380.2,q75/q25=8.63 vo_prod:H=0.4732,top10E=0.67,eRank=28.8,q75/q25=13687.41 train_time:434440ms step_avg:86.89ms +[2025-08-22 14:35:10] [Rank 0] step:5001/10000 train_time:434456ms step_avg:86.87ms +[2025-08-22 14:35:10] [Rank 0] step:5001/10000 train_time:434456ms step_avg:86.87ms +[2025-08-22 14:35:12] [Rank 0] step:5021/10000 train_time:436182ms step_avg:86.87ms +[2025-08-22 14:35:12] [Rank 0] step:5021/10000 train_time:436182ms step_avg:86.87ms +[2025-08-22 14:35:14] [Rank 0] step:5041/10000 train_time:437972ms 
step_avg:86.88ms +[2025-08-22 14:35:14] [Rank 0] step:5041/10000 train_time:437972ms step_avg:86.88ms +[2025-08-22 14:35:16] [Rank 0] step:5061/10000 train_time:439763ms step_avg:86.89ms +[2025-08-22 14:35:16] [Rank 0] step:5061/10000 train_time:439763ms step_avg:86.89ms +[2025-08-22 14:35:18] [Rank 0] step:5081/10000 train_time:441554ms step_avg:86.90ms +[2025-08-22 14:35:18] [Rank 0] step:5081/10000 train_time:441554ms step_avg:86.90ms +[2025-08-22 14:35:19] [Rank 0] step:5101/10000 train_time:443345ms step_avg:86.91ms +[2025-08-22 14:35:19] [Rank 0] step:5101/10000 train_time:443345ms step_avg:86.91ms +[2025-08-22 14:35:21] [Rank 0] step:5121/10000 train_time:445137ms step_avg:86.92ms +[2025-08-22 14:35:21] [Rank 0] step:5121/10000 train_time:445137ms step_avg:86.92ms +[2025-08-22 14:35:23] [Rank 0] step:5141/10000 train_time:446933ms step_avg:86.94ms +[2025-08-22 14:35:23] [Rank 0] step:5141/10000 train_time:446933ms step_avg:86.94ms +[2025-08-22 14:35:25] [Rank 0] step:5161/10000 train_time:448725ms step_avg:86.95ms +[2025-08-22 14:35:25] [Rank 0] step:5161/10000 train_time:448725ms step_avg:86.95ms +[2025-08-22 14:35:27] [Rank 0] step:5181/10000 train_time:450523ms step_avg:86.96ms +[2025-08-22 14:35:27] [Rank 0] step:5181/10000 train_time:450523ms step_avg:86.96ms +[2025-08-22 14:35:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:35:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:35:42] [Rank 0] PRINT: step:5200/10000 val_loss:4.0610 svd_entropy: attn_qk:H=0.8813,top10E=0.09,eRank=359.7,q75/q25=55.94 attn_vo:H=0.5730,top10E=0.49,eRank=57.8,q75/q25=118.21 mlp_w1:H=0.7749,top10E=0.25,eRank=204.8,q75/q25=13.29 mlp_w2:H=0.8916,top10E=0.13,eRank=382.9,q75/q25=8.51 vo_prod:H=0.4769,top10E=0.66,eRank=29.6,q75/q25=13432.99 train_time:452433ms step_avg:87.01ms +[2025-08-22 14:35:42] [Rank 0] PRINT: step:5200/10000 val_loss:4.0610 svd_entropy: attn_qk:H=0.8813,top10E=0.09,eRank=359.7,q75/q25=55.94 attn_vo:H=0.5730,top10E=0.49,eRank=57.8,q75/q25=118.21 mlp_w1:H=0.7749,top10E=0.25,eRank=204.8,q75/q25=13.29 mlp_w2:H=0.8916,top10E=0.13,eRank=382.9,q75/q25=8.51 vo_prod:H=0.4769,top10E=0.66,eRank=29.6,q75/q25=13432.99 train_time:452433ms step_avg:87.01ms +[2025-08-22 14:35:42] [Rank 0] step:5201/10000 train_time:452448ms step_avg:86.99ms +[2025-08-22 14:35:42] [Rank 0] step:5201/10000 train_time:452448ms step_avg:86.99ms +[2025-08-22 14:35:44] [Rank 0] step:5221/10000 train_time:454193ms step_avg:86.99ms +[2025-08-22 14:35:44] [Rank 0] step:5221/10000 train_time:454193ms step_avg:86.99ms +[2025-08-22 14:35:46] [Rank 0] step:5241/10000 train_time:456020ms step_avg:87.01ms +[2025-08-22 14:35:46] [Rank 0] step:5241/10000 train_time:456020ms step_avg:87.01ms +[2025-08-22 14:35:48] [Rank 0] step:5261/10000 train_time:457848ms step_avg:87.03ms +[2025-08-22 14:35:48] [Rank 0] step:5261/10000 train_time:457848ms step_avg:87.03ms +[2025-08-22 14:35:49] [Rank 0] step:5281/10000 train_time:459673ms step_avg:87.04ms +[2025-08-22 14:35:49] [Rank 0] step:5281/10000 train_time:459673ms step_avg:87.04ms +[2025-08-22 14:35:51] [Rank 0] step:5301/10000 train_time:461512ms step_avg:87.06ms +[2025-08-22 14:35:51] [Rank 0] step:5301/10000 train_time:461512ms step_avg:87.06ms +[2025-08-22 14:35:53] [Rank 0] step:5321/10000 train_time:463339ms step_avg:87.08ms +[2025-08-22 14:35:53] [Rank 0] step:5321/10000 train_time:463339ms step_avg:87.08ms +[2025-08-22 
14:35:55] [Rank 0] step:5341/10000 train_time:465166ms step_avg:87.09ms +[2025-08-22 14:35:55] [Rank 0] step:5341/10000 train_time:465166ms step_avg:87.09ms +[2025-08-22 14:35:57] [Rank 0] step:5361/10000 train_time:467000ms step_avg:87.11ms +[2025-08-22 14:35:57] [Rank 0] step:5361/10000 train_time:467000ms step_avg:87.11ms +[2025-08-22 14:35:59] [Rank 0] step:5381/10000 train_time:468830ms step_avg:87.13ms +[2025-08-22 14:35:59] [Rank 0] step:5381/10000 train_time:468830ms step_avg:87.13ms +[2025-08-22 14:36:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:36:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:36:14] [Rank 0] PRINT: step:5400/10000 val_loss:4.0436 svd_entropy: attn_qk:H=0.8820,top10E=0.09,eRank=361.2,q75/q25=55.49 attn_vo:H=0.5764,top10E=0.48,eRank=59.2,q75/q25=117.14 mlp_w1:H=0.7768,top10E=0.24,eRank=207.4,q75/q25=13.28 mlp_w2:H=0.8927,top10E=0.12,eRank=385.4,q75/q25=8.41 vo_prod:H=0.4805,top10E=0.66,eRank=30.4,q75/q25=13248.40 train_time:470747ms step_avg:87.18ms +[2025-08-22 14:36:14] [Rank 0] PRINT: step:5400/10000 val_loss:4.0436 svd_entropy: attn_qk:H=0.8820,top10E=0.09,eRank=361.2,q75/q25=55.49 attn_vo:H=0.5764,top10E=0.48,eRank=59.2,q75/q25=117.14 mlp_w1:H=0.7768,top10E=0.24,eRank=207.4,q75/q25=13.28 mlp_w2:H=0.8927,top10E=0.12,eRank=385.4,q75/q25=8.41 vo_prod:H=0.4805,top10E=0.66,eRank=30.4,q75/q25=13248.40 train_time:470747ms step_avg:87.18ms +[2025-08-22 14:36:14] [Rank 0] step:5401/10000 train_time:470762ms step_avg:87.16ms +[2025-08-22 14:36:14] [Rank 0] step:5401/10000 train_time:470762ms step_avg:87.16ms +[2025-08-22 14:36:16] [Rank 0] step:5421/10000 train_time:472498ms step_avg:87.16ms +[2025-08-22 14:36:16] [Rank 0] step:5421/10000 train_time:472498ms step_avg:87.16ms +[2025-08-22 14:36:18] [Rank 0] step:5441/10000 train_time:474318ms 
step_avg:87.17ms +[2025-08-22 14:36:18] [Rank 0] step:5441/10000 train_time:474318ms step_avg:87.17ms +[2025-08-22 14:36:20] [Rank 0] step:5461/10000 train_time:476144ms step_avg:87.19ms +[2025-08-22 14:36:20] [Rank 0] step:5461/10000 train_time:476144ms step_avg:87.19ms +[2025-08-22 14:36:22] [Rank 0] step:5481/10000 train_time:477967ms step_avg:87.20ms +[2025-08-22 14:36:22] [Rank 0] step:5481/10000 train_time:477967ms step_avg:87.20ms +[2025-08-22 14:36:23] [Rank 0] step:5501/10000 train_time:479796ms step_avg:87.22ms +[2025-08-22 14:36:23] [Rank 0] step:5501/10000 train_time:479796ms step_avg:87.22ms +[2025-08-22 14:36:25] [Rank 0] step:5521/10000 train_time:481627ms step_avg:87.24ms +[2025-08-22 14:36:25] [Rank 0] step:5521/10000 train_time:481627ms step_avg:87.24ms +[2025-08-22 14:36:27] [Rank 0] step:5541/10000 train_time:483451ms step_avg:87.25ms +[2025-08-22 14:36:27] [Rank 0] step:5541/10000 train_time:483451ms step_avg:87.25ms +[2025-08-22 14:36:29] [Rank 0] step:5561/10000 train_time:485278ms step_avg:87.26ms +[2025-08-22 14:36:29] [Rank 0] step:5561/10000 train_time:485278ms step_avg:87.26ms +[2025-08-22 14:36:31] [Rank 0] step:5581/10000 train_time:487103ms step_avg:87.28ms +[2025-08-22 14:36:31] [Rank 0] step:5581/10000 train_time:487103ms step_avg:87.28ms +[2025-08-22 14:36:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:36:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:36:46] [Rank 0] PRINT: step:5600/10000 val_loss:4.0338 svd_entropy: attn_qk:H=0.8827,top10E=0.09,eRank=362.7,q75/q25=54.55 attn_vo:H=0.5798,top10E=0.47,eRank=60.7,q75/q25=117.30 mlp_w1:H=0.7785,top10E=0.24,eRank=209.8,q75/q25=13.26 mlp_w2:H=0.8936,top10E=0.12,eRank=387.7,q75/q25=8.33 vo_prod:H=0.4839,top10E=0.65,eRank=31.1,q75/q25=13300.08 train_time:489025ms step_avg:87.33ms +[2025-08-22 14:36:46] [Rank 0] PRINT: step:5600/10000 val_loss:4.0338 svd_entropy: attn_qk:H=0.8827,top10E=0.09,eRank=362.7,q75/q25=54.55 attn_vo:H=0.5798,top10E=0.47,eRank=60.7,q75/q25=117.30 mlp_w1:H=0.7785,top10E=0.24,eRank=209.8,q75/q25=13.26 mlp_w2:H=0.8936,top10E=0.12,eRank=387.7,q75/q25=8.33 vo_prod:H=0.4839,top10E=0.65,eRank=31.1,q75/q25=13300.08 train_time:489025ms step_avg:87.33ms +[2025-08-22 14:36:46] [Rank 0] step:5601/10000 train_time:489040ms step_avg:87.31ms +[2025-08-22 14:36:46] [Rank 0] step:5601/10000 train_time:489040ms step_avg:87.31ms +[2025-08-22 14:36:48] [Rank 0] step:5621/10000 train_time:490781ms step_avg:87.31ms +[2025-08-22 14:36:48] [Rank 0] step:5621/10000 train_time:490781ms step_avg:87.31ms +[2025-08-22 14:36:50] [Rank 0] step:5641/10000 train_time:492604ms step_avg:87.33ms +[2025-08-22 14:36:50] [Rank 0] step:5641/10000 train_time:492604ms step_avg:87.33ms +[2025-08-22 14:36:52] [Rank 0] step:5661/10000 train_time:494483ms step_avg:87.35ms +[2025-08-22 14:36:52] [Rank 0] step:5661/10000 train_time:494483ms step_avg:87.35ms +[2025-08-22 14:36:54] [Rank 0] step:5681/10000 train_time:496333ms step_avg:87.37ms +[2025-08-22 14:36:54] [Rank 0] step:5681/10000 train_time:496333ms step_avg:87.37ms +[2025-08-22 14:36:56] [Rank 0] step:5701/10000 train_time:498182ms step_avg:87.38ms +[2025-08-22 14:36:56] [Rank 0] step:5701/10000 train_time:498182ms step_avg:87.38ms +[2025-08-22 14:36:57] [Rank 0] step:5721/10000 train_time:500011ms step_avg:87.40ms +[2025-08-22 14:36:57] [Rank 0] step:5721/10000 train_time:500011ms step_avg:87.40ms +[2025-08-22 
14:36:59] [Rank 0] step:5741/10000 train_time:501835ms step_avg:87.41ms +[2025-08-22 14:36:59] [Rank 0] step:5741/10000 train_time:501835ms step_avg:87.41ms +[2025-08-22 14:37:01] [Rank 0] step:5761/10000 train_time:503662ms step_avg:87.43ms +[2025-08-22 14:37:01] [Rank 0] step:5761/10000 train_time:503662ms step_avg:87.43ms +[2025-08-22 14:37:03] [Rank 0] step:5781/10000 train_time:505492ms step_avg:87.44ms +[2025-08-22 14:37:03] [Rank 0] step:5781/10000 train_time:505492ms step_avg:87.44ms +[2025-08-22 14:37:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:37:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:37:18] [Rank 0] PRINT: step:5800/10000 val_loss:4.0252 svd_entropy: attn_qk:H=0.8833,top10E=0.09,eRank=364.1,q75/q25=54.18 attn_vo:H=0.5830,top10E=0.47,eRank=62.1,q75/q25=116.04 mlp_w1:H=0.7802,top10E=0.24,eRank=212.2,q75/q25=13.26 mlp_w2:H=0.8945,top10E=0.12,eRank=389.7,q75/q25=8.24 vo_prod:H=0.4872,top10E=0.64,eRank=31.8,q75/q25=13306.54 train_time:507409ms step_avg:87.48ms +[2025-08-22 14:37:18] [Rank 0] PRINT: step:5800/10000 val_loss:4.0252 svd_entropy: attn_qk:H=0.8833,top10E=0.09,eRank=364.1,q75/q25=54.18 attn_vo:H=0.5830,top10E=0.47,eRank=62.1,q75/q25=116.04 mlp_w1:H=0.7802,top10E=0.24,eRank=212.2,q75/q25=13.26 mlp_w2:H=0.8945,top10E=0.12,eRank=389.7,q75/q25=8.24 vo_prod:H=0.4872,top10E=0.64,eRank=31.8,q75/q25=13306.54 train_time:507409ms step_avg:87.48ms +[2025-08-22 14:37:18] [Rank 0] step:5801/10000 train_time:507424ms step_avg:87.47ms +[2025-08-22 14:37:18] [Rank 0] step:5801/10000 train_time:507424ms step_avg:87.47ms +[2025-08-22 14:37:20] [Rank 0] step:5821/10000 train_time:509150ms step_avg:87.47ms +[2025-08-22 14:37:20] [Rank 0] step:5821/10000 train_time:509150ms step_avg:87.47ms +[2025-08-22 14:37:22] [Rank 0] step:5841/10000 train_time:510970ms 
step_avg:87.48ms +[2025-08-22 14:37:22] [Rank 0] step:5841/10000 train_time:510970ms step_avg:87.48ms +[2025-08-22 14:37:24] [Rank 0] step:5861/10000 train_time:512796ms step_avg:87.49ms +[2025-08-22 14:37:24] [Rank 0] step:5861/10000 train_time:512796ms step_avg:87.49ms +[2025-08-22 14:37:26] [Rank 0] step:5881/10000 train_time:514622ms step_avg:87.51ms +[2025-08-22 14:37:26] [Rank 0] step:5881/10000 train_time:514622ms step_avg:87.51ms +[2025-08-22 14:37:28] [Rank 0] step:5901/10000 train_time:516446ms step_avg:87.52ms +[2025-08-22 14:37:28] [Rank 0] step:5901/10000 train_time:516446ms step_avg:87.52ms +[2025-08-22 14:37:29] [Rank 0] step:5921/10000 train_time:518273ms step_avg:87.53ms +[2025-08-22 14:37:29] [Rank 0] step:5921/10000 train_time:518273ms step_avg:87.53ms +[2025-08-22 14:37:31] [Rank 0] step:5941/10000 train_time:520106ms step_avg:87.55ms +[2025-08-22 14:37:31] [Rank 0] step:5941/10000 train_time:520106ms step_avg:87.55ms +[2025-08-22 14:37:33] [Rank 0] step:5961/10000 train_time:521938ms step_avg:87.56ms +[2025-08-22 14:37:33] [Rank 0] step:5961/10000 train_time:521938ms step_avg:87.56ms +[2025-08-22 14:37:35] [Rank 0] step:5981/10000 train_time:523769ms step_avg:87.57ms +[2025-08-22 14:37:35] [Rank 0] step:5981/10000 train_time:523769ms step_avg:87.57ms +[2025-08-22 14:37:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:37:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:37:50] [Rank 0] PRINT: step:6000/10000 val_loss:4.0003 svd_entropy: attn_qk:H=0.8839,top10E=0.09,eRank=365.5,q75/q25=54.22 attn_vo:H=0.5860,top10E=0.46,eRank=63.4,q75/q25=115.99 mlp_w1:H=0.7817,top10E=0.24,eRank=214.5,q75/q25=13.27 mlp_w2:H=0.8954,top10E=0.12,eRank=391.8,q75/q25=8.18 vo_prod:H=0.4902,top10E=0.64,eRank=32.5,q75/q25=12903.34 train_time:525687ms step_avg:87.61ms +[2025-08-22 14:37:50] [Rank 0] PRINT: step:6000/10000 val_loss:4.0003 svd_entropy: attn_qk:H=0.8839,top10E=0.09,eRank=365.5,q75/q25=54.22 attn_vo:H=0.5860,top10E=0.46,eRank=63.4,q75/q25=115.99 mlp_w1:H=0.7817,top10E=0.24,eRank=214.5,q75/q25=13.27 mlp_w2:H=0.8954,top10E=0.12,eRank=391.8,q75/q25=8.18 vo_prod:H=0.4902,top10E=0.64,eRank=32.5,q75/q25=12903.34 train_time:525687ms step_avg:87.61ms +[2025-08-22 14:37:51] [Rank 0] step:6001/10000 train_time:525702ms step_avg:87.60ms +[2025-08-22 14:37:51] [Rank 0] step:6001/10000 train_time:525702ms step_avg:87.60ms +[2025-08-22 14:37:52] [Rank 0] step:6021/10000 train_time:527445ms step_avg:87.60ms +[2025-08-22 14:37:52] [Rank 0] step:6021/10000 train_time:527445ms step_avg:87.60ms +[2025-08-22 14:37:54] [Rank 0] step:6041/10000 train_time:529272ms step_avg:87.61ms +[2025-08-22 14:37:54] [Rank 0] step:6041/10000 train_time:529272ms step_avg:87.61ms +[2025-08-22 14:37:56] [Rank 0] step:6061/10000 train_time:531113ms step_avg:87.63ms +[2025-08-22 14:37:56] [Rank 0] step:6061/10000 train_time:531113ms step_avg:87.63ms +[2025-08-22 14:37:58] [Rank 0] step:6081/10000 train_time:532946ms step_avg:87.64ms +[2025-08-22 14:37:58] [Rank 0] step:6081/10000 train_time:532946ms step_avg:87.64ms +[2025-08-22 14:38:00] [Rank 0] step:6101/10000 train_time:534785ms step_avg:87.66ms +[2025-08-22 14:38:00] [Rank 0] step:6101/10000 train_time:534785ms step_avg:87.66ms +[2025-08-22 14:38:02] [Rank 0] step:6121/10000 train_time:536887ms step_avg:87.71ms +[2025-08-22 14:38:02] [Rank 0] step:6121/10000 train_time:536887ms step_avg:87.71ms +[2025-08-22 
14:38:04] [Rank 0] step:6141/10000 train_time:538735ms step_avg:87.73ms +[2025-08-22 14:38:04] [Rank 0] step:6141/10000 train_time:538735ms step_avg:87.73ms +[2025-08-22 14:38:05] [Rank 0] step:6161/10000 train_time:540572ms step_avg:87.74ms +[2025-08-22 14:38:05] [Rank 0] step:6161/10000 train_time:540572ms step_avg:87.74ms +[2025-08-22 14:38:07] [Rank 0] step:6181/10000 train_time:542403ms step_avg:87.75ms +[2025-08-22 14:38:07] [Rank 0] step:6181/10000 train_time:542403ms step_avg:87.75ms +[2025-08-22 14:38:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:38:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:38:23] [Rank 0] PRINT: step:6200/10000 val_loss:3.9819 svd_entropy: attn_qk:H=0.8845,top10E=0.09,eRank=366.8,q75/q25=53.91 attn_vo:H=0.5889,top10E=0.46,eRank=64.7,q75/q25=114.94 mlp_w1:H=0.7832,top10E=0.24,eRank=216.7,q75/q25=13.22 mlp_w2:H=0.8962,top10E=0.12,eRank=393.7,q75/q25=8.08 vo_prod:H=0.4931,top10E=0.63,eRank=33.2,q75/q25=13016.04 train_time:544331ms step_avg:87.80ms +[2025-08-22 14:38:23] [Rank 0] PRINT: step:6200/10000 val_loss:3.9819 svd_entropy: attn_qk:H=0.8845,top10E=0.09,eRank=366.8,q75/q25=53.91 attn_vo:H=0.5889,top10E=0.46,eRank=64.7,q75/q25=114.94 mlp_w1:H=0.7832,top10E=0.24,eRank=216.7,q75/q25=13.22 mlp_w2:H=0.8962,top10E=0.12,eRank=393.7,q75/q25=8.08 vo_prod:H=0.4931,top10E=0.63,eRank=33.2,q75/q25=13016.04 train_time:544331ms step_avg:87.80ms +[2025-08-22 14:38:23] [Rank 0] step:6201/10000 train_time:544346ms step_avg:87.78ms +[2025-08-22 14:38:23] [Rank 0] step:6201/10000 train_time:544346ms step_avg:87.78ms +[2025-08-22 14:38:25] [Rank 0] step:6221/10000 train_time:546097ms step_avg:87.78ms +[2025-08-22 14:38:25] [Rank 0] step:6221/10000 train_time:546097ms step_avg:87.78ms +[2025-08-22 14:38:27] [Rank 0] step:6241/10000 train_time:547921ms 
step_avg:87.79ms +[2025-08-22 14:38:27] [Rank 0] step:6241/10000 train_time:547921ms step_avg:87.79ms +[2025-08-22 14:38:28] [Rank 0] step:6261/10000 train_time:549749ms step_avg:87.81ms +[2025-08-22 14:38:28] [Rank 0] step:6261/10000 train_time:549749ms step_avg:87.81ms +[2025-08-22 14:38:30] [Rank 0] step:6281/10000 train_time:551581ms step_avg:87.82ms +[2025-08-22 14:38:30] [Rank 0] step:6281/10000 train_time:551581ms step_avg:87.82ms +[2025-08-22 14:38:32] [Rank 0] step:6301/10000 train_time:553412ms step_avg:87.83ms +[2025-08-22 14:38:32] [Rank 0] step:6301/10000 train_time:553412ms step_avg:87.83ms +[2025-08-22 14:38:34] [Rank 0] step:6321/10000 train_time:555240ms step_avg:87.84ms +[2025-08-22 14:38:34] [Rank 0] step:6321/10000 train_time:555240ms step_avg:87.84ms +[2025-08-22 14:38:36] [Rank 0] step:6341/10000 train_time:557073ms step_avg:87.85ms +[2025-08-22 14:38:36] [Rank 0] step:6341/10000 train_time:557073ms step_avg:87.85ms +[2025-08-22 14:38:38] [Rank 0] step:6361/10000 train_time:558911ms step_avg:87.87ms +[2025-08-22 14:38:38] [Rank 0] step:6361/10000 train_time:558911ms step_avg:87.87ms +[2025-08-22 14:38:39] [Rank 0] step:6381/10000 train_time:560748ms step_avg:87.88ms +[2025-08-22 14:38:39] [Rank 0] step:6381/10000 train_time:560748ms step_avg:87.88ms +[2025-08-22 14:38:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:38:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:38:55] [Rank 0] PRINT: step:6400/10000 val_loss:3.9692 svd_entropy: attn_qk:H=0.8850,top10E=0.08,eRank=367.9,q75/q25=53.72 attn_vo:H=0.5915,top10E=0.45,eRank=65.9,q75/q25=114.79 mlp_w1:H=0.7846,top10E=0.23,eRank=218.7,q75/q25=13.21 mlp_w2:H=0.8969,top10E=0.12,eRank=395.5,q75/q25=8.02 vo_prod:H=0.4957,top10E=0.63,eRank=33.8,q75/q25=12870.98 train_time:562666ms step_avg:87.92ms +[2025-08-22 14:38:55] [Rank 0] PRINT: step:6400/10000 val_loss:3.9692 svd_entropy: attn_qk:H=0.8850,top10E=0.08,eRank=367.9,q75/q25=53.72 attn_vo:H=0.5915,top10E=0.45,eRank=65.9,q75/q25=114.79 mlp_w1:H=0.7846,top10E=0.23,eRank=218.7,q75/q25=13.21 mlp_w2:H=0.8969,top10E=0.12,eRank=395.5,q75/q25=8.02 vo_prod:H=0.4957,top10E=0.63,eRank=33.8,q75/q25=12870.98 train_time:562666ms step_avg:87.92ms +[2025-08-22 14:38:55] [Rank 0] step:6401/10000 train_time:562680ms step_avg:87.91ms +[2025-08-22 14:38:55] [Rank 0] step:6401/10000 train_time:562680ms step_avg:87.91ms +[2025-08-22 14:38:57] [Rank 0] step:6421/10000 train_time:564426ms step_avg:87.90ms +[2025-08-22 14:38:57] [Rank 0] step:6421/10000 train_time:564426ms step_avg:87.90ms +[2025-08-22 14:38:59] [Rank 0] step:6441/10000 train_time:566254ms step_avg:87.91ms +[2025-08-22 14:38:59] [Rank 0] step:6441/10000 train_time:566254ms step_avg:87.91ms +[2025-08-22 14:39:01] [Rank 0] step:6461/10000 train_time:568087ms step_avg:87.93ms +[2025-08-22 14:39:01] [Rank 0] step:6461/10000 train_time:568087ms step_avg:87.93ms +[2025-08-22 14:39:02] [Rank 0] step:6481/10000 train_time:569926ms step_avg:87.94ms +[2025-08-22 14:39:02] [Rank 0] step:6481/10000 train_time:569926ms step_avg:87.94ms +[2025-08-22 14:39:04] [Rank 0] step:6501/10000 train_time:571753ms step_avg:87.95ms +[2025-08-22 14:39:04] [Rank 0] step:6501/10000 train_time:571753ms step_avg:87.95ms +[2025-08-22 14:39:06] [Rank 0] step:6521/10000 train_time:573580ms step_avg:87.96ms +[2025-08-22 14:39:06] [Rank 0] step:6521/10000 train_time:573580ms step_avg:87.96ms +[2025-08-22 
14:39:08] [Rank 0] step:6541/10000 train_time:575412ms step_avg:87.97ms +[2025-08-22 14:39:08] [Rank 0] step:6541/10000 train_time:575412ms step_avg:87.97ms +[2025-08-22 14:39:10] [Rank 0] step:6561/10000 train_time:577246ms step_avg:87.98ms +[2025-08-22 14:39:10] [Rank 0] step:6561/10000 train_time:577246ms step_avg:87.98ms +[2025-08-22 14:39:12] [Rank 0] step:6581/10000 train_time:579076ms step_avg:87.99ms +[2025-08-22 14:39:12] [Rank 0] step:6581/10000 train_time:579076ms step_avg:87.99ms +[2025-08-22 14:39:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:39:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:39:27] [Rank 0] PRINT: step:6600/10000 val_loss:3.9561 svd_entropy: attn_qk:H=0.8855,top10E=0.08,eRank=368.9,q75/q25=53.02 attn_vo:H=0.5940,top10E=0.45,eRank=67.1,q75/q25=115.05 mlp_w1:H=0.7858,top10E=0.23,eRank=220.6,q75/q25=13.18 mlp_w2:H=0.8976,top10E=0.12,eRank=397.1,q75/q25=7.99 vo_prod:H=0.4985,top10E=0.62,eRank=34.4,q75/q25=13185.59 train_time:580998ms step_avg:88.03ms +[2025-08-22 14:39:27] [Rank 0] PRINT: step:6600/10000 val_loss:3.9561 svd_entropy: attn_qk:H=0.8855,top10E=0.08,eRank=368.9,q75/q25=53.02 attn_vo:H=0.5940,top10E=0.45,eRank=67.1,q75/q25=115.05 mlp_w1:H=0.7858,top10E=0.23,eRank=220.6,q75/q25=13.18 mlp_w2:H=0.8976,top10E=0.12,eRank=397.1,q75/q25=7.99 vo_prod:H=0.4985,top10E=0.62,eRank=34.4,q75/q25=13185.59 train_time:580998ms step_avg:88.03ms +[2025-08-22 14:39:27] [Rank 0] step:6601/10000 train_time:581013ms step_avg:88.02ms +[2025-08-22 14:39:27] [Rank 0] step:6601/10000 train_time:581013ms step_avg:88.02ms +[2025-08-22 14:39:29] [Rank 0] step:6621/10000 train_time:582754ms step_avg:88.02ms +[2025-08-22 14:39:29] [Rank 0] step:6621/10000 train_time:582754ms step_avg:88.02ms +[2025-08-22 14:39:31] [Rank 0] step:6641/10000 train_time:584589ms 
step_avg:88.03ms +[2025-08-22 14:39:31] [Rank 0] step:6641/10000 train_time:584589ms step_avg:88.03ms +[2025-08-22 14:39:33] [Rank 0] step:6661/10000 train_time:586418ms step_avg:88.04ms +[2025-08-22 14:39:33] [Rank 0] step:6661/10000 train_time:586418ms step_avg:88.04ms +[2025-08-22 14:39:35] [Rank 0] step:6681/10000 train_time:588263ms step_avg:88.05ms +[2025-08-22 14:39:35] [Rank 0] step:6681/10000 train_time:588263ms step_avg:88.05ms +[2025-08-22 14:39:36] [Rank 0] step:6701/10000 train_time:590128ms step_avg:88.07ms +[2025-08-22 14:39:36] [Rank 0] step:6701/10000 train_time:590128ms step_avg:88.07ms +[2025-08-22 14:39:38] [Rank 0] step:6721/10000 train_time:591991ms step_avg:88.08ms +[2025-08-22 14:39:38] [Rank 0] step:6721/10000 train_time:591991ms step_avg:88.08ms +[2025-08-22 14:39:40] [Rank 0] step:6741/10000 train_time:593853ms step_avg:88.10ms +[2025-08-22 14:39:40] [Rank 0] step:6741/10000 train_time:593853ms step_avg:88.10ms +[2025-08-22 14:39:42] [Rank 0] step:6761/10000 train_time:595711ms step_avg:88.11ms +[2025-08-22 14:39:42] [Rank 0] step:6761/10000 train_time:595711ms step_avg:88.11ms +[2025-08-22 14:39:44] [Rank 0] step:6781/10000 train_time:597573ms step_avg:88.12ms +[2025-08-22 14:39:44] [Rank 0] step:6781/10000 train_time:597573ms step_avg:88.12ms +[2025-08-22 14:39:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:39:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:39:59] [Rank 0] PRINT: step:6800/10000 val_loss:3.9373 svd_entropy: attn_qk:H=0.8859,top10E=0.08,eRank=369.7,q75/q25=52.65 attn_vo:H=0.5963,top10E=0.44,eRank=68.1,q75/q25=114.78 mlp_w1:H=0.7870,top10E=0.23,eRank=222.3,q75/q25=13.12 mlp_w2:H=0.8982,top10E=0.12,eRank=398.6,q75/q25=7.92 vo_prod:H=0.5009,top10E=0.62,eRank=35.0,q75/q25=12757.80 train_time:599533ms step_avg:88.17ms +[2025-08-22 14:39:59] [Rank 0] PRINT: step:6800/10000 val_loss:3.9373 svd_entropy: attn_qk:H=0.8859,top10E=0.08,eRank=369.7,q75/q25=52.65 attn_vo:H=0.5963,top10E=0.44,eRank=68.1,q75/q25=114.78 mlp_w1:H=0.7870,top10E=0.23,eRank=222.3,q75/q25=13.12 mlp_w2:H=0.8982,top10E=0.12,eRank=398.6,q75/q25=7.92 vo_prod:H=0.5009,top10E=0.62,eRank=35.0,q75/q25=12757.80 train_time:599533ms step_avg:88.17ms +[2025-08-22 14:39:59] [Rank 0] step:6801/10000 train_time:599549ms step_avg:88.16ms +[2025-08-22 14:39:59] [Rank 0] step:6801/10000 train_time:599549ms step_avg:88.16ms +[2025-08-22 14:40:01] [Rank 0] step:6821/10000 train_time:601317ms step_avg:88.16ms +[2025-08-22 14:40:01] [Rank 0] step:6821/10000 train_time:601317ms step_avg:88.16ms +[2025-08-22 14:40:03] [Rank 0] step:6841/10000 train_time:603173ms step_avg:88.17ms +[2025-08-22 14:40:03] [Rank 0] step:6841/10000 train_time:603173ms step_avg:88.17ms +[2025-08-22 14:40:05] [Rank 0] step:6861/10000 train_time:605032ms step_avg:88.18ms +[2025-08-22 14:40:05] [Rank 0] step:6861/10000 train_time:605032ms step_avg:88.18ms +[2025-08-22 14:40:07] [Rank 0] step:6881/10000 train_time:606893ms step_avg:88.20ms +[2025-08-22 14:40:07] [Rank 0] step:6881/10000 train_time:606893ms step_avg:88.20ms +[2025-08-22 14:40:09] [Rank 0] step:6901/10000 train_time:608754ms step_avg:88.21ms +[2025-08-22 14:40:09] [Rank 0] step:6901/10000 train_time:608754ms step_avg:88.21ms +[2025-08-22 14:40:11] [Rank 0] step:6921/10000 train_time:610612ms step_avg:88.23ms +[2025-08-22 14:40:11] [Rank 0] step:6921/10000 train_time:610612ms step_avg:88.23ms +[2025-08-22 
14:40:12] [Rank 0] step:6941/10000 train_time:612477ms step_avg:88.24ms +[2025-08-22 14:40:12] [Rank 0] step:6941/10000 train_time:612477ms step_avg:88.24ms +[2025-08-22 14:40:14] [Rank 0] step:6961/10000 train_time:614364ms step_avg:88.26ms +[2025-08-22 14:40:14] [Rank 0] step:6961/10000 train_time:614364ms step_avg:88.26ms +[2025-08-22 14:40:16] [Rank 0] step:6981/10000 train_time:616231ms step_avg:88.27ms +[2025-08-22 14:40:16] [Rank 0] step:6981/10000 train_time:616231ms step_avg:88.27ms +[2025-08-22 14:40:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:40:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:40:32] [Rank 0] PRINT: step:7000/10000 val_loss:3.9211 svd_entropy: attn_qk:H=0.8862,top10E=0.08,eRank=370.4,q75/q25=52.58 attn_vo:H=0.5983,top10E=0.44,eRank=69.1,q75/q25=115.00 mlp_w1:H=0.7880,top10E=0.23,eRank=223.9,q75/q25=13.10 mlp_w2:H=0.8988,top10E=0.12,eRank=399.9,q75/q25=7.87 vo_prod:H=0.5028,top10E=0.61,eRank=35.5,q75/q25=12758.32 train_time:618192ms step_avg:88.31ms +[2025-08-22 14:40:32] [Rank 0] PRINT: step:7000/10000 val_loss:3.9211 svd_entropy: attn_qk:H=0.8862,top10E=0.08,eRank=370.4,q75/q25=52.58 attn_vo:H=0.5983,top10E=0.44,eRank=69.1,q75/q25=115.00 mlp_w1:H=0.7880,top10E=0.23,eRank=223.9,q75/q25=13.10 mlp_w2:H=0.8988,top10E=0.12,eRank=399.9,q75/q25=7.87 vo_prod:H=0.5028,top10E=0.61,eRank=35.5,q75/q25=12758.32 train_time:618192ms step_avg:88.31ms +[2025-08-22 14:40:32] [Rank 0] step:7001/10000 train_time:618206ms step_avg:88.30ms +[2025-08-22 14:40:32] [Rank 0] step:7001/10000 train_time:618206ms step_avg:88.30ms +[2025-08-22 14:40:34] [Rank 0] step:7021/10000 train_time:619980ms step_avg:88.30ms +[2025-08-22 14:40:34] [Rank 0] step:7021/10000 train_time:619980ms step_avg:88.30ms +[2025-08-22 14:40:36] [Rank 0] step:7041/10000 train_time:621835ms 
step_avg:88.32ms +[2025-08-22 14:40:36] [Rank 0] step:7041/10000 train_time:621835ms step_avg:88.32ms +[2025-08-22 14:40:37] [Rank 0] step:7061/10000 train_time:623691ms step_avg:88.33ms +[2025-08-22 14:40:37] [Rank 0] step:7061/10000 train_time:623691ms step_avg:88.33ms +[2025-08-22 14:40:39] [Rank 0] step:7081/10000 train_time:625549ms step_avg:88.34ms +[2025-08-22 14:40:39] [Rank 0] step:7081/10000 train_time:625549ms step_avg:88.34ms +[2025-08-22 14:40:41] [Rank 0] step:7101/10000 train_time:627412ms step_avg:88.36ms +[2025-08-22 14:40:41] [Rank 0] step:7101/10000 train_time:627412ms step_avg:88.36ms +[2025-08-22 14:40:43] [Rank 0] step:7121/10000 train_time:629271ms step_avg:88.37ms +[2025-08-22 14:40:43] [Rank 0] step:7121/10000 train_time:629271ms step_avg:88.37ms +[2025-08-22 14:40:45] [Rank 0] step:7141/10000 train_time:631134ms step_avg:88.38ms +[2025-08-22 14:40:45] [Rank 0] step:7141/10000 train_time:631134ms step_avg:88.38ms +[2025-08-22 14:40:47] [Rank 0] step:7161/10000 train_time:632997ms step_avg:88.40ms +[2025-08-22 14:40:47] [Rank 0] step:7161/10000 train_time:632997ms step_avg:88.40ms +[2025-08-22 14:40:49] [Rank 0] step:7181/10000 train_time:634859ms step_avg:88.41ms +[2025-08-22 14:40:49] [Rank 0] step:7181/10000 train_time:634859ms step_avg:88.41ms +[2025-08-22 14:40:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:40:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:41:04] [Rank 0] PRINT: step:7200/10000 val_loss:3.9065 svd_entropy: attn_qk:H=0.8865,top10E=0.08,eRank=371.1,q75/q25=52.41 attn_vo:H=0.6003,top10E=0.44,eRank=70.0,q75/q25=114.76 mlp_w1:H=0.7890,top10E=0.23,eRank=225.4,q75/q25=13.06 mlp_w2:H=0.8993,top10E=0.12,eRank=401.3,q75/q25=7.83 vo_prod:H=0.5047,top10E=0.61,eRank=36.0,q75/q25=12866.10 train_time:636815ms step_avg:88.45ms +[2025-08-22 14:41:04] [Rank 0] PRINT: step:7200/10000 val_loss:3.9065 svd_entropy: attn_qk:H=0.8865,top10E=0.08,eRank=371.1,q75/q25=52.41 attn_vo:H=0.6003,top10E=0.44,eRank=70.0,q75/q25=114.76 mlp_w1:H=0.7890,top10E=0.23,eRank=225.4,q75/q25=13.06 mlp_w2:H=0.8993,top10E=0.12,eRank=401.3,q75/q25=7.83 vo_prod:H=0.5047,top10E=0.61,eRank=36.0,q75/q25=12866.10 train_time:636815ms step_avg:88.45ms +[2025-08-22 14:41:04] [Rank 0] step:7201/10000 train_time:636830ms step_avg:88.44ms +[2025-08-22 14:41:04] [Rank 0] step:7201/10000 train_time:636830ms step_avg:88.44ms +[2025-08-22 14:41:06] [Rank 0] step:7221/10000 train_time:638621ms step_avg:88.44ms +[2025-08-22 14:41:06] [Rank 0] step:7221/10000 train_time:638621ms step_avg:88.44ms +[2025-08-22 14:41:08] [Rank 0] step:7241/10000 train_time:640510ms step_avg:88.46ms +[2025-08-22 14:41:08] [Rank 0] step:7241/10000 train_time:640510ms step_avg:88.46ms +[2025-08-22 14:41:10] [Rank 0] step:7261/10000 train_time:642363ms step_avg:88.47ms +[2025-08-22 14:41:10] [Rank 0] step:7261/10000 train_time:642363ms step_avg:88.47ms +[2025-08-22 14:41:12] [Rank 0] step:7281/10000 train_time:644229ms step_avg:88.48ms +[2025-08-22 14:41:12] [Rank 0] step:7281/10000 train_time:644229ms step_avg:88.48ms +[2025-08-22 14:41:14] [Rank 0] step:7301/10000 train_time:646088ms step_avg:88.49ms +[2025-08-22 14:41:14] [Rank 0] step:7301/10000 train_time:646088ms step_avg:88.49ms +[2025-08-22 14:41:16] [Rank 0] step:7321/10000 train_time:647958ms step_avg:88.51ms +[2025-08-22 14:41:16] [Rank 0] step:7321/10000 train_time:647958ms step_avg:88.51ms +[2025-08-22 
14:41:17] [Rank 0] step:7341/10000 train_time:649818ms step_avg:88.52ms +[2025-08-22 14:41:17] [Rank 0] step:7341/10000 train_time:649818ms step_avg:88.52ms +[2025-08-22 14:41:19] [Rank 0] step:7361/10000 train_time:651687ms step_avg:88.53ms +[2025-08-22 14:41:19] [Rank 0] step:7361/10000 train_time:651687ms step_avg:88.53ms +[2025-08-22 14:41:21] [Rank 0] step:7381/10000 train_time:653554ms step_avg:88.55ms +[2025-08-22 14:41:21] [Rank 0] step:7381/10000 train_time:653554ms step_avg:88.55ms +[2025-08-22 14:41:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:41:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:41:37] [Rank 0] PRINT: step:7400/10000 val_loss:3.8827 svd_entropy: attn_qk:H=0.8868,top10E=0.08,eRank=371.7,q75/q25=52.77 attn_vo:H=0.6020,top10E=0.43,eRank=70.8,q75/q25=114.25 mlp_w1:H=0.7898,top10E=0.23,eRank=226.7,q75/q25=13.02 mlp_w2:H=0.8998,top10E=0.12,eRank=402.4,q75/q25=7.80 vo_prod:H=0.5064,top10E=0.60,eRank=36.4,q75/q25=12623.36 train_time:655492ms step_avg:88.58ms +[2025-08-22 14:41:37] [Rank 0] PRINT: step:7400/10000 val_loss:3.8827 svd_entropy: attn_qk:H=0.8868,top10E=0.08,eRank=371.7,q75/q25=52.77 attn_vo:H=0.6020,top10E=0.43,eRank=70.8,q75/q25=114.25 mlp_w1:H=0.7898,top10E=0.23,eRank=226.7,q75/q25=13.02 mlp_w2:H=0.8998,top10E=0.12,eRank=402.4,q75/q25=7.80 vo_prod:H=0.5064,top10E=0.60,eRank=36.4,q75/q25=12623.36 train_time:655492ms step_avg:88.58ms +[2025-08-22 14:41:37] [Rank 0] step:7401/10000 train_time:655508ms step_avg:88.57ms +[2025-08-22 14:41:37] [Rank 0] step:7401/10000 train_time:655508ms step_avg:88.57ms +[2025-08-22 14:41:39] [Rank 0] step:7421/10000 train_time:657300ms step_avg:88.57ms +[2025-08-22 14:41:39] [Rank 0] step:7421/10000 train_time:657300ms step_avg:88.57ms +[2025-08-22 14:41:41] [Rank 0] step:7441/10000 train_time:659159ms 
step_avg:88.58ms +[2025-08-22 14:41:41] [Rank 0] step:7441/10000 train_time:659159ms step_avg:88.58ms +[2025-08-22 14:41:42] [Rank 0] step:7461/10000 train_time:661020ms step_avg:88.60ms +[2025-08-22 14:41:42] [Rank 0] step:7461/10000 train_time:661020ms step_avg:88.60ms +[2025-08-22 14:41:44] [Rank 0] step:7481/10000 train_time:662891ms step_avg:88.61ms +[2025-08-22 14:41:44] [Rank 0] step:7481/10000 train_time:662891ms step_avg:88.61ms +[2025-08-22 14:41:46] [Rank 0] step:7501/10000 train_time:664760ms step_avg:88.62ms +[2025-08-22 14:41:46] [Rank 0] step:7501/10000 train_time:664760ms step_avg:88.62ms +[2025-08-22 14:41:48] [Rank 0] step:7521/10000 train_time:666627ms step_avg:88.64ms +[2025-08-22 14:41:48] [Rank 0] step:7521/10000 train_time:666627ms step_avg:88.64ms +[2025-08-22 14:41:50] [Rank 0] step:7541/10000 train_time:668508ms step_avg:88.65ms +[2025-08-22 14:41:50] [Rank 0] step:7541/10000 train_time:668508ms step_avg:88.65ms +[2025-08-22 14:41:52] [Rank 0] step:7561/10000 train_time:670365ms step_avg:88.66ms +[2025-08-22 14:41:52] [Rank 0] step:7561/10000 train_time:670365ms step_avg:88.66ms +[2025-08-22 14:41:54] [Rank 0] step:7581/10000 train_time:672243ms step_avg:88.67ms +[2025-08-22 14:41:54] [Rank 0] step:7581/10000 train_time:672243ms step_avg:88.67ms +[2025-08-22 14:41:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:41:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:42:09] [Rank 0] PRINT: step:7600/10000 val_loss:3.8832 svd_entropy: attn_qk:H=0.8870,top10E=0.08,eRank=372.2,q75/q25=52.30 attn_vo:H=0.6035,top10E=0.43,eRank=71.6,q75/q25=113.43 mlp_w1:H=0.7906,top10E=0.23,eRank=228.0,q75/q25=13.00 mlp_w2:H=0.9002,top10E=0.12,eRank=403.5,q75/q25=7.75 vo_prod:H=0.5082,top10E=0.60,eRank=36.9,q75/q25=12232.14 train_time:674213ms step_avg:88.71ms +[2025-08-22 14:42:09] [Rank 0] PRINT: step:7600/10000 val_loss:3.8832 svd_entropy: attn_qk:H=0.8870,top10E=0.08,eRank=372.2,q75/q25=52.30 attn_vo:H=0.6035,top10E=0.43,eRank=71.6,q75/q25=113.43 mlp_w1:H=0.7906,top10E=0.23,eRank=228.0,q75/q25=13.00 mlp_w2:H=0.9002,top10E=0.12,eRank=403.5,q75/q25=7.75 vo_prod:H=0.5082,top10E=0.60,eRank=36.9,q75/q25=12232.14 train_time:674213ms step_avg:88.71ms +[2025-08-22 14:42:09] [Rank 0] step:7601/10000 train_time:674228ms step_avg:88.70ms +[2025-08-22 14:42:09] [Rank 0] step:7601/10000 train_time:674228ms step_avg:88.70ms +[2025-08-22 14:42:11] [Rank 0] step:7621/10000 train_time:676015ms step_avg:88.70ms +[2025-08-22 14:42:11] [Rank 0] step:7621/10000 train_time:676015ms step_avg:88.70ms +[2025-08-22 14:42:13] [Rank 0] step:7641/10000 train_time:677875ms step_avg:88.72ms +[2025-08-22 14:42:13] [Rank 0] step:7641/10000 train_time:677875ms step_avg:88.72ms +[2025-08-22 14:42:15] [Rank 0] step:7661/10000 train_time:679739ms step_avg:88.73ms +[2025-08-22 14:42:15] [Rank 0] step:7661/10000 train_time:679739ms step_avg:88.73ms +[2025-08-22 14:42:17] [Rank 0] step:7681/10000 train_time:681597ms step_avg:88.74ms +[2025-08-22 14:42:17] [Rank 0] step:7681/10000 train_time:681597ms step_avg:88.74ms +[2025-08-22 14:42:19] [Rank 0] step:7701/10000 train_time:683458ms step_avg:88.75ms +[2025-08-22 14:42:19] [Rank 0] step:7701/10000 train_time:683458ms step_avg:88.75ms +[2025-08-22 14:42:20] [Rank 0] step:7721/10000 train_time:685334ms step_avg:88.76ms +[2025-08-22 14:42:20] [Rank 0] step:7721/10000 train_time:685334ms step_avg:88.76ms +[2025-08-22 
14:42:22] [Rank 0] step:7741/10000 train_time:687196ms step_avg:88.77ms +[2025-08-22 14:42:22] [Rank 0] step:7741/10000 train_time:687196ms step_avg:88.77ms +[2025-08-22 14:42:24] [Rank 0] step:7761/10000 train_time:689066ms step_avg:88.79ms +[2025-08-22 14:42:24] [Rank 0] step:7761/10000 train_time:689066ms step_avg:88.79ms +[2025-08-22 14:42:26] [Rank 0] step:7781/10000 train_time:690933ms step_avg:88.80ms +[2025-08-22 14:42:26] [Rank 0] step:7781/10000 train_time:690933ms step_avg:88.80ms +[2025-08-22 14:42:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:42:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:42:42] [Rank 0] PRINT: step:7800/10000 val_loss:3.8624 svd_entropy: attn_qk:H=0.8872,top10E=0.08,eRank=372.7,q75/q25=52.00 attn_vo:H=0.6051,top10E=0.43,eRank=72.3,q75/q25=113.18 mlp_w1:H=0.7914,top10E=0.23,eRank=229.1,q75/q25=12.94 mlp_w2:H=0.9007,top10E=0.12,eRank=404.5,q75/q25=7.73 vo_prod:H=0.5098,top10E=0.60,eRank=37.3,q75/q25=12129.82 train_time:692901ms step_avg:88.83ms +[2025-08-22 14:42:42] [Rank 0] PRINT: step:7800/10000 val_loss:3.8624 svd_entropy: attn_qk:H=0.8872,top10E=0.08,eRank=372.7,q75/q25=52.00 attn_vo:H=0.6051,top10E=0.43,eRank=72.3,q75/q25=113.18 mlp_w1:H=0.7914,top10E=0.23,eRank=229.1,q75/q25=12.94 mlp_w2:H=0.9007,top10E=0.12,eRank=404.5,q75/q25=7.73 vo_prod:H=0.5098,top10E=0.60,eRank=37.3,q75/q25=12129.82 train_time:692901ms step_avg:88.83ms +[2025-08-22 14:42:42] [Rank 0] step:7801/10000 train_time:692915ms step_avg:88.82ms +[2025-08-22 14:42:42] [Rank 0] step:7801/10000 train_time:692915ms step_avg:88.82ms +[2025-08-22 14:42:44] [Rank 0] step:7821/10000 train_time:694703ms step_avg:88.83ms +[2025-08-22 14:42:44] [Rank 0] step:7821/10000 train_time:694703ms step_avg:88.83ms +[2025-08-22 14:42:45] [Rank 0] step:7841/10000 train_time:696560ms 
step_avg:88.84ms +[2025-08-22 14:42:45] [Rank 0] step:7841/10000 train_time:696560ms step_avg:88.84ms +[2025-08-22 14:42:47] [Rank 0] step:7861/10000 train_time:698426ms step_avg:88.85ms +[2025-08-22 14:42:47] [Rank 0] step:7861/10000 train_time:698426ms step_avg:88.85ms +[2025-08-22 14:42:49] [Rank 0] step:7881/10000 train_time:700292ms step_avg:88.86ms +[2025-08-22 14:42:49] [Rank 0] step:7881/10000 train_time:700292ms step_avg:88.86ms +[2025-08-22 14:42:51] [Rank 0] step:7901/10000 train_time:702151ms step_avg:88.87ms +[2025-08-22 14:42:51] [Rank 0] step:7901/10000 train_time:702151ms step_avg:88.87ms +[2025-08-22 14:42:53] [Rank 0] step:7921/10000 train_time:704020ms step_avg:88.88ms +[2025-08-22 14:42:53] [Rank 0] step:7921/10000 train_time:704020ms step_avg:88.88ms +[2025-08-22 14:42:55] [Rank 0] step:7941/10000 train_time:705890ms step_avg:88.89ms +[2025-08-22 14:42:55] [Rank 0] step:7941/10000 train_time:705890ms step_avg:88.89ms +[2025-08-22 14:42:57] [Rank 0] step:7961/10000 train_time:707760ms step_avg:88.90ms +[2025-08-22 14:42:57] [Rank 0] step:7961/10000 train_time:707760ms step_avg:88.90ms +[2025-08-22 14:42:59] [Rank 0] step:7981/10000 train_time:709618ms step_avg:88.91ms +[2025-08-22 14:42:59] [Rank 0] step:7981/10000 train_time:709618ms step_avg:88.91ms +[2025-08-22 14:43:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:43:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:43:14] [Rank 0] PRINT: step:8000/10000 val_loss:3.8411 svd_entropy: attn_qk:H=0.8874,top10E=0.08,eRank=373.1,q75/q25=52.06 attn_vo:H=0.6064,top10E=0.42,eRank=73.0,q75/q25=113.08 mlp_w1:H=0.7920,top10E=0.23,eRank=230.1,q75/q25=12.90 mlp_w2:H=0.9010,top10E=0.12,eRank=405.5,q75/q25=7.68 vo_prod:H=0.5110,top10E=0.60,eRank=37.6,q75/q25=12090.17 train_time:711578ms step_avg:88.95ms +[2025-08-22 14:43:14] [Rank 0] PRINT: step:8000/10000 val_loss:3.8411 svd_entropy: attn_qk:H=0.8874,top10E=0.08,eRank=373.1,q75/q25=52.06 attn_vo:H=0.6064,top10E=0.42,eRank=73.0,q75/q25=113.08 mlp_w1:H=0.7920,top10E=0.23,eRank=230.1,q75/q25=12.90 mlp_w2:H=0.9010,top10E=0.12,eRank=405.5,q75/q25=7.68 vo_prod:H=0.5110,top10E=0.60,eRank=37.6,q75/q25=12090.17 train_time:711578ms step_avg:88.95ms +[2025-08-22 14:43:14] [Rank 0] step:8001/10000 train_time:711594ms step_avg:88.94ms +[2025-08-22 14:43:14] [Rank 0] step:8001/10000 train_time:711594ms step_avg:88.94ms +[2025-08-22 14:43:16] [Rank 0] step:8021/10000 train_time:713377ms step_avg:88.94ms +[2025-08-22 14:43:16] [Rank 0] step:8021/10000 train_time:713377ms step_avg:88.94ms +[2025-08-22 14:43:18] [Rank 0] step:8041/10000 train_time:715247ms step_avg:88.95ms +[2025-08-22 14:43:18] [Rank 0] step:8041/10000 train_time:715247ms step_avg:88.95ms +[2025-08-22 14:43:20] [Rank 0] step:8061/10000 train_time:717110ms step_avg:88.96ms +[2025-08-22 14:43:20] [Rank 0] step:8061/10000 train_time:717110ms step_avg:88.96ms +[2025-08-22 14:43:22] [Rank 0] step:8081/10000 train_time:718965ms step_avg:88.97ms +[2025-08-22 14:43:22] [Rank 0] step:8081/10000 train_time:718965ms step_avg:88.97ms +[2025-08-22 14:43:24] [Rank 0] step:8101/10000 train_time:720835ms step_avg:88.98ms +[2025-08-22 14:43:24] [Rank 0] step:8101/10000 train_time:720835ms step_avg:88.98ms +[2025-08-22 14:43:25] [Rank 0] step:8121/10000 train_time:722696ms step_avg:88.99ms +[2025-08-22 14:43:25] [Rank 0] step:8121/10000 train_time:722696ms step_avg:88.99ms +[2025-08-22 
14:43:27] [Rank 0] step:8141/10000 train_time:724716ms step_avg:89.02ms +[2025-08-22 14:43:27] [Rank 0] step:8141/10000 train_time:724716ms step_avg:89.02ms +[2025-08-22 14:43:29] [Rank 0] step:8161/10000 train_time:726595ms step_avg:89.03ms +[2025-08-22 14:43:29] [Rank 0] step:8161/10000 train_time:726595ms step_avg:89.03ms +[2025-08-22 14:43:31] [Rank 0] step:8181/10000 train_time:728490ms step_avg:89.05ms +[2025-08-22 14:43:31] [Rank 0] step:8181/10000 train_time:728490ms step_avg:89.05ms +[2025-08-22 14:43:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:43:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:43:47] [Rank 0] PRINT: step:8200/10000 val_loss:3.8333 svd_entropy: attn_qk:H=0.8876,top10E=0.08,eRank=373.6,q75/q25=52.58 attn_vo:H=0.6076,top10E=0.42,eRank=73.6,q75/q25=113.26 mlp_w1:H=0.7926,top10E=0.22,eRank=231.1,q75/q25=12.88 mlp_w2:H=0.9014,top10E=0.12,eRank=406.4,q75/q25=7.66 vo_prod:H=0.5124,top10E=0.59,eRank=38.0,q75/q25=12020.31 train_time:730505ms step_avg:89.09ms +[2025-08-22 14:43:47] [Rank 0] PRINT: step:8200/10000 val_loss:3.8333 svd_entropy: attn_qk:H=0.8876,top10E=0.08,eRank=373.6,q75/q25=52.58 attn_vo:H=0.6076,top10E=0.42,eRank=73.6,q75/q25=113.26 mlp_w1:H=0.7926,top10E=0.22,eRank=231.1,q75/q25=12.88 mlp_w2:H=0.9014,top10E=0.12,eRank=406.4,q75/q25=7.66 vo_prod:H=0.5124,top10E=0.59,eRank=38.0,q75/q25=12020.31 train_time:730505ms step_avg:89.09ms +[2025-08-22 14:43:47] [Rank 0] step:8201/10000 train_time:730519ms step_avg:89.08ms +[2025-08-22 14:43:47] [Rank 0] step:8201/10000 train_time:730519ms step_avg:89.08ms +[2025-08-22 14:43:49] [Rank 0] step:8221/10000 train_time:732348ms step_avg:89.08ms +[2025-08-22 14:43:49] [Rank 0] step:8221/10000 train_time:732348ms step_avg:89.08ms +[2025-08-22 14:43:51] [Rank 0] step:8241/10000 train_time:734250ms 
step_avg:89.10ms +[2025-08-22 14:43:51] [Rank 0] step:8241/10000 train_time:734250ms step_avg:89.10ms +[2025-08-22 14:43:53] [Rank 0] step:8261/10000 train_time:736152ms step_avg:89.11ms +[2025-08-22 14:43:53] [Rank 0] step:8261/10000 train_time:736152ms step_avg:89.11ms +[2025-08-22 14:43:55] [Rank 0] step:8281/10000 train_time:738050ms step_avg:89.13ms +[2025-08-22 14:43:55] [Rank 0] step:8281/10000 train_time:738050ms step_avg:89.13ms +[2025-08-22 14:43:56] [Rank 0] step:8301/10000 train_time:739944ms step_avg:89.14ms +[2025-08-22 14:43:56] [Rank 0] step:8301/10000 train_time:739944ms step_avg:89.14ms +[2025-08-22 14:43:58] [Rank 0] step:8321/10000 train_time:741833ms step_avg:89.15ms +[2025-08-22 14:43:58] [Rank 0] step:8321/10000 train_time:741833ms step_avg:89.15ms +[2025-08-22 14:44:00] [Rank 0] step:8341/10000 train_time:743733ms step_avg:89.17ms +[2025-08-22 14:44:00] [Rank 0] step:8341/10000 train_time:743733ms step_avg:89.17ms +[2025-08-22 14:44:02] [Rank 0] step:8361/10000 train_time:745631ms step_avg:89.18ms +[2025-08-22 14:44:02] [Rank 0] step:8361/10000 train_time:745631ms step_avg:89.18ms +[2025-08-22 14:44:04] [Rank 0] step:8381/10000 train_time:747527ms step_avg:89.19ms +[2025-08-22 14:44:04] [Rank 0] step:8381/10000 train_time:747527ms step_avg:89.19ms +[2025-08-22 14:44:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:44:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:44:20] [Rank 0] PRINT: step:8400/10000 val_loss:3.8174 svd_entropy: attn_qk:H=0.8878,top10E=0.08,eRank=373.9,q75/q25=52.15 attn_vo:H=0.6087,top10E=0.42,eRank=74.1,q75/q25=113.60 mlp_w1:H=0.7932,top10E=0.22,eRank=232.0,q75/q25=12.85 mlp_w2:H=0.9017,top10E=0.12,eRank=407.2,q75/q25=7.62 vo_prod:H=0.5134,top10E=0.59,eRank=38.2,q75/q25=11906.49 train_time:749515ms step_avg:89.23ms +[2025-08-22 14:44:20] [Rank 0] PRINT: step:8400/10000 val_loss:3.8174 svd_entropy: attn_qk:H=0.8878,top10E=0.08,eRank=373.9,q75/q25=52.15 attn_vo:H=0.6087,top10E=0.42,eRank=74.1,q75/q25=113.60 mlp_w1:H=0.7932,top10E=0.22,eRank=232.0,q75/q25=12.85 mlp_w2:H=0.9017,top10E=0.12,eRank=407.2,q75/q25=7.62 vo_prod:H=0.5134,top10E=0.59,eRank=38.2,q75/q25=11906.49 train_time:749515ms step_avg:89.23ms +[2025-08-22 14:44:20] [Rank 0] step:8401/10000 train_time:749530ms step_avg:89.22ms +[2025-08-22 14:44:20] [Rank 0] step:8401/10000 train_time:749530ms step_avg:89.22ms +[2025-08-22 14:44:22] [Rank 0] step:8421/10000 train_time:751336ms step_avg:89.22ms +[2025-08-22 14:44:22] [Rank 0] step:8421/10000 train_time:751336ms step_avg:89.22ms +[2025-08-22 14:44:24] [Rank 0] step:8441/10000 train_time:753222ms step_avg:89.23ms +[2025-08-22 14:44:24] [Rank 0] step:8441/10000 train_time:753222ms step_avg:89.23ms +[2025-08-22 14:44:26] [Rank 0] step:8461/10000 train_time:755112ms step_avg:89.25ms +[2025-08-22 14:44:26] [Rank 0] step:8461/10000 train_time:755112ms step_avg:89.25ms +[2025-08-22 14:44:27] [Rank 0] step:8481/10000 train_time:757008ms step_avg:89.26ms +[2025-08-22 14:44:27] [Rank 0] step:8481/10000 train_time:757008ms step_avg:89.26ms +[2025-08-22 14:44:29] [Rank 0] step:8501/10000 train_time:758921ms step_avg:89.27ms +[2025-08-22 14:44:29] [Rank 0] step:8501/10000 train_time:758921ms step_avg:89.27ms +[2025-08-22 14:44:31] [Rank 0] step:8521/10000 train_time:760819ms step_avg:89.29ms +[2025-08-22 14:44:31] [Rank 0] step:8521/10000 train_time:760819ms step_avg:89.29ms +[2025-08-22 
14:44:33] [Rank 0] step:8541/10000 train_time:762726ms step_avg:89.30ms +[2025-08-22 14:44:33] [Rank 0] step:8541/10000 train_time:762726ms step_avg:89.30ms +[2025-08-22 14:44:35] [Rank 0] step:8561/10000 train_time:764619ms step_avg:89.31ms +[2025-08-22 14:44:35] [Rank 0] step:8561/10000 train_time:764619ms step_avg:89.31ms +[2025-08-22 14:44:37] [Rank 0] step:8581/10000 train_time:766518ms step_avg:89.33ms +[2025-08-22 14:44:37] [Rank 0] step:8581/10000 train_time:766518ms step_avg:89.33ms +[2025-08-22 14:44:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:44:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:44:52] [Rank 0] PRINT: step:8600/10000 val_loss:3.8067 svd_entropy: attn_qk:H=0.8879,top10E=0.08,eRank=374.1,q75/q25=52.18 attn_vo:H=0.6096,top10E=0.42,eRank=74.6,q75/q25=111.86 mlp_w1:H=0.7936,top10E=0.22,eRank=232.7,q75/q25=12.79 mlp_w2:H=0.9020,top10E=0.12,eRank=407.9,q75/q25=7.61 vo_prod:H=0.5142,top10E=0.59,eRank=38.5,q75/q25=11368.81 train_time:768500ms step_avg:89.36ms +[2025-08-22 14:44:52] [Rank 0] PRINT: step:8600/10000 val_loss:3.8067 svd_entropy: attn_qk:H=0.8879,top10E=0.08,eRank=374.1,q75/q25=52.18 attn_vo:H=0.6096,top10E=0.42,eRank=74.6,q75/q25=111.86 mlp_w1:H=0.7936,top10E=0.22,eRank=232.7,q75/q25=12.79 mlp_w2:H=0.9020,top10E=0.12,eRank=407.9,q75/q25=7.61 vo_prod:H=0.5142,top10E=0.59,eRank=38.5,q75/q25=11368.81 train_time:768500ms step_avg:89.36ms +[2025-08-22 14:44:52] [Rank 0] step:8601/10000 train_time:768515ms step_avg:89.35ms +[2025-08-22 14:44:52] [Rank 0] step:8601/10000 train_time:768515ms step_avg:89.35ms +[2025-08-22 14:44:54] [Rank 0] step:8621/10000 train_time:770314ms step_avg:89.35ms +[2025-08-22 14:44:54] [Rank 0] step:8621/10000 train_time:770314ms step_avg:89.35ms +[2025-08-22 14:44:56] [Rank 0] step:8641/10000 train_time:772199ms 
step_avg:89.36ms +[2025-08-22 14:44:56] [Rank 0] step:8641/10000 train_time:772199ms step_avg:89.36ms +[2025-08-22 14:44:58] [Rank 0] step:8661/10000 train_time:774092ms step_avg:89.38ms +[2025-08-22 14:44:58] [Rank 0] step:8661/10000 train_time:774092ms step_avg:89.38ms +[2025-08-22 14:45:00] [Rank 0] step:8681/10000 train_time:775983ms step_avg:89.39ms +[2025-08-22 14:45:00] [Rank 0] step:8681/10000 train_time:775983ms step_avg:89.39ms +[2025-08-22 14:45:02] [Rank 0] step:8701/10000 train_time:777869ms step_avg:89.40ms +[2025-08-22 14:45:02] [Rank 0] step:8701/10000 train_time:777869ms step_avg:89.40ms +[2025-08-22 14:45:04] [Rank 0] step:8721/10000 train_time:779767ms step_avg:89.41ms +[2025-08-22 14:45:04] [Rank 0] step:8721/10000 train_time:779767ms step_avg:89.41ms +[2025-08-22 14:45:06] [Rank 0] step:8741/10000 train_time:781651ms step_avg:89.42ms +[2025-08-22 14:45:06] [Rank 0] step:8741/10000 train_time:781651ms step_avg:89.42ms +[2025-08-22 14:45:08] [Rank 0] step:8761/10000 train_time:783542ms step_avg:89.44ms +[2025-08-22 14:45:08] [Rank 0] step:8761/10000 train_time:783542ms step_avg:89.44ms +[2025-08-22 14:45:09] [Rank 0] step:8781/10000 train_time:785437ms step_avg:89.45ms +[2025-08-22 14:45:09] [Rank 0] step:8781/10000 train_time:785437ms step_avg:89.45ms +[2025-08-22 14:45:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:45:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:45:25] [Rank 0] PRINT: step:8800/10000 val_loss:3.7926 svd_entropy: attn_qk:H=0.8880,top10E=0.08,eRank=374.3,q75/q25=51.99 attn_vo:H=0.6105,top10E=0.42,eRank=75.0,q75/q25=112.05 mlp_w1:H=0.7940,top10E=0.22,eRank=233.3,q75/q25=12.77 mlp_w2:H=0.9023,top10E=0.12,eRank=408.6,q75/q25=7.60 vo_prod:H=0.5151,top10E=0.59,eRank=38.7,q75/q25=11328.44 train_time:787423ms step_avg:89.48ms +[2025-08-22 14:45:25] [Rank 0] PRINT: step:8800/10000 val_loss:3.7926 svd_entropy: attn_qk:H=0.8880,top10E=0.08,eRank=374.3,q75/q25=51.99 attn_vo:H=0.6105,top10E=0.42,eRank=75.0,q75/q25=112.05 mlp_w1:H=0.7940,top10E=0.22,eRank=233.3,q75/q25=12.77 mlp_w2:H=0.9023,top10E=0.12,eRank=408.6,q75/q25=7.60 vo_prod:H=0.5151,top10E=0.59,eRank=38.7,q75/q25=11328.44 train_time:787423ms step_avg:89.48ms +[2025-08-22 14:45:25] [Rank 0] step:8801/10000 train_time:787439ms step_avg:89.47ms +[2025-08-22 14:45:25] [Rank 0] step:8801/10000 train_time:787439ms step_avg:89.47ms +[2025-08-22 14:45:27] [Rank 0] step:8821/10000 train_time:789245ms step_avg:89.47ms +[2025-08-22 14:45:27] [Rank 0] step:8821/10000 train_time:789245ms step_avg:89.47ms +[2025-08-22 14:45:29] [Rank 0] step:8841/10000 train_time:791153ms step_avg:89.49ms +[2025-08-22 14:45:29] [Rank 0] step:8841/10000 train_time:791153ms step_avg:89.49ms +[2025-08-22 14:45:31] [Rank 0] step:8861/10000 train_time:793038ms step_avg:89.50ms +[2025-08-22 14:45:31] [Rank 0] step:8861/10000 train_time:793038ms step_avg:89.50ms +[2025-08-22 14:45:33] [Rank 0] step:8881/10000 train_time:794928ms step_avg:89.51ms +[2025-08-22 14:45:33] [Rank 0] step:8881/10000 train_time:794928ms step_avg:89.51ms +[2025-08-22 14:45:35] [Rank 0] step:8901/10000 train_time:796819ms step_avg:89.52ms +[2025-08-22 14:45:35] [Rank 0] step:8901/10000 train_time:796819ms step_avg:89.52ms +[2025-08-22 14:45:37] [Rank 0] step:8921/10000 train_time:798723ms step_avg:89.53ms +[2025-08-22 14:45:37] [Rank 0] step:8921/10000 train_time:798723ms step_avg:89.53ms +[2025-08-22 
14:45:38] [Rank 0] step:8941/10000 train_time:800623ms step_avg:89.55ms +[2025-08-22 14:45:38] [Rank 0] step:8941/10000 train_time:800623ms step_avg:89.55ms +[2025-08-22 14:45:40] [Rank 0] step:8961/10000 train_time:802514ms step_avg:89.56ms +[2025-08-22 14:45:40] [Rank 0] step:8961/10000 train_time:802514ms step_avg:89.56ms +[2025-08-22 14:45:42] [Rank 0] step:8981/10000 train_time:804406ms step_avg:89.57ms +[2025-08-22 14:45:42] [Rank 0] step:8981/10000 train_time:804406ms step_avg:89.57ms +[2025-08-22 14:45:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:45:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:45:58] [Rank 0] PRINT: step:9000/10000 val_loss:3.7812 svd_entropy: attn_qk:H=0.8881,top10E=0.08,eRank=374.5,q75/q25=52.08 attn_vo:H=0.6113,top10E=0.42,eRank=75.4,q75/q25=112.51 mlp_w1:H=0.7944,top10E=0.22,eRank=234.0,q75/q25=12.75 mlp_w2:H=0.9025,top10E=0.11,eRank=409.2,q75/q25=7.57 vo_prod:H=0.5159,top10E=0.59,eRank=38.9,q75/q25=11411.45 train_time:806392ms step_avg:89.60ms +[2025-08-22 14:45:58] [Rank 0] PRINT: step:9000/10000 val_loss:3.7812 svd_entropy: attn_qk:H=0.8881,top10E=0.08,eRank=374.5,q75/q25=52.08 attn_vo:H=0.6113,top10E=0.42,eRank=75.4,q75/q25=112.51 mlp_w1:H=0.7944,top10E=0.22,eRank=234.0,q75/q25=12.75 mlp_w2:H=0.9025,top10E=0.11,eRank=409.2,q75/q25=7.57 vo_prod:H=0.5159,top10E=0.59,eRank=38.9,q75/q25=11411.45 train_time:806392ms step_avg:89.60ms +[2025-08-22 14:45:58] [Rank 0] step:9001/10000 train_time:806407ms step_avg:89.59ms +[2025-08-22 14:45:58] [Rank 0] step:9001/10000 train_time:806407ms step_avg:89.59ms +[2025-08-22 14:46:00] [Rank 0] step:9021/10000 train_time:808213ms step_avg:89.59ms +[2025-08-22 14:46:00] [Rank 0] step:9021/10000 train_time:808213ms step_avg:89.59ms +[2025-08-22 14:46:02] [Rank 0] step:9041/10000 train_time:810105ms 
step_avg:89.60ms +[2025-08-22 14:46:02] [Rank 0] step:9041/10000 train_time:810105ms step_avg:89.60ms +[2025-08-22 14:46:03] [Rank 0] step:9061/10000 train_time:812005ms step_avg:89.62ms +[2025-08-22 14:46:03] [Rank 0] step:9061/10000 train_time:812005ms step_avg:89.62ms +[2025-08-22 14:46:05] [Rank 0] step:9081/10000 train_time:813908ms step_avg:89.63ms +[2025-08-22 14:46:05] [Rank 0] step:9081/10000 train_time:813908ms step_avg:89.63ms +[2025-08-22 14:46:07] [Rank 0] step:9101/10000 train_time:815823ms step_avg:89.64ms +[2025-08-22 14:46:07] [Rank 0] step:9101/10000 train_time:815823ms step_avg:89.64ms +[2025-08-22 14:46:09] [Rank 0] step:9121/10000 train_time:817725ms step_avg:89.65ms +[2025-08-22 14:46:09] [Rank 0] step:9121/10000 train_time:817725ms step_avg:89.65ms +[2025-08-22 14:46:11] [Rank 0] step:9141/10000 train_time:819610ms step_avg:89.66ms +[2025-08-22 14:46:11] [Rank 0] step:9141/10000 train_time:819610ms step_avg:89.66ms +[2025-08-22 14:46:13] [Rank 0] step:9161/10000 train_time:821502ms step_avg:89.67ms +[2025-08-22 14:46:13] [Rank 0] step:9161/10000 train_time:821502ms step_avg:89.67ms +[2025-08-22 14:46:15] [Rank 0] step:9181/10000 train_time:823432ms step_avg:89.69ms +[2025-08-22 14:46:15] [Rank 0] step:9181/10000 train_time:823432ms step_avg:89.69ms +[2025-08-22 14:46:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:46:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:46:30] [Rank 0] PRINT: step:9200/10000 val_loss:3.7724 svd_entropy: attn_qk:H=0.8881,top10E=0.08,eRank=374.7,q75/q25=51.96 attn_vo:H=0.6120,top10E=0.41,eRank=75.7,q75/q25=111.92 mlp_w1:H=0.7948,top10E=0.22,eRank=234.5,q75/q25=12.74 mlp_w2:H=0.9027,top10E=0.11,eRank=409.8,q75/q25=7.56 vo_prod:H=0.5167,top10E=0.59,eRank=39.1,q75/q25=11309.72 train_time:825419ms step_avg:89.72ms +[2025-08-22 14:46:30] [Rank 0] PRINT: step:9200/10000 val_loss:3.7724 svd_entropy: attn_qk:H=0.8881,top10E=0.08,eRank=374.7,q75/q25=51.96 attn_vo:H=0.6120,top10E=0.41,eRank=75.7,q75/q25=111.92 mlp_w1:H=0.7948,top10E=0.22,eRank=234.5,q75/q25=12.74 mlp_w2:H=0.9027,top10E=0.11,eRank=409.8,q75/q25=7.56 vo_prod:H=0.5167,top10E=0.59,eRank=39.1,q75/q25=11309.72 train_time:825419ms step_avg:89.72ms +[2025-08-22 14:46:31] [Rank 0] step:9201/10000 train_time:825432ms step_avg:89.71ms +[2025-08-22 14:46:31] [Rank 0] step:9201/10000 train_time:825432ms step_avg:89.71ms +[2025-08-22 14:46:32] [Rank 0] step:9221/10000 train_time:827260ms step_avg:89.71ms +[2025-08-22 14:46:32] [Rank 0] step:9221/10000 train_time:827260ms step_avg:89.71ms +[2025-08-22 14:46:34] [Rank 0] step:9241/10000 train_time:829162ms step_avg:89.73ms +[2025-08-22 14:46:34] [Rank 0] step:9241/10000 train_time:829162ms step_avg:89.73ms +[2025-08-22 14:46:36] [Rank 0] step:9261/10000 train_time:831065ms step_avg:89.74ms +[2025-08-22 14:46:36] [Rank 0] step:9261/10000 train_time:831065ms step_avg:89.74ms +[2025-08-22 14:46:38] [Rank 0] step:9281/10000 train_time:832949ms step_avg:89.75ms +[2025-08-22 14:46:38] [Rank 0] step:9281/10000 train_time:832949ms step_avg:89.75ms +[2025-08-22 14:46:40] [Rank 0] step:9301/10000 train_time:834837ms step_avg:89.76ms +[2025-08-22 14:46:40] [Rank 0] step:9301/10000 train_time:834837ms step_avg:89.76ms +[2025-08-22 14:46:42] [Rank 0] step:9321/10000 train_time:836737ms step_avg:89.77ms +[2025-08-22 14:46:42] [Rank 0] step:9321/10000 train_time:836737ms step_avg:89.77ms +[2025-08-22 
14:46:44] [Rank 0] step:9341/10000 train_time:838630ms step_avg:89.78ms +[2025-08-22 14:46:44] [Rank 0] step:9341/10000 train_time:838630ms step_avg:89.78ms +[2025-08-22 14:46:46] [Rank 0] step:9361/10000 train_time:840532ms step_avg:89.79ms +[2025-08-22 14:46:46] [Rank 0] step:9361/10000 train_time:840532ms step_avg:89.79ms +[2025-08-22 14:46:48] [Rank 0] step:9381/10000 train_time:842443ms step_avg:89.80ms +[2025-08-22 14:46:48] [Rank 0] step:9381/10000 train_time:842443ms step_avg:89.80ms +[2025-08-22 14:46:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:46:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:47:03] [Rank 0] PRINT: step:9400/10000 val_loss:3.7637 svd_entropy: attn_qk:H=0.8882,top10E=0.08,eRank=374.8,q75/q25=51.85 attn_vo:H=0.6125,top10E=0.41,eRank=76.0,q75/q25=112.41 mlp_w1:H=0.7950,top10E=0.22,eRank=234.9,q75/q25=12.71 mlp_w2:H=0.9029,top10E=0.11,eRank=410.2,q75/q25=7.55 vo_prod:H=0.5172,top10E=0.58,eRank=39.3,q75/q25=11475.83 train_time:844440ms step_avg:89.83ms +[2025-08-22 14:47:03] [Rank 0] PRINT: step:9400/10000 val_loss:3.7637 svd_entropy: attn_qk:H=0.8882,top10E=0.08,eRank=374.8,q75/q25=51.85 attn_vo:H=0.6125,top10E=0.41,eRank=76.0,q75/q25=112.41 mlp_w1:H=0.7950,top10E=0.22,eRank=234.9,q75/q25=12.71 mlp_w2:H=0.9029,top10E=0.11,eRank=410.2,q75/q25=7.55 vo_prod:H=0.5172,top10E=0.58,eRank=39.3,q75/q25=11475.83 train_time:844440ms step_avg:89.83ms +[2025-08-22 14:47:03] [Rank 0] step:9401/10000 train_time:844456ms step_avg:89.83ms +[2025-08-22 14:47:03] [Rank 0] step:9401/10000 train_time:844456ms step_avg:89.83ms +[2025-08-22 14:47:05] [Rank 0] step:9421/10000 train_time:846252ms step_avg:89.83ms +[2025-08-22 14:47:05] [Rank 0] step:9421/10000 train_time:846252ms step_avg:89.83ms +[2025-08-22 14:47:07] [Rank 0] step:9441/10000 train_time:848146ms 
step_avg:89.84ms +[2025-08-22 14:47:07] [Rank 0] step:9441/10000 train_time:848146ms step_avg:89.84ms +[2025-08-22 14:47:09] [Rank 0] step:9461/10000 train_time:850046ms step_avg:89.85ms +[2025-08-22 14:47:09] [Rank 0] step:9461/10000 train_time:850046ms step_avg:89.85ms +[2025-08-22 14:47:11] [Rank 0] step:9481/10000 train_time:851944ms step_avg:89.86ms +[2025-08-22 14:47:11] [Rank 0] step:9481/10000 train_time:851944ms step_avg:89.86ms +[2025-08-22 14:47:13] [Rank 0] step:9501/10000 train_time:853853ms step_avg:89.87ms +[2025-08-22 14:47:13] [Rank 0] step:9501/10000 train_time:853853ms step_avg:89.87ms +[2025-08-22 14:47:15] [Rank 0] step:9521/10000 train_time:855743ms step_avg:89.88ms +[2025-08-22 14:47:15] [Rank 0] step:9521/10000 train_time:855743ms step_avg:89.88ms +[2025-08-22 14:47:16] [Rank 0] step:9541/10000 train_time:857639ms step_avg:89.89ms +[2025-08-22 14:47:16] [Rank 0] step:9541/10000 train_time:857639ms step_avg:89.89ms +[2025-08-22 14:47:18] [Rank 0] step:9561/10000 train_time:859529ms step_avg:89.90ms +[2025-08-22 14:47:18] [Rank 0] step:9561/10000 train_time:859529ms step_avg:89.90ms +[2025-08-22 14:47:20] [Rank 0] step:9581/10000 train_time:861429ms step_avg:89.91ms +[2025-08-22 14:47:20] [Rank 0] step:9581/10000 train_time:861429ms step_avg:89.91ms +[2025-08-22 14:47:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:47:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:47:36] [Rank 0] PRINT: step:9600/10000 val_loss:3.7544 svd_entropy: attn_qk:H=0.8883,top10E=0.08,eRank=374.9,q75/q25=52.07 attn_vo:H=0.6130,top10E=0.41,eRank=76.2,q75/q25=112.73 mlp_w1:H=0.7952,top10E=0.22,eRank=235.3,q75/q25=12.69 mlp_w2:H=0.9031,top10E=0.11,eRank=410.6,q75/q25=7.53 vo_prod:H=0.5176,top10E=0.58,eRank=39.4,q75/q25=11519.22 train_time:863435ms step_avg:89.94ms +[2025-08-22 14:47:36] [Rank 0] PRINT: step:9600/10000 val_loss:3.7544 svd_entropy: attn_qk:H=0.8883,top10E=0.08,eRank=374.9,q75/q25=52.07 attn_vo:H=0.6130,top10E=0.41,eRank=76.2,q75/q25=112.73 mlp_w1:H=0.7952,top10E=0.22,eRank=235.3,q75/q25=12.69 mlp_w2:H=0.9031,top10E=0.11,eRank=410.6,q75/q25=7.53 vo_prod:H=0.5176,top10E=0.58,eRank=39.4,q75/q25=11519.22 train_time:863435ms step_avg:89.94ms +[2025-08-22 14:47:36] [Rank 0] step:9601/10000 train_time:863449ms step_avg:89.93ms +[2025-08-22 14:47:36] [Rank 0] step:9601/10000 train_time:863449ms step_avg:89.93ms +[2025-08-22 14:47:38] [Rank 0] step:9621/10000 train_time:865263ms step_avg:89.93ms +[2025-08-22 14:47:38] [Rank 0] step:9621/10000 train_time:865263ms step_avg:89.93ms +[2025-08-22 14:47:40] [Rank 0] step:9641/10000 train_time:867161ms step_avg:89.95ms +[2025-08-22 14:47:40] [Rank 0] step:9641/10000 train_time:867161ms step_avg:89.95ms +[2025-08-22 14:47:42] [Rank 0] step:9661/10000 train_time:869084ms step_avg:89.96ms +[2025-08-22 14:47:42] [Rank 0] step:9661/10000 train_time:869084ms step_avg:89.96ms +[2025-08-22 14:47:44] [Rank 0] step:9681/10000 train_time:871001ms step_avg:89.97ms +[2025-08-22 14:47:44] [Rank 0] step:9681/10000 train_time:871001ms step_avg:89.97ms +[2025-08-22 14:47:46] [Rank 0] step:9701/10000 train_time:872934ms step_avg:89.98ms +[2025-08-22 14:47:46] [Rank 0] step:9701/10000 train_time:872934ms step_avg:89.98ms +[2025-08-22 14:47:47] [Rank 0] step:9721/10000 train_time:874848ms step_avg:90.00ms +[2025-08-22 14:47:47] [Rank 0] step:9721/10000 train_time:874848ms step_avg:90.00ms +[2025-08-22 
14:47:49] [Rank 0] step:9741/10000 train_time:876792ms step_avg:90.01ms +[2025-08-22 14:47:49] [Rank 0] step:9741/10000 train_time:876792ms step_avg:90.01ms +[2025-08-22 14:47:51] [Rank 0] step:9761/10000 train_time:878720ms step_avg:90.02ms +[2025-08-22 14:47:51] [Rank 0] step:9761/10000 train_time:878720ms step_avg:90.02ms +[2025-08-22 14:47:53] [Rank 0] step:9781/10000 train_time:880653ms step_avg:90.04ms +[2025-08-22 14:47:53] [Rank 0] step:9781/10000 train_time:880653ms step_avg:90.04ms +[2025-08-22 14:47:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:47:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:48:09] [Rank 0] PRINT: step:9800/10000 val_loss:3.7462 svd_entropy: attn_qk:H=0.8883,top10E=0.08,eRank=375.0,q75/q25=51.80 attn_vo:H=0.6133,top10E=0.41,eRank=76.4,q75/q25=112.76 mlp_w1:H=0.7954,top10E=0.22,eRank=235.6,q75/q25=12.67 mlp_w2:H=0.9032,top10E=0.11,eRank=410.9,q75/q25=7.52 vo_prod:H=0.5180,top10E=0.58,eRank=39.5,q75/q25=11568.61 train_time:882693ms step_avg:90.07ms +[2025-08-22 14:48:09] [Rank 0] PRINT: step:9800/10000 val_loss:3.7462 svd_entropy: attn_qk:H=0.8883,top10E=0.08,eRank=375.0,q75/q25=51.80 attn_vo:H=0.6133,top10E=0.41,eRank=76.4,q75/q25=112.76 mlp_w1:H=0.7954,top10E=0.22,eRank=235.6,q75/q25=12.67 mlp_w2:H=0.9032,top10E=0.11,eRank=410.9,q75/q25=7.52 vo_prod:H=0.5180,top10E=0.58,eRank=39.5,q75/q25=11568.61 train_time:882693ms step_avg:90.07ms +[2025-08-22 14:48:09] [Rank 0] step:9801/10000 train_time:882708ms step_avg:90.06ms +[2025-08-22 14:48:09] [Rank 0] step:9801/10000 train_time:882708ms step_avg:90.06ms +[2025-08-22 14:48:11] [Rank 0] step:9821/10000 train_time:884543ms step_avg:90.07ms +[2025-08-22 14:48:11] [Rank 0] step:9821/10000 train_time:884543ms step_avg:90.07ms +[2025-08-22 14:48:13] [Rank 0] step:9841/10000 train_time:886476ms 
step_avg:90.08ms +[2025-08-22 14:48:13] [Rank 0] step:9841/10000 train_time:886476ms step_avg:90.08ms +[2025-08-22 14:48:15] [Rank 0] step:9861/10000 train_time:888393ms step_avg:90.09ms +[2025-08-22 14:48:15] [Rank 0] step:9861/10000 train_time:888393ms step_avg:90.09ms +[2025-08-22 14:48:17] [Rank 0] step:9881/10000 train_time:890310ms step_avg:90.10ms +[2025-08-22 14:48:17] [Rank 0] step:9881/10000 train_time:890310ms step_avg:90.10ms +[2025-08-22 14:48:18] [Rank 0] step:9901/10000 train_time:892243ms step_avg:90.12ms +[2025-08-22 14:48:18] [Rank 0] step:9901/10000 train_time:892243ms step_avg:90.12ms +[2025-08-22 14:48:20] [Rank 0] step:9921/10000 train_time:894172ms step_avg:90.13ms +[2025-08-22 14:48:20] [Rank 0] step:9921/10000 train_time:894172ms step_avg:90.13ms +[2025-08-22 14:48:22] [Rank 0] step:9941/10000 train_time:896101ms step_avg:90.14ms +[2025-08-22 14:48:22] [Rank 0] step:9941/10000 train_time:896101ms step_avg:90.14ms +[2025-08-22 14:48:24] [Rank 0] step:9961/10000 train_time:898027ms step_avg:90.15ms +[2025-08-22 14:48:24] [Rank 0] step:9961/10000 train_time:898027ms step_avg:90.15ms +[2025-08-22 14:48:26] [Rank 0] step:9981/10000 train_time:899956ms step_avg:90.17ms +[2025-08-22 14:48:26] [Rank 0] step:9981/10000 train_time:899956ms step_avg:90.17ms +[2025-08-22 14:48:28] [Rank 0] step:10000/10000 train_time:901792ms step_avg:90.18ms +[2025-08-22 14:48:28] [Rank 0] step:10000/10000 train_time:901792ms step_avg:90.18ms +[2025-08-22 14:48:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:48:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:48:41] [Rank 0] PRINT: step:10000/10000 val_loss:3.7374 svd_entropy: attn_qk:H=0.8883,top10E=0.08,eRank=375.0,q75/q25=51.71 attn_vo:H=0.6136,top10E=0.41,eRank=76.5,q75/q25=112.75 mlp_w1:H=0.7955,top10E=0.22,eRank=235.8,q75/q25=12.66 mlp_w2:H=0.9033,top10E=0.11,eRank=411.2,q75/q25=7.52 vo_prod:H=0.5182,top10E=0.58,eRank=39.6,q75/q25=11525.56 train_time:901993ms step_avg:90.20ms +[2025-08-22 14:48:41] [Rank 0] PRINT: step:10000/10000 val_loss:3.7374 svd_entropy: attn_qk:H=0.8883,top10E=0.08,eRank=375.0,q75/q25=51.71 attn_vo:H=0.6136,top10E=0.41,eRank=76.5,q75/q25=112.75 mlp_w1:H=0.7955,top10E=0.22,eRank=235.8,q75/q25=12.66 mlp_w2:H=0.9033,top10E=0.11,eRank=411.2,q75/q25=7.52 vo_prod:H=0.5182,top10E=0.58,eRank=39.6,q75/q25=11525.56 train_time:901993ms step_avg:90.20ms +[2025-08-22 14:48:41] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 14:48:41 2025 --- +[2025-08-22 14:48:41] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 14:48:41 2025 --- +[2025-08-22 14:48:41] [Rank 0] PRINT: Peak memory allocated: 11530 MiB reserved: 15796 MiB +[2025-08-22 14:48:41] [Rank 0] PRINT: Peak memory allocated: 11530 MiB reserved: 15796 MiB diff --git a/logs_svd_gated/mode_1_param_gated_seed_43/config.json b/logs_svd_gated/mode_1_param_gated_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..e126f74cbe024c35ec77c0c769bd2b501575818d --- /dev/null +++ b/logs_svd_gated/mode_1_param_gated_seed_43/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 1, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "cbf3d3c8-0ebf-4214-9ac5-9810106f77fd", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_1_param_gated_seed_43/training_log_cbf3d3c8-0ebf-4214-9ac5-9810106f77fd.txt b/logs_svd_gated/mode_1_param_gated_seed_43/training_log_cbf3d3c8-0ebf-4214-9ac5-9810106f77fd.txt new file mode 100644 index 0000000000000000000000000000000000000000..e7edcc0828ec39cf7f688b21e4ff5207064fb03c --- /dev/null +++ b/logs_svd_gated/mode_1_param_gated_seed_43/training_log_cbf3d3c8-0ebf-4214-9ac5-9810106f77fd.txt @@ -0,0 +1,2926 @@ +[2025-08-22 19:34:48] [Rank 0] PRINT: --- Script Start: Fri Aug 22 19:34:48 2025 --- +[2025-08-22 19:34:48] [Rank 0] PRINT: --- Script Start: Fri Aug 22 19:34:48 2025 --- +[2025-08-22 19:34:48] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=1, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 19:34:48] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=1, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 19:34:48] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 19:34:48] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 19:34:48] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 19:34:48] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 19:34:48] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_1_param_gated_seed_43 +[2025-08-22 19:34:48] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_1_param_gated_seed_43 +[2025-08-22 19:34:48] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 19:34:48] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 19:34:48] [Rank 0] PRINT: Constructing model... +[2025-08-22 19:34:48] [Rank 0] PRINT: Constructing model... +[2025-08-22 19:34:50] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 19:34:50] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 19:34:50] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 19:34:50] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 19:34:50] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 19:34:50] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 19:34:50] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 1 +[2025-08-22 19:34:50] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 1 +[2025-08-22 19:34:50] [Rank 0] PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: 0.05). +[2025-08-22 19:34:50] [Rank 0] PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: 0.05). +[2025-08-22 19:34:50] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 19:34:50] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 19:34:50] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-08-22 19:34:50] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-08-22 19:34:50] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 19:34:50] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 19:34:50] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 19:34:50] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 19:34:50] [Rank 0] PRINT: Starting warmup... +[2025-08-22 19:34:50] [Rank 0] PRINT: Starting warmup... +[2025-08-22 19:35:34] [Rank 0] PRINT: Warmup complete. +[2025-08-22 19:35:34] [Rank 0] PRINT: Warmup complete. +[2025-08-22 19:35:34] [Rank 0] PRINT: Starting training... +[2025-08-22 19:35:34] [Rank 0] PRINT: Starting training... 
+[2025-08-22 19:35:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:35:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:35:51] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 19:35:51] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 19:35:53] [Rank 0] step:21/10000 train_time:1645ms step_avg:78.32ms +[2025-08-22 19:35:53] [Rank 0] step:21/10000 train_time:1645ms step_avg:78.32ms +[2025-08-22 19:35:55] [Rank 0] step:41/10000 train_time:3309ms step_avg:80.70ms +[2025-08-22 19:35:55] [Rank 0] step:41/10000 train_time:3309ms step_avg:80.70ms +[2025-08-22 19:35:57] [Rank 0] step:61/10000 train_time:4978ms step_avg:81.61ms +[2025-08-22 19:35:57] [Rank 0] step:61/10000 train_time:4978ms step_avg:81.61ms +[2025-08-22 19:35:58] [Rank 0] step:81/10000 train_time:6649ms step_avg:82.09ms +[2025-08-22 19:35:58] [Rank 0] step:81/10000 train_time:6649ms step_avg:82.09ms +[2025-08-22 19:36:00] [Rank 0] step:101/10000 train_time:8323ms step_avg:82.41ms +[2025-08-22 19:36:00] [Rank 0] step:101/10000 train_time:8323ms step_avg:82.41ms +[2025-08-22 19:36:02] [Rank 0] step:121/10000 train_time:9997ms step_avg:82.62ms +[2025-08-22 19:36:02] [Rank 0] step:121/10000 
train_time:9997ms step_avg:82.62ms +[2025-08-22 19:36:03] [Rank 0] step:141/10000 train_time:11673ms step_avg:82.78ms +[2025-08-22 19:36:03] [Rank 0] step:141/10000 train_time:11673ms step_avg:82.78ms +[2025-08-22 19:36:05] [Rank 0] step:161/10000 train_time:13348ms step_avg:82.91ms +[2025-08-22 19:36:05] [Rank 0] step:161/10000 train_time:13348ms step_avg:82.91ms +[2025-08-22 19:36:07] [Rank 0] step:181/10000 train_time:15025ms step_avg:83.01ms +[2025-08-22 19:36:07] [Rank 0] step:181/10000 train_time:15025ms step_avg:83.01ms +[2025-08-22 19:36:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:36:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:36:22] [Rank 0] PRINT: step:200/10000 val_loss:6.7645 svd_entropy: attn_qk:H=0.8324,top10E=0.23,eRank=265.1,q75/q25=10.65 attn_vo:H=0.1236,top10E=0.98,eRank=2.6,q75/q25=1725.32 mlp_w1:H=0.3127,top10E=0.92,eRank=8.2,q75/q25=4.51 mlp_w2:H=0.3736,top10E=0.85,eRank=12.3,q75/q25=6.74 vo_prod:H=0.0224,top10E=1.00,eRank=1.3,q75/q25=10426.05 train_time:16786ms step_avg:83.93ms +[2025-08-22 19:36:22] [Rank 0] PRINT: step:200/10000 val_loss:6.7645 svd_entropy: attn_qk:H=0.8324,top10E=0.23,eRank=265.1,q75/q25=10.65 attn_vo:H=0.1236,top10E=0.98,eRank=2.6,q75/q25=1725.32 mlp_w1:H=0.3127,top10E=0.92,eRank=8.2,q75/q25=4.51 mlp_w2:H=0.3736,top10E=0.85,eRank=12.3,q75/q25=6.74 vo_prod:H=0.0224,top10E=1.00,eRank=1.3,q75/q25=10426.05 train_time:16786ms step_avg:83.93ms +[2025-08-22 19:36:22] [Rank 0] step:201/10000 train_time:16802ms step_avg:83.59ms +[2025-08-22 19:36:22] [Rank 0] step:201/10000 train_time:16802ms step_avg:83.59ms +[2025-08-22 19:36:24] [Rank 0] step:221/10000 train_time:18404ms step_avg:83.28ms +[2025-08-22 19:36:24] [Rank 0] step:221/10000 train_time:18404ms step_avg:83.28ms +[2025-08-22 19:36:25] [Rank 0] step:241/10000 
train_time:20074ms step_avg:83.30ms +[2025-08-22 19:36:25] [Rank 0] step:241/10000 train_time:20074ms step_avg:83.30ms +[2025-08-22 19:36:27] [Rank 0] step:261/10000 train_time:21745ms step_avg:83.32ms +[2025-08-22 19:36:27] [Rank 0] step:261/10000 train_time:21745ms step_avg:83.32ms +[2025-08-22 19:36:29] [Rank 0] step:281/10000 train_time:23418ms step_avg:83.34ms +[2025-08-22 19:36:29] [Rank 0] step:281/10000 train_time:23418ms step_avg:83.34ms +[2025-08-22 19:36:30] [Rank 0] step:301/10000 train_time:25090ms step_avg:83.35ms +[2025-08-22 19:36:30] [Rank 0] step:301/10000 train_time:25090ms step_avg:83.35ms +[2025-08-22 19:36:32] [Rank 0] step:321/10000 train_time:26761ms step_avg:83.37ms +[2025-08-22 19:36:32] [Rank 0] step:321/10000 train_time:26761ms step_avg:83.37ms +[2025-08-22 19:36:34] [Rank 0] step:341/10000 train_time:28434ms step_avg:83.38ms +[2025-08-22 19:36:34] [Rank 0] step:341/10000 train_time:28434ms step_avg:83.38ms +[2025-08-22 19:36:35] [Rank 0] step:361/10000 train_time:30105ms step_avg:83.39ms +[2025-08-22 19:36:35] [Rank 0] step:361/10000 train_time:30105ms step_avg:83.39ms +[2025-08-22 19:36:37] [Rank 0] step:381/10000 train_time:31778ms step_avg:83.41ms +[2025-08-22 19:36:37] [Rank 0] step:381/10000 train_time:31778ms step_avg:83.41ms +[2025-08-22 19:36:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:36:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:36:52] [Rank 0] PRINT: step:400/10000 val_loss:6.1015 svd_entropy: attn_qk:H=0.7972,top10E=0.20,eRank=219.9,q75/q25=12.94 attn_vo:H=0.2532,top10E=0.98,eRank=5.8,q75/q25=119.08 mlp_w1:H=0.4843,top10E=0.72,eRank=28.4,q75/q25=8.43 mlp_w2:H=0.5567,top10E=0.61,eRank=43.3,q75/q25=7.35 vo_prod:H=0.1424,top10E=1.00,eRank=2.8,q75/q25=906.20 train_time:33534ms step_avg:83.84ms +[2025-08-22 19:36:52] [Rank 0] PRINT: step:400/10000 val_loss:6.1015 svd_entropy: attn_qk:H=0.7972,top10E=0.20,eRank=219.9,q75/q25=12.94 attn_vo:H=0.2532,top10E=0.98,eRank=5.8,q75/q25=119.08 mlp_w1:H=0.4843,top10E=0.72,eRank=28.4,q75/q25=8.43 mlp_w2:H=0.5567,top10E=0.61,eRank=43.3,q75/q25=7.35 vo_prod:H=0.1424,top10E=1.00,eRank=2.8,q75/q25=906.20 train_time:33534ms step_avg:83.84ms +[2025-08-22 19:36:52] [Rank 0] step:401/10000 train_time:33549ms step_avg:83.66ms +[2025-08-22 19:36:52] [Rank 0] step:401/10000 train_time:33549ms step_avg:83.66ms +[2025-08-22 19:36:54] [Rank 0] step:421/10000 train_time:35148ms step_avg:83.49ms +[2025-08-22 19:36:54] [Rank 0] step:421/10000 train_time:35148ms step_avg:83.49ms +[2025-08-22 19:36:55] [Rank 0] step:441/10000 train_time:36813ms step_avg:83.48ms +[2025-08-22 19:36:55] [Rank 0] step:441/10000 train_time:36813ms step_avg:83.48ms +[2025-08-22 19:36:57] [Rank 0] step:461/10000 train_time:38480ms step_avg:83.47ms +[2025-08-22 19:36:57] [Rank 0] step:461/10000 train_time:38480ms step_avg:83.47ms +[2025-08-22 19:36:59] [Rank 0] step:481/10000 train_time:40148ms step_avg:83.47ms +[2025-08-22 19:36:59] [Rank 0] step:481/10000 train_time:40148ms step_avg:83.47ms +[2025-08-22 19:37:00] [Rank 0] step:501/10000 train_time:41816ms step_avg:83.46ms +[2025-08-22 19:37:00] [Rank 0] step:501/10000 train_time:41816ms step_avg:83.46ms +[2025-08-22 19:37:02] [Rank 0] step:521/10000 train_time:43483ms step_avg:83.46ms +[2025-08-22 19:37:02] [Rank 0] step:521/10000 train_time:43483ms step_avg:83.46ms +[2025-08-22 19:37:04] [Rank 0] step:541/10000 train_time:45152ms 
step_avg:83.46ms +[2025-08-22 19:37:04] [Rank 0] step:541/10000 train_time:45152ms step_avg:83.46ms +[2025-08-22 19:37:05] [Rank 0] step:561/10000 train_time:46821ms step_avg:83.46ms +[2025-08-22 19:37:05] [Rank 0] step:561/10000 train_time:46821ms step_avg:83.46ms +[2025-08-22 19:37:07] [Rank 0] step:581/10000 train_time:48489ms step_avg:83.46ms +[2025-08-22 19:37:07] [Rank 0] step:581/10000 train_time:48489ms step_avg:83.46ms +[2025-08-22 19:37:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:37:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:37:22] [Rank 0] PRINT: step:600/10000 val_loss:5.7279 svd_entropy: attn_qk:H=0.7987,top10E=0.17,eRank=221.6,q75/q25=23.86 attn_vo:H=0.2911,top10E=0.97,eRank=7.4,q75/q25=91.98 mlp_w1:H=0.5520,top10E=0.61,eRank=46.4,q75/q25=7.47 mlp_w2:H=0.6428,top10E=0.46,eRank=77.0,q75/q25=8.35 vo_prod:H=0.1903,top10E=1.00,eRank=3.8,q75/q25=754.39 train_time:50242ms step_avg:83.74ms +[2025-08-22 19:37:22] [Rank 0] PRINT: step:600/10000 val_loss:5.7279 svd_entropy: attn_qk:H=0.7987,top10E=0.17,eRank=221.6,q75/q25=23.86 attn_vo:H=0.2911,top10E=0.97,eRank=7.4,q75/q25=91.98 mlp_w1:H=0.5520,top10E=0.61,eRank=46.4,q75/q25=7.47 mlp_w2:H=0.6428,top10E=0.46,eRank=77.0,q75/q25=8.35 vo_prod:H=0.1903,top10E=1.00,eRank=3.8,q75/q25=754.39 train_time:50242ms step_avg:83.74ms +[2025-08-22 19:37:22] [Rank 0] step:601/10000 train_time:50257ms step_avg:83.62ms +[2025-08-22 19:37:22] [Rank 0] step:601/10000 train_time:50257ms step_avg:83.62ms +[2025-08-22 19:37:24] [Rank 0] step:621/10000 train_time:51861ms step_avg:83.51ms +[2025-08-22 19:37:24] [Rank 0] step:621/10000 train_time:51861ms step_avg:83.51ms +[2025-08-22 19:37:26] [Rank 0] step:641/10000 train_time:53526ms step_avg:83.50ms +[2025-08-22 19:37:26] [Rank 0] step:641/10000 train_time:53526ms step_avg:83.50ms 
+[2025-08-22 19:37:27] [Rank 0] step:661/10000 train_time:55193ms step_avg:83.50ms +[2025-08-22 19:37:27] [Rank 0] step:661/10000 train_time:55193ms step_avg:83.50ms +[2025-08-22 19:37:29] [Rank 0] step:681/10000 train_time:56859ms step_avg:83.49ms +[2025-08-22 19:37:29] [Rank 0] step:681/10000 train_time:56859ms step_avg:83.49ms +[2025-08-22 19:37:31] [Rank 0] step:701/10000 train_time:58526ms step_avg:83.49ms +[2025-08-22 19:37:31] [Rank 0] step:701/10000 train_time:58526ms step_avg:83.49ms +[2025-08-22 19:37:32] [Rank 0] step:721/10000 train_time:60194ms step_avg:83.49ms +[2025-08-22 19:37:32] [Rank 0] step:721/10000 train_time:60194ms step_avg:83.49ms +[2025-08-22 19:37:34] [Rank 0] step:741/10000 train_time:61862ms step_avg:83.48ms +[2025-08-22 19:37:34] [Rank 0] step:741/10000 train_time:61862ms step_avg:83.48ms +[2025-08-22 19:37:36] [Rank 0] step:761/10000 train_time:63543ms step_avg:83.50ms +[2025-08-22 19:37:36] [Rank 0] step:761/10000 train_time:63543ms step_avg:83.50ms +[2025-08-22 19:37:37] [Rank 0] step:781/10000 train_time:65226ms step_avg:83.52ms +[2025-08-22 19:37:37] [Rank 0] step:781/10000 train_time:65226ms step_avg:83.52ms +[2025-08-22 19:37:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:37:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:37:53] [Rank 0] PRINT: step:800/10000 val_loss:5.4880 svd_entropy: attn_qk:H=0.8062,top10E=0.16,eRank=231.0,q75/q25=38.56 attn_vo:H=0.3171,top10E=0.94,eRank=8.8,q75/q25=78.56 mlp_w1:H=0.5866,top10E=0.55,eRank=59.3,q75/q25=7.52 mlp_w2:H=0.6895,top10E=0.39,eRank=105.3,q75/q25=10.07 vo_prod:H=0.2184,top10E=1.00,eRank=4.6,q75/q25=747.64 train_time:66993ms step_avg:83.74ms +[2025-08-22 19:37:53] [Rank 0] PRINT: step:800/10000 val_loss:5.4880 svd_entropy: attn_qk:H=0.8062,top10E=0.16,eRank=231.0,q75/q25=38.56 attn_vo:H=0.3171,top10E=0.94,eRank=8.8,q75/q25=78.56 mlp_w1:H=0.5866,top10E=0.55,eRank=59.3,q75/q25=7.52 mlp_w2:H=0.6895,top10E=0.39,eRank=105.3,q75/q25=10.07 vo_prod:H=0.2184,top10E=1.00,eRank=4.6,q75/q25=747.64 train_time:66993ms step_avg:83.74ms +[2025-08-22 19:37:53] [Rank 0] step:801/10000 train_time:67007ms step_avg:83.65ms +[2025-08-22 19:37:53] [Rank 0] step:801/10000 train_time:67007ms step_avg:83.65ms +[2025-08-22 19:37:54] [Rank 0] step:821/10000 train_time:68626ms step_avg:83.59ms +[2025-08-22 19:37:54] [Rank 0] step:821/10000 train_time:68626ms step_avg:83.59ms +[2025-08-22 19:37:56] [Rank 0] step:841/10000 train_time:70306ms step_avg:83.60ms +[2025-08-22 19:37:56] [Rank 0] step:841/10000 train_time:70306ms step_avg:83.60ms +[2025-08-22 19:37:58] [Rank 0] step:861/10000 train_time:71993ms step_avg:83.62ms +[2025-08-22 19:37:58] [Rank 0] step:861/10000 train_time:71993ms step_avg:83.62ms +[2025-08-22 19:37:59] [Rank 0] step:881/10000 train_time:73675ms step_avg:83.63ms +[2025-08-22 19:37:59] [Rank 0] step:881/10000 train_time:73675ms step_avg:83.63ms +[2025-08-22 19:38:01] [Rank 0] step:901/10000 train_time:75358ms step_avg:83.64ms +[2025-08-22 19:38:01] [Rank 0] step:901/10000 train_time:75358ms step_avg:83.64ms +[2025-08-22 19:38:03] [Rank 0] step:921/10000 train_time:77043ms step_avg:83.65ms +[2025-08-22 19:38:03] [Rank 0] step:921/10000 train_time:77043ms step_avg:83.65ms +[2025-08-22 19:38:04] [Rank 0] step:941/10000 
train_time:78729ms step_avg:83.66ms +[2025-08-22 19:38:04] [Rank 0] step:941/10000 train_time:78729ms step_avg:83.66ms +[2025-08-22 19:38:06] [Rank 0] step:961/10000 train_time:80414ms step_avg:83.68ms +[2025-08-22 19:38:06] [Rank 0] step:961/10000 train_time:80414ms step_avg:83.68ms +[2025-08-22 19:38:08] [Rank 0] step:981/10000 train_time:82101ms step_avg:83.69ms +[2025-08-22 19:38:08] [Rank 0] step:981/10000 train_time:82101ms step_avg:83.69ms +[2025-08-22 19:38:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:38:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:38:23] [Rank 0] PRINT: step:1000/10000 val_loss:5.3176 svd_entropy: attn_qk:H=0.8132,top10E=0.15,eRank=240.5,q75/q25=50.07 attn_vo:H=0.3386,top10E=0.92,eRank=10.4,q75/q25=72.02 mlp_w1:H=0.6087,top10E=0.51,eRank=69.0,q75/q25=7.80 mlp_w2:H=0.7219,top10E=0.34,eRank=130.2,q75/q25=11.59 vo_prod:H=0.2428,top10E=0.99,eRank=5.5,q75/q25=791.93 train_time:83873ms step_avg:83.87ms +[2025-08-22 19:38:23] [Rank 0] PRINT: step:1000/10000 val_loss:5.3176 svd_entropy: attn_qk:H=0.8132,top10E=0.15,eRank=240.5,q75/q25=50.07 attn_vo:H=0.3386,top10E=0.92,eRank=10.4,q75/q25=72.02 mlp_w1:H=0.6087,top10E=0.51,eRank=69.0,q75/q25=7.80 mlp_w2:H=0.7219,top10E=0.34,eRank=130.2,q75/q25=11.59 vo_prod:H=0.2428,top10E=0.99,eRank=5.5,q75/q25=791.93 train_time:83873ms step_avg:83.87ms +[2025-08-22 19:38:23] [Rank 0] step:1001/10000 train_time:83887ms step_avg:83.80ms +[2025-08-22 19:38:23] [Rank 0] step:1001/10000 train_time:83887ms step_avg:83.80ms +[2025-08-22 19:38:25] [Rank 0] step:1021/10000 train_time:85484ms step_avg:83.73ms +[2025-08-22 19:38:25] [Rank 0] step:1021/10000 train_time:85484ms step_avg:83.73ms +[2025-08-22 19:38:26] [Rank 0] step:1041/10000 train_time:87166ms step_avg:83.73ms +[2025-08-22 19:38:26] [Rank 0] step:1041/10000 
train_time:87166ms step_avg:83.73ms +[2025-08-22 19:38:28] [Rank 0] step:1061/10000 train_time:88848ms step_avg:83.74ms +[2025-08-22 19:38:28] [Rank 0] step:1061/10000 train_time:88848ms step_avg:83.74ms +[2025-08-22 19:38:30] [Rank 0] step:1081/10000 train_time:90531ms step_avg:83.75ms +[2025-08-22 19:38:30] [Rank 0] step:1081/10000 train_time:90531ms step_avg:83.75ms +[2025-08-22 19:38:31] [Rank 0] step:1101/10000 train_time:92215ms step_avg:83.76ms +[2025-08-22 19:38:31] [Rank 0] step:1101/10000 train_time:92215ms step_avg:83.76ms +[2025-08-22 19:38:33] [Rank 0] step:1121/10000 train_time:93899ms step_avg:83.76ms +[2025-08-22 19:38:33] [Rank 0] step:1121/10000 train_time:93899ms step_avg:83.76ms +[2025-08-22 19:38:35] [Rank 0] step:1141/10000 train_time:95584ms step_avg:83.77ms +[2025-08-22 19:38:35] [Rank 0] step:1141/10000 train_time:95584ms step_avg:83.77ms +[2025-08-22 19:38:37] [Rank 0] step:1161/10000 train_time:97269ms step_avg:83.78ms +[2025-08-22 19:38:37] [Rank 0] step:1161/10000 train_time:97269ms step_avg:83.78ms +[2025-08-22 19:38:38] [Rank 0] step:1181/10000 train_time:98954ms step_avg:83.79ms +[2025-08-22 19:38:38] [Rank 0] step:1181/10000 train_time:98954ms step_avg:83.79ms +[2025-08-22 19:38:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:38:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:38:53] [Rank 0] PRINT: step:1200/10000 val_loss:5.1781 svd_entropy: attn_qk:H=0.8191,top10E=0.14,eRank=249.2,q75/q25=57.60 attn_vo:H=0.3583,top10E=0.89,eRank=12.1,q75/q25=68.97 mlp_w1:H=0.6249,top10E=0.49,eRank=77.2,q75/q25=8.26 mlp_w2:H=0.7453,top10E=0.31,eRank=152.1,q75/q25=12.77 vo_prod:H=0.2620,top10E=0.98,eRank=6.3,q75/q25=862.39 train_time:100724ms step_avg:83.94ms +[2025-08-22 19:38:53] [Rank 0] PRINT: step:1200/10000 val_loss:5.1781 svd_entropy: attn_qk:H=0.8191,top10E=0.14,eRank=249.2,q75/q25=57.60 attn_vo:H=0.3583,top10E=0.89,eRank=12.1,q75/q25=68.97 mlp_w1:H=0.6249,top10E=0.49,eRank=77.2,q75/q25=8.26 mlp_w2:H=0.7453,top10E=0.31,eRank=152.1,q75/q25=12.77 vo_prod:H=0.2620,top10E=0.98,eRank=6.3,q75/q25=862.39 train_time:100724ms step_avg:83.94ms +[2025-08-22 19:38:53] [Rank 0] step:1201/10000 train_time:100739ms step_avg:83.88ms +[2025-08-22 19:38:53] [Rank 0] step:1201/10000 train_time:100739ms step_avg:83.88ms +[2025-08-22 19:38:55] [Rank 0] step:1221/10000 train_time:102346ms step_avg:83.82ms +[2025-08-22 19:38:55] [Rank 0] step:1221/10000 train_time:102346ms step_avg:83.82ms +[2025-08-22 19:38:57] [Rank 0] step:1241/10000 train_time:104026ms step_avg:83.82ms +[2025-08-22 19:38:57] [Rank 0] step:1241/10000 train_time:104026ms step_avg:83.82ms +[2025-08-22 19:38:58] [Rank 0] step:1261/10000 train_time:105708ms step_avg:83.83ms +[2025-08-22 19:38:58] [Rank 0] step:1261/10000 train_time:105708ms step_avg:83.83ms +[2025-08-22 19:39:00] [Rank 0] step:1281/10000 train_time:107390ms step_avg:83.83ms +[2025-08-22 19:39:00] [Rank 0] step:1281/10000 train_time:107390ms step_avg:83.83ms +[2025-08-22 19:39:02] [Rank 0] step:1301/10000 train_time:109073ms step_avg:83.84ms +[2025-08-22 19:39:02] [Rank 0] step:1301/10000 train_time:109073ms step_avg:83.84ms +[2025-08-22 19:39:04] [Rank 0] step:1321/10000 train_time:110756ms step_avg:83.84ms +[2025-08-22 19:39:04] [Rank 0] step:1321/10000 train_time:110756ms step_avg:83.84ms +[2025-08-22 19:39:05] [Rank 
0] step:1341/10000 train_time:112438ms step_avg:83.85ms +[2025-08-22 19:39:05] [Rank 0] step:1341/10000 train_time:112438ms step_avg:83.85ms +[2025-08-22 19:39:07] [Rank 0] step:1361/10000 train_time:114125ms step_avg:83.85ms +[2025-08-22 19:39:07] [Rank 0] step:1361/10000 train_time:114125ms step_avg:83.85ms +[2025-08-22 19:39:09] [Rank 0] step:1381/10000 train_time:115809ms step_avg:83.86ms +[2025-08-22 19:39:09] [Rank 0] step:1381/10000 train_time:115809ms step_avg:83.86ms +[2025-08-22 19:39:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:39:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:39:24] [Rank 0] PRINT: step:1400/10000 val_loss:5.0889 svd_entropy: attn_qk:H=0.8238,top10E=0.14,eRank=256.5,q75/q25=63.34 attn_vo:H=0.3763,top10E=0.86,eRank=14.0,q75/q25=67.82 mlp_w1:H=0.6373,top10E=0.46,eRank=84.3,q75/q25=8.91 mlp_w2:H=0.7629,top10E=0.29,eRank=171.2,q75/q25=13.54 vo_prod:H=0.2795,top10E=0.96,eRank=7.2,q75/q25=969.47 train_time:117580ms step_avg:83.99ms +[2025-08-22 19:39:24] [Rank 0] PRINT: step:1400/10000 val_loss:5.0889 svd_entropy: attn_qk:H=0.8238,top10E=0.14,eRank=256.5,q75/q25=63.34 attn_vo:H=0.3763,top10E=0.86,eRank=14.0,q75/q25=67.82 mlp_w1:H=0.6373,top10E=0.46,eRank=84.3,q75/q25=8.91 mlp_w2:H=0.7629,top10E=0.29,eRank=171.2,q75/q25=13.54 vo_prod:H=0.2795,top10E=0.96,eRank=7.2,q75/q25=969.47 train_time:117580ms step_avg:83.99ms +[2025-08-22 19:39:24] [Rank 0] step:1401/10000 train_time:117594ms step_avg:83.94ms +[2025-08-22 19:39:24] [Rank 0] step:1401/10000 train_time:117594ms step_avg:83.94ms +[2025-08-22 19:39:26] [Rank 0] step:1421/10000 train_time:119257ms step_avg:83.92ms +[2025-08-22 19:39:26] [Rank 0] step:1421/10000 train_time:119257ms step_avg:83.92ms +[2025-08-22 19:39:27] [Rank 0] step:1441/10000 train_time:120942ms step_avg:83.93ms +[2025-08-22 
19:39:27] [Rank 0] step:1441/10000 train_time:120942ms step_avg:83.93ms +[2025-08-22 19:39:29] [Rank 0] step:1461/10000 train_time:122628ms step_avg:83.93ms +[2025-08-22 19:39:29] [Rank 0] step:1461/10000 train_time:122628ms step_avg:83.93ms +[2025-08-22 19:39:31] [Rank 0] step:1481/10000 train_time:124314ms step_avg:83.94ms +[2025-08-22 19:39:31] [Rank 0] step:1481/10000 train_time:124314ms step_avg:83.94ms +[2025-08-22 19:39:32] [Rank 0] step:1501/10000 train_time:126017ms step_avg:83.96ms +[2025-08-22 19:39:32] [Rank 0] step:1501/10000 train_time:126017ms step_avg:83.96ms +[2025-08-22 19:39:34] [Rank 0] step:1521/10000 train_time:127716ms step_avg:83.97ms +[2025-08-22 19:39:34] [Rank 0] step:1521/10000 train_time:127716ms step_avg:83.97ms +[2025-08-22 19:39:36] [Rank 0] step:1541/10000 train_time:129416ms step_avg:83.98ms +[2025-08-22 19:39:36] [Rank 0] step:1541/10000 train_time:129416ms step_avg:83.98ms +[2025-08-22 19:39:37] [Rank 0] step:1561/10000 train_time:131117ms step_avg:84.00ms +[2025-08-22 19:39:37] [Rank 0] step:1561/10000 train_time:131117ms step_avg:84.00ms +[2025-08-22 19:39:39] [Rank 0] step:1581/10000 train_time:132818ms step_avg:84.01ms +[2025-08-22 19:39:39] [Rank 0] step:1581/10000 train_time:132818ms step_avg:84.01ms +[2025-08-22 19:39:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:39:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:39:54] [Rank 0] PRINT: step:1600/10000 val_loss:4.9428 svd_entropy: attn_qk:H=0.8276,top10E=0.13,eRank=262.7,q75/q25=67.19 attn_vo:H=0.3927,top10E=0.84,eRank=15.9,q75/q25=67.39 mlp_w1:H=0.6477,top10E=0.45,eRank=90.9,q75/q25=9.50 mlp_w2:H=0.7763,top10E=0.27,eRank=187.7,q75/q25=13.86 vo_prod:H=0.2962,top10E=0.94,eRank=8.3,q75/q25=1125.31 train_time:134606ms step_avg:84.13ms +[2025-08-22 19:39:54] [Rank 0] PRINT: step:1600/10000 val_loss:4.9428 svd_entropy: attn_qk:H=0.8276,top10E=0.13,eRank=262.7,q75/q25=67.19 attn_vo:H=0.3927,top10E=0.84,eRank=15.9,q75/q25=67.39 mlp_w1:H=0.6477,top10E=0.45,eRank=90.9,q75/q25=9.50 mlp_w2:H=0.7763,top10E=0.27,eRank=187.7,q75/q25=13.86 vo_prod:H=0.2962,top10E=0.94,eRank=8.3,q75/q25=1125.31 train_time:134606ms step_avg:84.13ms +[2025-08-22 19:39:54] [Rank 0] step:1601/10000 train_time:134620ms step_avg:84.09ms +[2025-08-22 19:39:54] [Rank 0] step:1601/10000 train_time:134620ms step_avg:84.09ms +[2025-08-22 19:39:56] [Rank 0] step:1621/10000 train_time:136238ms step_avg:84.05ms +[2025-08-22 19:39:56] [Rank 0] step:1621/10000 train_time:136238ms step_avg:84.05ms +[2025-08-22 19:39:58] [Rank 0] step:1641/10000 train_time:137933ms step_avg:84.05ms +[2025-08-22 19:39:58] [Rank 0] step:1641/10000 train_time:137933ms step_avg:84.05ms +[2025-08-22 19:39:59] [Rank 0] step:1661/10000 train_time:139627ms step_avg:84.06ms +[2025-08-22 19:39:59] [Rank 0] step:1661/10000 train_time:139627ms step_avg:84.06ms +[2025-08-22 19:40:01] [Rank 0] step:1681/10000 train_time:141324ms step_avg:84.07ms +[2025-08-22 19:40:01] [Rank 0] step:1681/10000 train_time:141324ms step_avg:84.07ms +[2025-08-22 19:40:03] [Rank 0] step:1701/10000 train_time:143020ms step_avg:84.08ms +[2025-08-22 19:40:03] [Rank 0] step:1701/10000 train_time:143020ms step_avg:84.08ms +[2025-08-22 19:40:05] [Rank 0] step:1721/10000 train_time:144715ms step_avg:84.09ms +[2025-08-22 19:40:05] [Rank 0] step:1721/10000 train_time:144715ms step_avg:84.09ms +[2025-08-22 19:40:06] 
[Rank 0] step:1741/10000 train_time:146414ms step_avg:84.10ms +[2025-08-22 19:40:06] [Rank 0] step:1741/10000 train_time:146414ms step_avg:84.10ms +[2025-08-22 19:40:08] [Rank 0] step:1761/10000 train_time:148108ms step_avg:84.10ms +[2025-08-22 19:40:08] [Rank 0] step:1761/10000 train_time:148108ms step_avg:84.10ms +[2025-08-22 19:40:10] [Rank 0] step:1781/10000 train_time:149807ms step_avg:84.11ms +[2025-08-22 19:40:10] [Rank 0] step:1781/10000 train_time:149807ms step_avg:84.11ms +[2025-08-22 19:40:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:40:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:40:25] [Rank 0] PRINT: step:1800/10000 val_loss:4.8228 svd_entropy: attn_qk:H=0.8308,top10E=0.13,eRank=267.9,q75/q25=71.08 attn_vo:H=0.4079,top10E=0.81,eRank=17.9,q75/q25=68.88 mlp_w1:H=0.6566,top10E=0.43,eRank=97.0,q75/q25=10.09 mlp_w2:H=0.7872,top10E=0.26,eRank=202.2,q75/q25=13.87 vo_prod:H=0.3125,top10E=0.93,eRank=9.4,q75/q25=1363.23 train_time:151589ms step_avg:84.22ms +[2025-08-22 19:40:25] [Rank 0] PRINT: step:1800/10000 val_loss:4.8228 svd_entropy: attn_qk:H=0.8308,top10E=0.13,eRank=267.9,q75/q25=71.08 attn_vo:H=0.4079,top10E=0.81,eRank=17.9,q75/q25=68.88 mlp_w1:H=0.6566,top10E=0.43,eRank=97.0,q75/q25=10.09 mlp_w2:H=0.7872,top10E=0.26,eRank=202.2,q75/q25=13.87 vo_prod:H=0.3125,top10E=0.93,eRank=9.4,q75/q25=1363.23 train_time:151589ms step_avg:84.22ms +[2025-08-22 19:40:25] [Rank 0] step:1801/10000 train_time:151604ms step_avg:84.18ms +[2025-08-22 19:40:25] [Rank 0] step:1801/10000 train_time:151604ms step_avg:84.18ms +[2025-08-22 19:40:27] [Rank 0] step:1821/10000 train_time:153265ms step_avg:84.17ms +[2025-08-22 19:40:27] [Rank 0] step:1821/10000 train_time:153265ms step_avg:84.17ms +[2025-08-22 19:40:28] [Rank 0] step:1841/10000 train_time:154997ms step_avg:84.19ms 
+[2025-08-22 19:40:28] [Rank 0] step:1841/10000 train_time:154997ms step_avg:84.19ms +[2025-08-22 19:40:30] [Rank 0] step:1861/10000 train_time:156688ms step_avg:84.20ms +[2025-08-22 19:40:30] [Rank 0] step:1861/10000 train_time:156688ms step_avg:84.20ms +[2025-08-22 19:40:32] [Rank 0] step:1881/10000 train_time:158382ms step_avg:84.20ms +[2025-08-22 19:40:32] [Rank 0] step:1881/10000 train_time:158382ms step_avg:84.20ms +[2025-08-22 19:40:33] [Rank 0] step:1901/10000 train_time:160076ms step_avg:84.21ms +[2025-08-22 19:40:33] [Rank 0] step:1901/10000 train_time:160076ms step_avg:84.21ms +[2025-08-22 19:40:35] [Rank 0] step:1921/10000 train_time:161770ms step_avg:84.21ms +[2025-08-22 19:40:35] [Rank 0] step:1921/10000 train_time:161770ms step_avg:84.21ms +[2025-08-22 19:40:37] [Rank 0] step:1941/10000 train_time:163465ms step_avg:84.22ms +[2025-08-22 19:40:37] [Rank 0] step:1941/10000 train_time:163465ms step_avg:84.22ms +[2025-08-22 19:40:39] [Rank 0] step:1961/10000 train_time:165160ms step_avg:84.22ms +[2025-08-22 19:40:39] [Rank 0] step:1961/10000 train_time:165160ms step_avg:84.22ms +[2025-08-22 19:40:40] [Rank 0] step:1981/10000 train_time:166857ms step_avg:84.23ms +[2025-08-22 19:40:40] [Rank 0] step:1981/10000 train_time:166857ms step_avg:84.23ms +[2025-08-22 19:40:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:40:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:40:55] [Rank 0] PRINT: step:2000/10000 val_loss:4.7547 svd_entropy: attn_qk:H=0.8335,top10E=0.13,eRank=272.6,q75/q25=74.84 attn_vo:H=0.4211,top10E=0.79,eRank=19.9,q75/q25=70.70 mlp_w1:H=0.6644,top10E=0.42,eRank=102.6,q75/q25=10.63 mlp_w2:H=0.7959,top10E=0.25,eRank=214.7,q75/q25=13.70 vo_prod:H=0.3263,top10E=0.91,eRank=10.4,q75/q25=1717.30 train_time:168639ms step_avg:84.32ms +[2025-08-22 19:40:55] [Rank 0] PRINT: step:2000/10000 val_loss:4.7547 svd_entropy: attn_qk:H=0.8335,top10E=0.13,eRank=272.6,q75/q25=74.84 attn_vo:H=0.4211,top10E=0.79,eRank=19.9,q75/q25=70.70 mlp_w1:H=0.6644,top10E=0.42,eRank=102.6,q75/q25=10.63 mlp_w2:H=0.7959,top10E=0.25,eRank=214.7,q75/q25=13.70 vo_prod:H=0.3263,top10E=0.91,eRank=10.4,q75/q25=1717.30 train_time:168639ms step_avg:84.32ms +[2025-08-22 19:40:55] [Rank 0] step:2001/10000 train_time:168653ms step_avg:84.28ms +[2025-08-22 19:40:55] [Rank 0] step:2001/10000 train_time:168653ms step_avg:84.28ms +[2025-08-22 19:40:57] [Rank 0] step:2021/10000 train_time:170279ms step_avg:84.26ms +[2025-08-22 19:40:57] [Rank 0] step:2021/10000 train_time:170279ms step_avg:84.26ms +[2025-08-22 19:40:59] [Rank 0] step:2041/10000 train_time:172143ms step_avg:84.34ms +[2025-08-22 19:40:59] [Rank 0] step:2041/10000 train_time:172143ms step_avg:84.34ms +[2025-08-22 19:41:01] [Rank 0] step:2061/10000 train_time:173840ms step_avg:84.35ms +[2025-08-22 19:41:01] [Rank 0] step:2061/10000 train_time:173840ms step_avg:84.35ms +[2025-08-22 19:41:02] [Rank 0] step:2081/10000 train_time:175537ms step_avg:84.35ms +[2025-08-22 19:41:02] [Rank 0] step:2081/10000 train_time:175537ms step_avg:84.35ms +[2025-08-22 19:41:04] [Rank 0] step:2101/10000 train_time:177236ms step_avg:84.36ms +[2025-08-22 19:41:04] [Rank 0] step:2101/10000 train_time:177236ms step_avg:84.36ms +[2025-08-22 19:41:06] [Rank 0] step:2121/10000 train_time:178937ms step_avg:84.36ms +[2025-08-22 19:41:06] [Rank 0] step:2121/10000 train_time:178937ms step_avg:84.36ms +[2025-08-22 
19:41:08] [Rank 0] step:2141/10000 train_time:180637ms step_avg:84.37ms +[2025-08-22 19:41:08] [Rank 0] step:2141/10000 train_time:180637ms step_avg:84.37ms +[2025-08-22 19:41:09] [Rank 0] step:2161/10000 train_time:182338ms step_avg:84.38ms +[2025-08-22 19:41:09] [Rank 0] step:2161/10000 train_time:182338ms step_avg:84.38ms +[2025-08-22 19:41:11] [Rank 0] step:2181/10000 train_time:184041ms step_avg:84.38ms +[2025-08-22 19:41:11] [Rank 0] step:2181/10000 train_time:184041ms step_avg:84.38ms +[2025-08-22 19:41:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:41:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:41:26] [Rank 0] PRINT: step:2200/10000 val_loss:4.6680 svd_entropy: attn_qk:H=0.8358,top10E=0.12,eRank=276.7,q75/q25=77.75 attn_vo:H=0.4319,top10E=0.77,eRank=21.7,q75/q25=73.35 mlp_w1:H=0.6711,top10E=0.41,eRank=107.5,q75/q25=11.09 mlp_w2:H=0.8033,top10E=0.24,eRank=225.6,q75/q25=13.44 vo_prod:H=0.3367,top10E=0.90,eRank=11.3,q75/q25=2141.08 train_time:185828ms step_avg:84.47ms +[2025-08-22 19:41:26] [Rank 0] PRINT: step:2200/10000 val_loss:4.6680 svd_entropy: attn_qk:H=0.8358,top10E=0.12,eRank=276.7,q75/q25=77.75 attn_vo:H=0.4319,top10E=0.77,eRank=21.7,q75/q25=73.35 mlp_w1:H=0.6711,top10E=0.41,eRank=107.5,q75/q25=11.09 mlp_w2:H=0.8033,top10E=0.24,eRank=225.6,q75/q25=13.44 vo_prod:H=0.3367,top10E=0.90,eRank=11.3,q75/q25=2141.08 train_time:185828ms step_avg:84.47ms +[2025-08-22 19:41:26] [Rank 0] step:2201/10000 train_time:185843ms step_avg:84.44ms +[2025-08-22 19:41:26] [Rank 0] step:2201/10000 train_time:185843ms step_avg:84.44ms +[2025-08-22 19:41:28] [Rank 0] step:2221/10000 train_time:187461ms step_avg:84.40ms +[2025-08-22 19:41:28] [Rank 0] step:2221/10000 train_time:187461ms step_avg:84.40ms +[2025-08-22 19:41:30] [Rank 0] step:2241/10000 train_time:189239ms 
step_avg:84.44ms +[2025-08-22 19:41:30] [Rank 0] step:2241/10000 train_time:189239ms step_avg:84.44ms +[2025-08-22 19:41:31] [Rank 0] step:2261/10000 train_time:191033ms step_avg:84.49ms +[2025-08-22 19:41:31] [Rank 0] step:2261/10000 train_time:191033ms step_avg:84.49ms +[2025-08-22 19:41:33] [Rank 0] step:2281/10000 train_time:192774ms step_avg:84.51ms +[2025-08-22 19:41:33] [Rank 0] step:2281/10000 train_time:192774ms step_avg:84.51ms +[2025-08-22 19:41:35] [Rank 0] step:2301/10000 train_time:194513ms step_avg:84.53ms +[2025-08-22 19:41:35] [Rank 0] step:2301/10000 train_time:194513ms step_avg:84.53ms +[2025-08-22 19:41:37] [Rank 0] step:2321/10000 train_time:196253ms step_avg:84.56ms +[2025-08-22 19:41:37] [Rank 0] step:2321/10000 train_time:196253ms step_avg:84.56ms +[2025-08-22 19:41:38] [Rank 0] step:2341/10000 train_time:197994ms step_avg:84.58ms +[2025-08-22 19:41:38] [Rank 0] step:2341/10000 train_time:197994ms step_avg:84.58ms +[2025-08-22 19:41:40] [Rank 0] step:2361/10000 train_time:199735ms step_avg:84.60ms +[2025-08-22 19:41:40] [Rank 0] step:2361/10000 train_time:199735ms step_avg:84.60ms +[2025-08-22 19:41:42] [Rank 0] step:2381/10000 train_time:201478ms step_avg:84.62ms +[2025-08-22 19:41:42] [Rank 0] step:2381/10000 train_time:201478ms step_avg:84.62ms +[2025-08-22 19:41:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:41:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:41:57] [Rank 0] PRINT: step:2400/10000 val_loss:4.5838 svd_entropy: attn_qk:H=0.8382,top10E=0.12,eRank=280.6,q75/q25=80.69 attn_vo:H=0.4421,top10E=0.75,eRank=23.5,q75/q25=76.03 mlp_w1:H=0.6770,top10E=0.40,eRank=112.3,q75/q25=11.53 mlp_w2:H=0.8095,top10E=0.23,eRank=235.3,q75/q25=13.11 vo_prod:H=0.3471,top10E=0.88,eRank=12.3,q75/q25=2620.63 train_time:203306ms step_avg:84.71ms +[2025-08-22 19:41:57] [Rank 0] PRINT: step:2400/10000 val_loss:4.5838 svd_entropy: attn_qk:H=0.8382,top10E=0.12,eRank=280.6,q75/q25=80.69 attn_vo:H=0.4421,top10E=0.75,eRank=23.5,q75/q25=76.03 mlp_w1:H=0.6770,top10E=0.40,eRank=112.3,q75/q25=11.53 mlp_w2:H=0.8095,top10E=0.23,eRank=235.3,q75/q25=13.11 vo_prod:H=0.3471,top10E=0.88,eRank=12.3,q75/q25=2620.63 train_time:203306ms step_avg:84.71ms +[2025-08-22 19:41:57] [Rank 0] step:2401/10000 train_time:203321ms step_avg:84.68ms +[2025-08-22 19:41:57] [Rank 0] step:2401/10000 train_time:203321ms step_avg:84.68ms +[2025-08-22 19:41:59] [Rank 0] step:2421/10000 train_time:204989ms step_avg:84.67ms +[2025-08-22 19:41:59] [Rank 0] step:2421/10000 train_time:204989ms step_avg:84.67ms +[2025-08-22 19:42:01] [Rank 0] step:2441/10000 train_time:206727ms step_avg:84.69ms +[2025-08-22 19:42:01] [Rank 0] step:2441/10000 train_time:206727ms step_avg:84.69ms +[2025-08-22 19:42:02] [Rank 0] step:2461/10000 train_time:208464ms step_avg:84.71ms +[2025-08-22 19:42:02] [Rank 0] step:2461/10000 train_time:208464ms step_avg:84.71ms +[2025-08-22 19:42:04] [Rank 0] step:2481/10000 train_time:210202ms step_avg:84.72ms +[2025-08-22 19:42:04] [Rank 0] step:2481/10000 train_time:210202ms step_avg:84.72ms +[2025-08-22 19:42:06] [Rank 0] step:2501/10000 train_time:211940ms step_avg:84.74ms +[2025-08-22 19:42:06] [Rank 0] step:2501/10000 train_time:211940ms step_avg:84.74ms +[2025-08-22 19:42:08] [Rank 0] step:2521/10000 train_time:213681ms step_avg:84.76ms +[2025-08-22 19:42:08] [Rank 0] step:2521/10000 train_time:213681ms step_avg:84.76ms +[2025-08-22 
19:42:09] [Rank 0] step:2541/10000 train_time:215422ms step_avg:84.78ms +[2025-08-22 19:42:09] [Rank 0] step:2541/10000 train_time:215422ms step_avg:84.78ms +[2025-08-22 19:42:11] [Rank 0] step:2561/10000 train_time:217165ms step_avg:84.80ms +[2025-08-22 19:42:11] [Rank 0] step:2561/10000 train_time:217165ms step_avg:84.80ms +[2025-08-22 19:42:13] [Rank 0] step:2581/10000 train_time:218907ms step_avg:84.81ms +[2025-08-22 19:42:13] [Rank 0] step:2581/10000 train_time:218907ms step_avg:84.81ms +[2025-08-22 19:42:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:42:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:42:28] [Rank 0] PRINT: step:2600/10000 val_loss:4.5082 svd_entropy: attn_qk:H=0.8402,top10E=0.12,eRank=284.3,q75/q25=82.84 attn_vo:H=0.4510,top10E=0.73,eRank=25.2,q75/q25=78.55 mlp_w1:H=0.6824,top10E=0.39,eRank=116.7,q75/q25=11.90 mlp_w2:H=0.8150,top10E=0.22,eRank=244.0,q75/q25=12.85 vo_prod:H=0.3559,top10E=0.87,eRank=13.2,q75/q25=3104.69 train_time:220735ms step_avg:84.90ms +[2025-08-22 19:42:28] [Rank 0] PRINT: step:2600/10000 val_loss:4.5082 svd_entropy: attn_qk:H=0.8402,top10E=0.12,eRank=284.3,q75/q25=82.84 attn_vo:H=0.4510,top10E=0.73,eRank=25.2,q75/q25=78.55 mlp_w1:H=0.6824,top10E=0.39,eRank=116.7,q75/q25=11.90 mlp_w2:H=0.8150,top10E=0.22,eRank=244.0,q75/q25=12.85 vo_prod:H=0.3559,top10E=0.87,eRank=13.2,q75/q25=3104.69 train_time:220735ms step_avg:84.90ms +[2025-08-22 19:42:28] [Rank 0] step:2601/10000 train_time:220749ms step_avg:84.87ms +[2025-08-22 19:42:28] [Rank 0] step:2601/10000 train_time:220749ms step_avg:84.87ms +[2025-08-22 19:42:30] [Rank 0] step:2621/10000 train_time:222407ms step_avg:84.86ms +[2025-08-22 19:42:30] [Rank 0] step:2621/10000 train_time:222407ms step_avg:84.86ms +[2025-08-22 19:42:32] [Rank 0] step:2641/10000 train_time:224144ms 
step_avg:84.87ms +[2025-08-22 19:42:32] [Rank 0] step:2641/10000 train_time:224144ms step_avg:84.87ms +[2025-08-22 19:42:33] [Rank 0] step:2661/10000 train_time:225935ms step_avg:84.91ms +[2025-08-22 19:42:33] [Rank 0] step:2661/10000 train_time:225935ms step_avg:84.91ms +[2025-08-22 19:42:35] [Rank 0] step:2681/10000 train_time:227675ms step_avg:84.92ms +[2025-08-22 19:42:35] [Rank 0] step:2681/10000 train_time:227675ms step_avg:84.92ms +[2025-08-22 19:42:37] [Rank 0] step:2701/10000 train_time:229415ms step_avg:84.94ms +[2025-08-22 19:42:37] [Rank 0] step:2701/10000 train_time:229415ms step_avg:84.94ms +[2025-08-22 19:42:39] [Rank 0] step:2721/10000 train_time:231157ms step_avg:84.95ms +[2025-08-22 19:42:39] [Rank 0] step:2721/10000 train_time:231157ms step_avg:84.95ms +[2025-08-22 19:42:40] [Rank 0] step:2741/10000 train_time:232899ms step_avg:84.97ms +[2025-08-22 19:42:40] [Rank 0] step:2741/10000 train_time:232899ms step_avg:84.97ms +[2025-08-22 19:42:42] [Rank 0] step:2761/10000 train_time:234643ms step_avg:84.98ms +[2025-08-22 19:42:42] [Rank 0] step:2761/10000 train_time:234643ms step_avg:84.98ms +[2025-08-22 19:42:44] [Rank 0] step:2781/10000 train_time:236387ms step_avg:85.00ms +[2025-08-22 19:42:44] [Rank 0] step:2781/10000 train_time:236387ms step_avg:85.00ms +[2025-08-22 19:42:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:42:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:42:59] [Rank 0] PRINT: step:2800/10000 val_loss:4.4703 svd_entropy: attn_qk:H=0.8421,top10E=0.12,eRank=287.7,q75/q25=83.59 attn_vo:H=0.4594,top10E=0.72,eRank=26.9,q75/q25=82.21 mlp_w1:H=0.6874,top10E=0.38,eRank=121.0,q75/q25=12.21 mlp_w2:H=0.8200,top10E=0.22,eRank=252.1,q75/q25=12.51 vo_prod:H=0.3652,top10E=0.86,eRank=14.1,q75/q25=3610.03 train_time:238217ms step_avg:85.08ms +[2025-08-22 19:42:59] [Rank 0] PRINT: step:2800/10000 val_loss:4.4703 svd_entropy: attn_qk:H=0.8421,top10E=0.12,eRank=287.7,q75/q25=83.59 attn_vo:H=0.4594,top10E=0.72,eRank=26.9,q75/q25=82.21 mlp_w1:H=0.6874,top10E=0.38,eRank=121.0,q75/q25=12.21 mlp_w2:H=0.8200,top10E=0.22,eRank=252.1,q75/q25=12.51 vo_prod:H=0.3652,top10E=0.86,eRank=14.1,q75/q25=3610.03 train_time:238217ms step_avg:85.08ms +[2025-08-22 19:42:59] [Rank 0] step:2801/10000 train_time:238233ms step_avg:85.05ms +[2025-08-22 19:42:59] [Rank 0] step:2801/10000 train_time:238233ms step_avg:85.05ms +[2025-08-22 19:43:01] [Rank 0] step:2821/10000 train_time:239901ms step_avg:85.04ms +[2025-08-22 19:43:01] [Rank 0] step:2821/10000 train_time:239901ms step_avg:85.04ms +[2025-08-22 19:43:03] [Rank 0] step:2841/10000 train_time:241641ms step_avg:85.05ms +[2025-08-22 19:43:03] [Rank 0] step:2841/10000 train_time:241641ms step_avg:85.05ms +[2025-08-22 19:43:04] [Rank 0] step:2861/10000 train_time:243383ms step_avg:85.07ms +[2025-08-22 19:43:04] [Rank 0] step:2861/10000 train_time:243383ms step_avg:85.07ms +[2025-08-22 19:43:06] [Rank 0] step:2881/10000 train_time:245124ms step_avg:85.08ms +[2025-08-22 19:43:06] [Rank 0] step:2881/10000 train_time:245124ms step_avg:85.08ms +[2025-08-22 19:43:08] [Rank 0] step:2901/10000 train_time:246867ms step_avg:85.10ms +[2025-08-22 19:43:08] [Rank 0] step:2901/10000 train_time:246867ms step_avg:85.10ms +[2025-08-22 19:43:10] [Rank 0] step:2921/10000 train_time:248610ms step_avg:85.11ms +[2025-08-22 19:43:10] [Rank 0] step:2921/10000 train_time:248610ms step_avg:85.11ms +[2025-08-22 
19:43:11] [Rank 0] step:2941/10000 train_time:250356ms step_avg:85.13ms +[2025-08-22 19:43:11] [Rank 0] step:2941/10000 train_time:250356ms step_avg:85.13ms +[2025-08-22 19:43:13] [Rank 0] step:2961/10000 train_time:252101ms step_avg:85.14ms +[2025-08-22 19:43:13] [Rank 0] step:2961/10000 train_time:252101ms step_avg:85.14ms +[2025-08-22 19:43:15] [Rank 0] step:2981/10000 train_time:253852ms step_avg:85.16ms +[2025-08-22 19:43:15] [Rank 0] step:2981/10000 train_time:253852ms step_avg:85.16ms +[2025-08-22 19:43:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:43:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:43:30] [Rank 0] PRINT: step:3000/10000 val_loss:4.4278 svd_entropy: attn_qk:H=0.8437,top10E=0.12,eRank=290.7,q75/q25=85.31 attn_vo:H=0.4666,top10E=0.70,eRank=28.5,q75/q25=84.54 mlp_w1:H=0.6920,top10E=0.37,eRank=125.0,q75/q25=12.45 mlp_w2:H=0.8243,top10E=0.21,eRank=259.3,q75/q25=12.24 vo_prod:H=0.3725,top10E=0.85,eRank=14.9,q75/q25=4025.74 train_time:255693ms step_avg:85.23ms +[2025-08-22 19:43:30] [Rank 0] PRINT: step:3000/10000 val_loss:4.4278 svd_entropy: attn_qk:H=0.8437,top10E=0.12,eRank=290.7,q75/q25=85.31 attn_vo:H=0.4666,top10E=0.70,eRank=28.5,q75/q25=84.54 mlp_w1:H=0.6920,top10E=0.37,eRank=125.0,q75/q25=12.45 mlp_w2:H=0.8243,top10E=0.21,eRank=259.3,q75/q25=12.24 vo_prod:H=0.3725,top10E=0.85,eRank=14.9,q75/q25=4025.74 train_time:255693ms step_avg:85.23ms +[2025-08-22 19:43:30] [Rank 0] step:3001/10000 train_time:255708ms step_avg:85.21ms +[2025-08-22 19:43:30] [Rank 0] step:3001/10000 train_time:255708ms step_avg:85.21ms +[2025-08-22 19:43:32] [Rank 0] step:3021/10000 train_time:257379ms step_avg:85.20ms +[2025-08-22 19:43:32] [Rank 0] step:3021/10000 train_time:257379ms step_avg:85.20ms +[2025-08-22 19:43:34] [Rank 0] step:3041/10000 train_time:259126ms 
step_avg:85.21ms +[2025-08-22 19:43:34] [Rank 0] step:3041/10000 train_time:259126ms step_avg:85.21ms +[2025-08-22 19:43:36] [Rank 0] step:3061/10000 train_time:260925ms step_avg:85.24ms +[2025-08-22 19:43:36] [Rank 0] step:3061/10000 train_time:260925ms step_avg:85.24ms +[2025-08-22 19:43:37] [Rank 0] step:3081/10000 train_time:262623ms step_avg:85.24ms +[2025-08-22 19:43:37] [Rank 0] step:3081/10000 train_time:262623ms step_avg:85.24ms +[2025-08-22 19:43:39] [Rank 0] step:3101/10000 train_time:264443ms step_avg:85.28ms +[2025-08-22 19:43:39] [Rank 0] step:3101/10000 train_time:264443ms step_avg:85.28ms +[2025-08-22 19:43:41] [Rank 0] step:3121/10000 train_time:266190ms step_avg:85.29ms +[2025-08-22 19:43:41] [Rank 0] step:3121/10000 train_time:266190ms step_avg:85.29ms +[2025-08-22 19:43:43] [Rank 0] step:3141/10000 train_time:267940ms step_avg:85.30ms +[2025-08-22 19:43:43] [Rank 0] step:3141/10000 train_time:267940ms step_avg:85.30ms +[2025-08-22 19:43:45] [Rank 0] step:3161/10000 train_time:269691ms step_avg:85.32ms +[2025-08-22 19:43:45] [Rank 0] step:3161/10000 train_time:269691ms step_avg:85.32ms +[2025-08-22 19:43:46] [Rank 0] step:3181/10000 train_time:271441ms step_avg:85.33ms +[2025-08-22 19:43:46] [Rank 0] step:3181/10000 train_time:271441ms step_avg:85.33ms +[2025-08-22 19:43:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:43:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:44:02] [Rank 0] PRINT: step:3200/10000 val_loss:4.3947 svd_entropy: attn_qk:H=0.8452,top10E=0.12,eRank=293.4,q75/q25=86.08 attn_vo:H=0.4732,top10E=0.69,eRank=30.0,q75/q25=87.10 mlp_w1:H=0.6960,top10E=0.37,eRank=128.6,q75/q25=12.70 mlp_w2:H=0.8283,top10E=0.21,eRank=266.0,q75/q25=11.94 vo_prod:H=0.3799,top10E=0.84,eRank=15.7,q75/q25=4471.27 train_time:273279ms step_avg:85.40ms +[2025-08-22 19:44:02] [Rank 0] PRINT: step:3200/10000 val_loss:4.3947 svd_entropy: attn_qk:H=0.8452,top10E=0.12,eRank=293.4,q75/q25=86.08 attn_vo:H=0.4732,top10E=0.69,eRank=30.0,q75/q25=87.10 mlp_w1:H=0.6960,top10E=0.37,eRank=128.6,q75/q25=12.70 mlp_w2:H=0.8283,top10E=0.21,eRank=266.0,q75/q25=11.94 vo_prod:H=0.3799,top10E=0.84,eRank=15.7,q75/q25=4471.27 train_time:273279ms step_avg:85.40ms +[2025-08-22 19:44:02] [Rank 0] step:3201/10000 train_time:273293ms step_avg:85.38ms +[2025-08-22 19:44:02] [Rank 0] step:3201/10000 train_time:273293ms step_avg:85.38ms +[2025-08-22 19:44:04] [Rank 0] step:3221/10000 train_time:274972ms step_avg:85.37ms +[2025-08-22 19:44:04] [Rank 0] step:3221/10000 train_time:274972ms step_avg:85.37ms +[2025-08-22 19:44:05] [Rank 0] step:3241/10000 train_time:276716ms step_avg:85.38ms +[2025-08-22 19:44:05] [Rank 0] step:3241/10000 train_time:276716ms step_avg:85.38ms +[2025-08-22 19:44:07] [Rank 0] step:3261/10000 train_time:278460ms step_avg:85.39ms +[2025-08-22 19:44:07] [Rank 0] step:3261/10000 train_time:278460ms step_avg:85.39ms +[2025-08-22 19:44:09] [Rank 0] step:3281/10000 train_time:280208ms step_avg:85.40ms +[2025-08-22 19:44:09] [Rank 0] step:3281/10000 train_time:280208ms step_avg:85.40ms +[2025-08-22 19:44:11] [Rank 0] step:3301/10000 train_time:281954ms step_avg:85.41ms +[2025-08-22 19:44:11] [Rank 0] step:3301/10000 train_time:281954ms step_avg:85.41ms +[2025-08-22 19:44:12] [Rank 0] step:3321/10000 train_time:283703ms step_avg:85.43ms +[2025-08-22 19:44:12] [Rank 0] step:3321/10000 train_time:283703ms step_avg:85.43ms +[2025-08-22 
19:44:14] [Rank 0] step:3341/10000 train_time:285454ms step_avg:85.44ms +[2025-08-22 19:44:14] [Rank 0] step:3341/10000 train_time:285454ms step_avg:85.44ms +[2025-08-22 19:44:16] [Rank 0] step:3361/10000 train_time:287202ms step_avg:85.45ms +[2025-08-22 19:44:16] [Rank 0] step:3361/10000 train_time:287202ms step_avg:85.45ms +[2025-08-22 19:44:18] [Rank 0] step:3381/10000 train_time:288953ms step_avg:85.46ms +[2025-08-22 19:44:18] [Rank 0] step:3381/10000 train_time:288953ms step_avg:85.46ms +[2025-08-22 19:44:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:44:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:44:33] [Rank 0] PRINT: step:3400/10000 val_loss:4.3509 svd_entropy: attn_qk:H=0.8467,top10E=0.12,eRank=296.0,q75/q25=87.21 attn_vo:H=0.4794,top10E=0.68,eRank=31.5,q75/q25=90.48 mlp_w1:H=0.6998,top10E=0.36,eRank=132.2,q75/q25=12.93 mlp_w2:H=0.8321,top10E=0.20,eRank=272.4,q75/q25=11.76 vo_prod:H=0.3859,top10E=0.83,eRank=16.5,q75/q25=4919.24 train_time:290791ms step_avg:85.53ms +[2025-08-22 19:44:33] [Rank 0] PRINT: step:3400/10000 val_loss:4.3509 svd_entropy: attn_qk:H=0.8467,top10E=0.12,eRank=296.0,q75/q25=87.21 attn_vo:H=0.4794,top10E=0.68,eRank=31.5,q75/q25=90.48 mlp_w1:H=0.6998,top10E=0.36,eRank=132.2,q75/q25=12.93 mlp_w2:H=0.8321,top10E=0.20,eRank=272.4,q75/q25=11.76 vo_prod:H=0.3859,top10E=0.83,eRank=16.5,q75/q25=4919.24 train_time:290791ms step_avg:85.53ms +[2025-08-22 19:44:33] [Rank 0] step:3401/10000 train_time:290806ms step_avg:85.51ms +[2025-08-22 19:44:33] [Rank 0] step:3401/10000 train_time:290806ms step_avg:85.51ms +[2025-08-22 19:44:35] [Rank 0] step:3421/10000 train_time:292472ms step_avg:85.49ms +[2025-08-22 19:44:35] [Rank 0] step:3421/10000 train_time:292472ms step_avg:85.49ms +[2025-08-22 19:44:37] [Rank 0] step:3441/10000 train_time:294220ms 
step_avg:85.50ms +[2025-08-22 19:44:37] [Rank 0] step:3441/10000 train_time:294220ms step_avg:85.50ms +[2025-08-22 19:44:39] [Rank 0] step:3461/10000 train_time:296190ms step_avg:85.58ms +[2025-08-22 19:44:39] [Rank 0] step:3461/10000 train_time:296190ms step_avg:85.58ms +[2025-08-22 19:44:40] [Rank 0] step:3481/10000 train_time:297770ms step_avg:85.54ms +[2025-08-22 19:44:40] [Rank 0] step:3481/10000 train_time:297770ms step_avg:85.54ms +[2025-08-22 19:44:42] [Rank 0] step:3501/10000 train_time:299562ms step_avg:85.56ms +[2025-08-22 19:44:42] [Rank 0] step:3501/10000 train_time:299562ms step_avg:85.56ms +[2025-08-22 19:44:44] [Rank 0] step:3521/10000 train_time:301315ms step_avg:85.58ms +[2025-08-22 19:44:44] [Rank 0] step:3521/10000 train_time:301315ms step_avg:85.58ms +[2025-08-22 19:44:46] [Rank 0] step:3541/10000 train_time:303069ms step_avg:85.59ms +[2025-08-22 19:44:46] [Rank 0] step:3541/10000 train_time:303069ms step_avg:85.59ms +[2025-08-22 19:44:47] [Rank 0] step:3561/10000 train_time:304822ms step_avg:85.60ms +[2025-08-22 19:44:47] [Rank 0] step:3561/10000 train_time:304822ms step_avg:85.60ms +[2025-08-22 19:44:49] [Rank 0] step:3581/10000 train_time:306577ms step_avg:85.61ms +[2025-08-22 19:44:49] [Rank 0] step:3581/10000 train_time:306577ms step_avg:85.61ms +[2025-08-22 19:44:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:44:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:45:04] [Rank 0] PRINT: step:3600/10000 val_loss:4.3495 svd_entropy: attn_qk:H=0.8480,top10E=0.11,eRank=298.4,q75/q25=88.43 attn_vo:H=0.4850,top10E=0.67,eRank=33.0,q75/q25=92.91 mlp_w1:H=0.7034,top10E=0.36,eRank=135.5,q75/q25=13.12 mlp_w2:H=0.8354,top10E=0.20,eRank=278.0,q75/q25=11.54 vo_prod:H=0.3914,top10E=0.82,eRank=17.2,q75/q25=5280.79 train_time:308421ms step_avg:85.67ms +[2025-08-22 19:45:04] [Rank 0] PRINT: step:3600/10000 val_loss:4.3495 svd_entropy: attn_qk:H=0.8480,top10E=0.11,eRank=298.4,q75/q25=88.43 attn_vo:H=0.4850,top10E=0.67,eRank=33.0,q75/q25=92.91 mlp_w1:H=0.7034,top10E=0.36,eRank=135.5,q75/q25=13.12 mlp_w2:H=0.8354,top10E=0.20,eRank=278.0,q75/q25=11.54 vo_prod:H=0.3914,top10E=0.82,eRank=17.2,q75/q25=5280.79 train_time:308421ms step_avg:85.67ms +[2025-08-22 19:45:04] [Rank 0] step:3601/10000 train_time:308435ms step_avg:85.65ms +[2025-08-22 19:45:04] [Rank 0] step:3601/10000 train_time:308435ms step_avg:85.65ms +[2025-08-22 19:45:06] [Rank 0] step:3621/10000 train_time:310112ms step_avg:85.64ms +[2025-08-22 19:45:06] [Rank 0] step:3621/10000 train_time:310112ms step_avg:85.64ms +[2025-08-22 19:45:08] [Rank 0] step:3641/10000 train_time:311859ms step_avg:85.65ms +[2025-08-22 19:45:08] [Rank 0] step:3641/10000 train_time:311859ms step_avg:85.65ms +[2025-08-22 19:45:10] [Rank 0] step:3661/10000 train_time:313647ms step_avg:85.67ms +[2025-08-22 19:45:10] [Rank 0] step:3661/10000 train_time:313647ms step_avg:85.67ms +[2025-08-22 19:45:12] [Rank 0] step:3681/10000 train_time:315395ms step_avg:85.68ms +[2025-08-22 19:45:12] [Rank 0] step:3681/10000 train_time:315395ms step_avg:85.68ms +[2025-08-22 19:45:13] [Rank 0] step:3701/10000 train_time:317144ms step_avg:85.69ms +[2025-08-22 19:45:13] [Rank 0] step:3701/10000 train_time:317144ms step_avg:85.69ms +[2025-08-22 19:45:15] [Rank 0] step:3721/10000 train_time:318920ms step_avg:85.71ms +[2025-08-22 19:45:15] [Rank 0] step:3721/10000 train_time:318920ms step_avg:85.71ms +[2025-08-22 
19:45:17] [Rank 0] step:3741/10000 train_time:320706ms step_avg:85.73ms +[2025-08-22 19:45:17] [Rank 0] step:3741/10000 train_time:320706ms step_avg:85.73ms +[2025-08-22 19:45:19] [Rank 0] step:3761/10000 train_time:322492ms step_avg:85.75ms +[2025-08-22 19:45:19] [Rank 0] step:3761/10000 train_time:322492ms step_avg:85.75ms +[2025-08-22 19:45:20] [Rank 0] step:3781/10000 train_time:324279ms step_avg:85.77ms +[2025-08-22 19:45:20] [Rank 0] step:3781/10000 train_time:324279ms step_avg:85.77ms +[2025-08-22 19:45:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:45:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:45:36] [Rank 0] PRINT: step:3800/10000 val_loss:4.2783 svd_entropy: attn_qk:H=0.8493,top10E=0.11,eRank=300.8,q75/q25=89.39 attn_vo:H=0.4903,top10E=0.66,eRank=34.4,q75/q25=94.81 mlp_w1:H=0.7067,top10E=0.35,eRank=138.7,q75/q25=13.26 mlp_w2:H=0.8383,top10E=0.19,eRank=283.2,q75/q25=11.36 vo_prod:H=0.3966,top10E=0.81,eRank=17.9,q75/q25=5751.52 train_time:326157ms step_avg:85.83ms +[2025-08-22 19:45:36] [Rank 0] PRINT: step:3800/10000 val_loss:4.2783 svd_entropy: attn_qk:H=0.8493,top10E=0.11,eRank=300.8,q75/q25=89.39 attn_vo:H=0.4903,top10E=0.66,eRank=34.4,q75/q25=94.81 mlp_w1:H=0.7067,top10E=0.35,eRank=138.7,q75/q25=13.26 mlp_w2:H=0.8383,top10E=0.19,eRank=283.2,q75/q25=11.36 vo_prod:H=0.3966,top10E=0.81,eRank=17.9,q75/q25=5751.52 train_time:326157ms step_avg:85.83ms +[2025-08-22 19:45:36] [Rank 0] step:3801/10000 train_time:326170ms step_avg:85.81ms +[2025-08-22 19:45:36] [Rank 0] step:3801/10000 train_time:326170ms step_avg:85.81ms +[2025-08-22 19:45:38] [Rank 0] step:3821/10000 train_time:327872ms step_avg:85.81ms +[2025-08-22 19:45:38] [Rank 0] step:3821/10000 train_time:327872ms step_avg:85.81ms +[2025-08-22 19:45:40] [Rank 0] step:3841/10000 train_time:329660ms 
step_avg:85.83ms +[2025-08-22 19:45:40] [Rank 0] step:3841/10000 train_time:329660ms step_avg:85.83ms +[2025-08-22 19:45:41] [Rank 0] step:3861/10000 train_time:331444ms step_avg:85.84ms +[2025-08-22 19:45:41] [Rank 0] step:3861/10000 train_time:331444ms step_avg:85.84ms +[2025-08-22 19:45:43] [Rank 0] step:3881/10000 train_time:333253ms step_avg:85.87ms +[2025-08-22 19:45:43] [Rank 0] step:3881/10000 train_time:333253ms step_avg:85.87ms +[2025-08-22 19:45:45] [Rank 0] step:3901/10000 train_time:335037ms step_avg:85.88ms +[2025-08-22 19:45:45] [Rank 0] step:3901/10000 train_time:335037ms step_avg:85.88ms +[2025-08-22 19:45:47] [Rank 0] step:3921/10000 train_time:336820ms step_avg:85.90ms +[2025-08-22 19:45:47] [Rank 0] step:3921/10000 train_time:336820ms step_avg:85.90ms +[2025-08-22 19:45:49] [Rank 0] step:3941/10000 train_time:338606ms step_avg:85.92ms +[2025-08-22 19:45:49] [Rank 0] step:3941/10000 train_time:338606ms step_avg:85.92ms +[2025-08-22 19:45:50] [Rank 0] step:3961/10000 train_time:340390ms step_avg:85.94ms +[2025-08-22 19:45:50] [Rank 0] step:3961/10000 train_time:340390ms step_avg:85.94ms +[2025-08-22 19:45:52] [Rank 0] step:3981/10000 train_time:342176ms step_avg:85.95ms +[2025-08-22 19:45:52] [Rank 0] step:3981/10000 train_time:342176ms step_avg:85.95ms +[2025-08-22 19:45:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:45:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:46:07] [Rank 0] PRINT: step:4000/10000 val_loss:4.2484 svd_entropy: attn_qk:H=0.8504,top10E=0.11,eRank=302.8,q75/q25=89.25 attn_vo:H=0.4948,top10E=0.65,eRank=35.8,q75/q25=97.34 mlp_w1:H=0.7098,top10E=0.35,eRank=141.7,q75/q25=13.28 mlp_w2:H=0.8411,top10E=0.19,eRank=288.0,q75/q25=11.18 vo_prod:H=0.4012,top10E=0.80,eRank=18.6,q75/q25=6069.84 train_time:344051ms step_avg:86.01ms +[2025-08-22 19:46:07] [Rank 0] PRINT: step:4000/10000 val_loss:4.2484 svd_entropy: attn_qk:H=0.8504,top10E=0.11,eRank=302.8,q75/q25=89.25 attn_vo:H=0.4948,top10E=0.65,eRank=35.8,q75/q25=97.34 mlp_w1:H=0.7098,top10E=0.35,eRank=141.7,q75/q25=13.28 mlp_w2:H=0.8411,top10E=0.19,eRank=288.0,q75/q25=11.18 vo_prod:H=0.4012,top10E=0.80,eRank=18.6,q75/q25=6069.84 train_time:344051ms step_avg:86.01ms +[2025-08-22 19:46:07] [Rank 0] step:4001/10000 train_time:344066ms step_avg:86.00ms +[2025-08-22 19:46:07] [Rank 0] step:4001/10000 train_time:344066ms step_avg:86.00ms +[2025-08-22 19:46:09] [Rank 0] step:4021/10000 train_time:345771ms step_avg:85.99ms +[2025-08-22 19:46:09] [Rank 0] step:4021/10000 train_time:345771ms step_avg:85.99ms +[2025-08-22 19:46:11] [Rank 0] step:4041/10000 train_time:347556ms step_avg:86.01ms +[2025-08-22 19:46:11] [Rank 0] step:4041/10000 train_time:347556ms step_avg:86.01ms +[2025-08-22 19:46:13] [Rank 0] step:4061/10000 train_time:349339ms step_avg:86.02ms +[2025-08-22 19:46:13] [Rank 0] step:4061/10000 train_time:349339ms step_avg:86.02ms +[2025-08-22 19:46:15] [Rank 0] step:4081/10000 train_time:351713ms step_avg:86.18ms +[2025-08-22 19:46:15] [Rank 0] step:4081/10000 train_time:351713ms step_avg:86.18ms +[2025-08-22 19:46:17] [Rank 0] step:4101/10000 train_time:353497ms step_avg:86.20ms +[2025-08-22 19:46:17] [Rank 0] step:4101/10000 train_time:353497ms step_avg:86.20ms +[2025-08-22 19:46:19] [Rank 0] step:4121/10000 train_time:355284ms step_avg:86.21ms +[2025-08-22 19:46:19] [Rank 0] step:4121/10000 train_time:355284ms step_avg:86.21ms +[2025-08-22 
19:46:21] [Rank 0] step:4141/10000 train_time:357073ms step_avg:86.23ms +[2025-08-22 19:46:21] [Rank 0] step:4141/10000 train_time:357073ms step_avg:86.23ms +[2025-08-22 19:46:22] [Rank 0] step:4161/10000 train_time:358860ms step_avg:86.24ms +[2025-08-22 19:46:22] [Rank 0] step:4161/10000 train_time:358860ms step_avg:86.24ms +[2025-08-22 19:46:24] [Rank 0] step:4181/10000 train_time:360649ms step_avg:86.26ms +[2025-08-22 19:46:24] [Rank 0] step:4181/10000 train_time:360649ms step_avg:86.26ms +[2025-08-22 19:46:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:46:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:46:40] [Rank 0] PRINT: step:4200/10000 val_loss:4.2343 svd_entropy: attn_qk:H=0.8516,top10E=0.11,eRank=305.0,q75/q25=90.54 attn_vo:H=0.4994,top10E=0.64,eRank=37.1,q75/q25=99.64 mlp_w1:H=0.7128,top10E=0.34,eRank=144.6,q75/q25=13.40 mlp_w2:H=0.8436,top10E=0.19,eRank=292.5,q75/q25=10.99 vo_prod:H=0.4059,top10E=0.79,eRank=19.3,q75/q25=6355.83 train_time:362527ms step_avg:86.32ms +[2025-08-22 19:46:40] [Rank 0] PRINT: step:4200/10000 val_loss:4.2343 svd_entropy: attn_qk:H=0.8516,top10E=0.11,eRank=305.0,q75/q25=90.54 attn_vo:H=0.4994,top10E=0.64,eRank=37.1,q75/q25=99.64 mlp_w1:H=0.7128,top10E=0.34,eRank=144.6,q75/q25=13.40 mlp_w2:H=0.8436,top10E=0.19,eRank=292.5,q75/q25=10.99 vo_prod:H=0.4059,top10E=0.79,eRank=19.3,q75/q25=6355.83 train_time:362527ms step_avg:86.32ms +[2025-08-22 19:46:40] [Rank 0] step:4201/10000 train_time:362542ms step_avg:86.30ms +[2025-08-22 19:46:40] [Rank 0] step:4201/10000 train_time:362542ms step_avg:86.30ms +[2025-08-22 19:46:42] [Rank 0] step:4221/10000 train_time:364248ms step_avg:86.29ms +[2025-08-22 19:46:42] [Rank 0] step:4221/10000 train_time:364248ms step_avg:86.29ms +[2025-08-22 19:46:43] [Rank 0] step:4241/10000 train_time:366035ms 
step_avg:86.31ms +[2025-08-22 19:46:43] [Rank 0] step:4241/10000 train_time:366035ms step_avg:86.31ms +[2025-08-22 19:46:45] [Rank 0] step:4261/10000 train_time:367822ms step_avg:86.32ms +[2025-08-22 19:46:45] [Rank 0] step:4261/10000 train_time:367822ms step_avg:86.32ms +[2025-08-22 19:46:47] [Rank 0] step:4281/10000 train_time:369675ms step_avg:86.35ms +[2025-08-22 19:46:47] [Rank 0] step:4281/10000 train_time:369675ms step_avg:86.35ms +[2025-08-22 19:46:49] [Rank 0] step:4301/10000 train_time:371483ms step_avg:86.37ms +[2025-08-22 19:46:49] [Rank 0] step:4301/10000 train_time:371483ms step_avg:86.37ms +[2025-08-22 19:46:51] [Rank 0] step:4321/10000 train_time:373272ms step_avg:86.39ms +[2025-08-22 19:46:51] [Rank 0] step:4321/10000 train_time:373272ms step_avg:86.39ms +[2025-08-22 19:46:52] [Rank 0] step:4341/10000 train_time:375059ms step_avg:86.40ms +[2025-08-22 19:46:52] [Rank 0] step:4341/10000 train_time:375059ms step_avg:86.40ms +[2025-08-22 19:46:54] [Rank 0] step:4361/10000 train_time:376845ms step_avg:86.41ms +[2025-08-22 19:46:54] [Rank 0] step:4361/10000 train_time:376845ms step_avg:86.41ms +[2025-08-22 19:46:56] [Rank 0] step:4381/10000 train_time:378633ms step_avg:86.43ms +[2025-08-22 19:46:56] [Rank 0] step:4381/10000 train_time:378633ms step_avg:86.43ms +[2025-08-22 19:46:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:46:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:47:11] [Rank 0] PRINT: step:4400/10000 val_loss:4.2121 svd_entropy: attn_qk:H=0.8527,top10E=0.11,eRank=307.1,q75/q25=90.41 attn_vo:H=0.5036,top10E=0.63,eRank=38.5,q75/q25=101.50 mlp_w1:H=0.7154,top10E=0.34,eRank=147.3,q75/q25=13.53 mlp_w2:H=0.8461,top10E=0.19,eRank=297.0,q75/q25=10.88 vo_prod:H=0.4102,top10E=0.78,eRank=20.0,q75/q25=6603.98 train_time:380513ms step_avg:86.48ms +[2025-08-22 19:47:11] [Rank 0] PRINT: step:4400/10000 val_loss:4.2121 svd_entropy: attn_qk:H=0.8527,top10E=0.11,eRank=307.1,q75/q25=90.41 attn_vo:H=0.5036,top10E=0.63,eRank=38.5,q75/q25=101.50 mlp_w1:H=0.7154,top10E=0.34,eRank=147.3,q75/q25=13.53 mlp_w2:H=0.8461,top10E=0.19,eRank=297.0,q75/q25=10.88 vo_prod:H=0.4102,top10E=0.78,eRank=20.0,q75/q25=6603.98 train_time:380513ms step_avg:86.48ms +[2025-08-22 19:47:12] [Rank 0] step:4401/10000 train_time:380528ms step_avg:86.46ms +[2025-08-22 19:47:12] [Rank 0] step:4401/10000 train_time:380528ms step_avg:86.46ms +[2025-08-22 19:47:13] [Rank 0] step:4421/10000 train_time:382223ms step_avg:86.46ms +[2025-08-22 19:47:13] [Rank 0] step:4421/10000 train_time:382223ms step_avg:86.46ms +[2025-08-22 19:47:15] [Rank 0] step:4441/10000 train_time:384008ms step_avg:86.47ms +[2025-08-22 19:47:15] [Rank 0] step:4441/10000 train_time:384008ms step_avg:86.47ms +[2025-08-22 19:47:17] [Rank 0] step:4461/10000 train_time:385797ms step_avg:86.48ms +[2025-08-22 19:47:17] [Rank 0] step:4461/10000 train_time:385797ms step_avg:86.48ms +[2025-08-22 19:47:19] [Rank 0] step:4481/10000 train_time:387588ms step_avg:86.50ms +[2025-08-22 19:47:19] [Rank 0] step:4481/10000 train_time:387588ms step_avg:86.50ms +[2025-08-22 19:47:20] [Rank 0] step:4501/10000 train_time:389378ms step_avg:86.51ms +[2025-08-22 19:47:20] [Rank 0] step:4501/10000 train_time:389378ms step_avg:86.51ms +[2025-08-22 19:47:22] [Rank 0] step:4521/10000 train_time:391169ms step_avg:86.52ms +[2025-08-22 19:47:22] [Rank 0] step:4521/10000 train_time:391169ms step_avg:86.52ms +[2025-08-22 
19:47:24] [Rank 0] step:4541/10000 train_time:392963ms step_avg:86.54ms +[2025-08-22 19:47:24] [Rank 0] step:4541/10000 train_time:392963ms step_avg:86.54ms +[2025-08-22 19:47:26] [Rank 0] step:4561/10000 train_time:394755ms step_avg:86.55ms +[2025-08-22 19:47:26] [Rank 0] step:4561/10000 train_time:394755ms step_avg:86.55ms +[2025-08-22 19:47:28] [Rank 0] step:4581/10000 train_time:396551ms step_avg:86.56ms +[2025-08-22 19:47:28] [Rank 0] step:4581/10000 train_time:396551ms step_avg:86.56ms +[2025-08-22 19:47:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:47:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:47:43] [Rank 0] PRINT: step:4600/10000 val_loss:4.1795 svd_entropy: attn_qk:H=0.8539,top10E=0.11,eRank=309.3,q75/q25=91.06 attn_vo:H=0.5079,top10E=0.62,eRank=39.8,q75/q25=102.96 mlp_w1:H=0.7179,top10E=0.33,eRank=150.0,q75/q25=13.61 mlp_w2:H=0.8485,top10E=0.18,eRank=301.3,q75/q25=10.75 vo_prod:H=0.4147,top10E=0.78,eRank=20.6,q75/q25=6985.85 train_time:398433ms step_avg:86.62ms +[2025-08-22 19:47:43] [Rank 0] PRINT: step:4600/10000 val_loss:4.1795 svd_entropy: attn_qk:H=0.8539,top10E=0.11,eRank=309.3,q75/q25=91.06 attn_vo:H=0.5079,top10E=0.62,eRank=39.8,q75/q25=102.96 mlp_w1:H=0.7179,top10E=0.33,eRank=150.0,q75/q25=13.61 mlp_w2:H=0.8485,top10E=0.18,eRank=301.3,q75/q25=10.75 vo_prod:H=0.4147,top10E=0.78,eRank=20.6,q75/q25=6985.85 train_time:398433ms step_avg:86.62ms +[2025-08-22 19:47:43] [Rank 0] step:4601/10000 train_time:398448ms step_avg:86.60ms +[2025-08-22 19:47:43] [Rank 0] step:4601/10000 train_time:398448ms step_avg:86.60ms +[2025-08-22 19:47:45] [Rank 0] step:4621/10000 train_time:400150ms step_avg:86.59ms +[2025-08-22 19:47:45] [Rank 0] step:4621/10000 train_time:400150ms step_avg:86.59ms +[2025-08-22 19:47:47] [Rank 0] step:4641/10000 train_time:401940ms 
step_avg:86.61ms +[2025-08-22 19:47:47] [Rank 0] step:4641/10000 train_time:401940ms step_avg:86.61ms +[2025-08-22 19:47:48] [Rank 0] step:4661/10000 train_time:403730ms step_avg:86.62ms +[2025-08-22 19:47:48] [Rank 0] step:4661/10000 train_time:403730ms step_avg:86.62ms +[2025-08-22 19:47:50] [Rank 0] step:4681/10000 train_time:405520ms step_avg:86.63ms +[2025-08-22 19:47:50] [Rank 0] step:4681/10000 train_time:405520ms step_avg:86.63ms +[2025-08-22 19:47:52] [Rank 0] step:4701/10000 train_time:407361ms step_avg:86.65ms +[2025-08-22 19:47:52] [Rank 0] step:4701/10000 train_time:407361ms step_avg:86.65ms +[2025-08-22 19:47:54] [Rank 0] step:4721/10000 train_time:409152ms step_avg:86.67ms +[2025-08-22 19:47:54] [Rank 0] step:4721/10000 train_time:409152ms step_avg:86.67ms +[2025-08-22 19:47:56] [Rank 0] step:4741/10000 train_time:410944ms step_avg:86.68ms +[2025-08-22 19:47:56] [Rank 0] step:4741/10000 train_time:410944ms step_avg:86.68ms +[2025-08-22 19:47:57] [Rank 0] step:4761/10000 train_time:412738ms step_avg:86.69ms +[2025-08-22 19:47:57] [Rank 0] step:4761/10000 train_time:412738ms step_avg:86.69ms +[2025-08-22 19:47:59] [Rank 0] step:4781/10000 train_time:414530ms step_avg:86.70ms +[2025-08-22 19:47:59] [Rank 0] step:4781/10000 train_time:414530ms step_avg:86.70ms +[2025-08-22 19:48:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:48:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:48:14] [Rank 0] PRINT: step:4800/10000 val_loss:4.1728 svd_entropy: attn_qk:H=0.8550,top10E=0.11,eRank=311.2,q75/q25=91.19 attn_vo:H=0.5116,top10E=0.62,eRank=41.0,q75/q25=104.31 mlp_w1:H=0.7204,top10E=0.33,eRank=152.6,q75/q25=13.66 mlp_w2:H=0.8506,top10E=0.18,eRank=305.2,q75/q25=10.67 vo_prod:H=0.4182,top10E=0.77,eRank=21.2,q75/q25=7257.54 train_time:416414ms step_avg:86.75ms +[2025-08-22 19:48:14] [Rank 0] PRINT: step:4800/10000 val_loss:4.1728 svd_entropy: attn_qk:H=0.8550,top10E=0.11,eRank=311.2,q75/q25=91.19 attn_vo:H=0.5116,top10E=0.62,eRank=41.0,q75/q25=104.31 mlp_w1:H=0.7204,top10E=0.33,eRank=152.6,q75/q25=13.66 mlp_w2:H=0.8506,top10E=0.18,eRank=305.2,q75/q25=10.67 vo_prod:H=0.4182,top10E=0.77,eRank=21.2,q75/q25=7257.54 train_time:416414ms step_avg:86.75ms +[2025-08-22 19:48:14] [Rank 0] step:4801/10000 train_time:416428ms step_avg:86.74ms +[2025-08-22 19:48:14] [Rank 0] step:4801/10000 train_time:416428ms step_avg:86.74ms +[2025-08-22 19:48:16] [Rank 0] step:4821/10000 train_time:418136ms step_avg:86.73ms +[2025-08-22 19:48:16] [Rank 0] step:4821/10000 train_time:418136ms step_avg:86.73ms +[2025-08-22 19:48:18] [Rank 0] step:4841/10000 train_time:419926ms step_avg:86.74ms +[2025-08-22 19:48:18] [Rank 0] step:4841/10000 train_time:419926ms step_avg:86.74ms +[2025-08-22 19:48:20] [Rank 0] step:4861/10000 train_time:421719ms step_avg:86.76ms +[2025-08-22 19:48:20] [Rank 0] step:4861/10000 train_time:421719ms step_avg:86.76ms +[2025-08-22 19:48:22] [Rank 0] step:4881/10000 train_time:423515ms step_avg:86.77ms +[2025-08-22 19:48:22] [Rank 0] step:4881/10000 train_time:423515ms step_avg:86.77ms +[2025-08-22 19:48:23] [Rank 0] step:4901/10000 train_time:425307ms step_avg:86.78ms +[2025-08-22 19:48:23] [Rank 0] step:4901/10000 train_time:425307ms step_avg:86.78ms +[2025-08-22 19:48:25] [Rank 0] step:4921/10000 train_time:427103ms step_avg:86.79ms +[2025-08-22 19:48:25] [Rank 0] step:4921/10000 train_time:427103ms step_avg:86.79ms +[2025-08-22 
19:48:27] [Rank 0] step:4941/10000 train_time:428902ms step_avg:86.80ms +[2025-08-22 19:48:27] [Rank 0] step:4941/10000 train_time:428902ms step_avg:86.80ms +[2025-08-22 19:48:29] [Rank 0] step:4961/10000 train_time:430697ms step_avg:86.82ms +[2025-08-22 19:48:29] [Rank 0] step:4961/10000 train_time:430697ms step_avg:86.82ms +[2025-08-22 19:48:31] [Rank 0] step:4981/10000 train_time:432496ms step_avg:86.83ms +[2025-08-22 19:48:31] [Rank 0] step:4981/10000 train_time:432496ms step_avg:86.83ms +[2025-08-22 19:48:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:48:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:48:46] [Rank 0] PRINT: step:5000/10000 val_loss:4.1484 svd_entropy: attn_qk:H=0.8560,top10E=0.11,eRank=313.1,q75/q25=91.66 attn_vo:H=0.5154,top10E=0.61,eRank=42.3,q75/q25=105.40 mlp_w1:H=0.7226,top10E=0.33,eRank=155.0,q75/q25=13.71 mlp_w2:H=0.8525,top10E=0.18,eRank=308.7,q75/q25=10.57 vo_prod:H=0.4217,top10E=0.76,eRank=21.8,q75/q25=7596.82 train_time:434383ms step_avg:86.88ms +[2025-08-22 19:48:46] [Rank 0] PRINT: step:5000/10000 val_loss:4.1484 svd_entropy: attn_qk:H=0.8560,top10E=0.11,eRank=313.1,q75/q25=91.66 attn_vo:H=0.5154,top10E=0.61,eRank=42.3,q75/q25=105.40 mlp_w1:H=0.7226,top10E=0.33,eRank=155.0,q75/q25=13.71 mlp_w2:H=0.8525,top10E=0.18,eRank=308.7,q75/q25=10.57 vo_prod:H=0.4217,top10E=0.76,eRank=21.8,q75/q25=7596.82 train_time:434383ms step_avg:86.88ms +[2025-08-22 19:48:46] [Rank 0] step:5001/10000 train_time:434398ms step_avg:86.86ms +[2025-08-22 19:48:46] [Rank 0] step:5001/10000 train_time:434398ms step_avg:86.86ms +[2025-08-22 19:48:48] [Rank 0] step:5021/10000 train_time:436126ms step_avg:86.86ms +[2025-08-22 19:48:48] [Rank 0] step:5021/10000 train_time:436126ms step_avg:86.86ms +[2025-08-22 19:48:50] [Rank 0] step:5041/10000 train_time:437917ms 
step_avg:86.87ms +[2025-08-22 19:48:50] [Rank 0] step:5041/10000 train_time:437917ms step_avg:86.87ms +[2025-08-22 19:48:51] [Rank 0] step:5061/10000 train_time:439706ms step_avg:86.88ms +[2025-08-22 19:48:51] [Rank 0] step:5061/10000 train_time:439706ms step_avg:86.88ms +[2025-08-22 19:48:53] [Rank 0] step:5081/10000 train_time:441497ms step_avg:86.89ms +[2025-08-22 19:48:53] [Rank 0] step:5081/10000 train_time:441497ms step_avg:86.89ms +[2025-08-22 19:48:55] [Rank 0] step:5101/10000 train_time:443335ms step_avg:86.91ms +[2025-08-22 19:48:55] [Rank 0] step:5101/10000 train_time:443335ms step_avg:86.91ms +[2025-08-22 19:48:57] [Rank 0] step:5121/10000 train_time:445125ms step_avg:86.92ms +[2025-08-22 19:48:57] [Rank 0] step:5121/10000 train_time:445125ms step_avg:86.92ms +[2025-08-22 19:48:59] [Rank 0] step:5141/10000 train_time:446919ms step_avg:86.93ms +[2025-08-22 19:48:59] [Rank 0] step:5141/10000 train_time:446919ms step_avg:86.93ms +[2025-08-22 19:49:00] [Rank 0] step:5161/10000 train_time:448711ms step_avg:86.94ms +[2025-08-22 19:49:00] [Rank 0] step:5161/10000 train_time:448711ms step_avg:86.94ms +[2025-08-22 19:49:02] [Rank 0] step:5181/10000 train_time:450508ms step_avg:86.95ms +[2025-08-22 19:49:02] [Rank 0] step:5181/10000 train_time:450508ms step_avg:86.95ms +[2025-08-22 19:49:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:49:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:49:18] [Rank 0] PRINT: step:5200/10000 val_loss:4.1316 svd_entropy: attn_qk:H=0.8570,top10E=0.11,eRank=314.9,q75/q25=91.81 attn_vo:H=0.5189,top10E=0.60,eRank=43.5,q75/q25=106.87 mlp_w1:H=0.7248,top10E=0.32,eRank=157.4,q75/q25=13.81 mlp_w2:H=0.8543,top10E=0.18,eRank=312.0,q75/q25=10.44 vo_prod:H=0.4251,top10E=0.76,eRank=22.4,q75/q25=7583.86 train_time:452415ms step_avg:87.00ms +[2025-08-22 19:49:18] [Rank 0] PRINT: step:5200/10000 val_loss:4.1316 svd_entropy: attn_qk:H=0.8570,top10E=0.11,eRank=314.9,q75/q25=91.81 attn_vo:H=0.5189,top10E=0.60,eRank=43.5,q75/q25=106.87 mlp_w1:H=0.7248,top10E=0.32,eRank=157.4,q75/q25=13.81 mlp_w2:H=0.8543,top10E=0.18,eRank=312.0,q75/q25=10.44 vo_prod:H=0.4251,top10E=0.76,eRank=22.4,q75/q25=7583.86 train_time:452415ms step_avg:87.00ms +[2025-08-22 19:49:18] [Rank 0] step:5201/10000 train_time:452430ms step_avg:86.99ms +[2025-08-22 19:49:18] [Rank 0] step:5201/10000 train_time:452430ms step_avg:86.99ms +[2025-08-22 19:49:19] [Rank 0] step:5221/10000 train_time:454162ms step_avg:86.99ms +[2025-08-22 19:49:19] [Rank 0] step:5221/10000 train_time:454162ms step_avg:86.99ms +[2025-08-22 19:49:21] [Rank 0] step:5241/10000 train_time:455982ms step_avg:87.00ms +[2025-08-22 19:49:21] [Rank 0] step:5241/10000 train_time:455982ms step_avg:87.00ms +[2025-08-22 19:49:23] [Rank 0] step:5261/10000 train_time:457802ms step_avg:87.02ms +[2025-08-22 19:49:23] [Rank 0] step:5261/10000 train_time:457802ms step_avg:87.02ms +[2025-08-22 19:49:25] [Rank 0] step:5281/10000 train_time:459622ms step_avg:87.03ms +[2025-08-22 19:49:25] [Rank 0] step:5281/10000 train_time:459622ms step_avg:87.03ms +[2025-08-22 19:49:27] [Rank 0] step:5301/10000 train_time:461454ms step_avg:87.05ms +[2025-08-22 19:49:27] [Rank 0] step:5301/10000 train_time:461454ms step_avg:87.05ms +[2025-08-22 19:49:29] [Rank 0] step:5321/10000 train_time:463276ms step_avg:87.07ms +[2025-08-22 19:49:29] [Rank 0] step:5321/10000 train_time:463276ms step_avg:87.07ms +[2025-08-22 
19:49:30] [Rank 0] step:5341/10000 train_time:465098ms step_avg:87.08ms +[2025-08-22 19:49:30] [Rank 0] step:5341/10000 train_time:465098ms step_avg:87.08ms +[2025-08-22 19:49:32] [Rank 0] step:5361/10000 train_time:466921ms step_avg:87.10ms +[2025-08-22 19:49:32] [Rank 0] step:5361/10000 train_time:466921ms step_avg:87.10ms +[2025-08-22 19:49:34] [Rank 0] step:5381/10000 train_time:468744ms step_avg:87.11ms +[2025-08-22 19:49:34] [Rank 0] step:5381/10000 train_time:468744ms step_avg:87.11ms +[2025-08-22 19:49:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:49:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:49:49] [Rank 0] PRINT: step:5400/10000 val_loss:4.1106 svd_entropy: attn_qk:H=0.8579,top10E=0.11,eRank=316.6,q75/q25=91.49 attn_vo:H=0.5223,top10E=0.60,eRank=44.7,q75/q25=108.47 mlp_w1:H=0.7269,top10E=0.32,eRank=159.7,q75/q25=13.88 mlp_w2:H=0.8560,top10E=0.17,eRank=315.2,q75/q25=10.37 vo_prod:H=0.4287,top10E=0.75,eRank=23.0,q75/q25=7967.94 train_time:470655ms step_avg:87.16ms +[2025-08-22 19:49:49] [Rank 0] PRINT: step:5400/10000 val_loss:4.1106 svd_entropy: attn_qk:H=0.8579,top10E=0.11,eRank=316.6,q75/q25=91.49 attn_vo:H=0.5223,top10E=0.60,eRank=44.7,q75/q25=108.47 mlp_w1:H=0.7269,top10E=0.32,eRank=159.7,q75/q25=13.88 mlp_w2:H=0.8560,top10E=0.17,eRank=315.2,q75/q25=10.37 vo_prod:H=0.4287,top10E=0.75,eRank=23.0,q75/q25=7967.94 train_time:470655ms step_avg:87.16ms +[2025-08-22 19:49:49] [Rank 0] step:5401/10000 train_time:470670ms step_avg:87.15ms +[2025-08-22 19:49:49] [Rank 0] step:5401/10000 train_time:470670ms step_avg:87.15ms +[2025-08-22 19:49:51] [Rank 0] step:5421/10000 train_time:472419ms step_avg:87.15ms +[2025-08-22 19:49:51] [Rank 0] step:5421/10000 train_time:472419ms step_avg:87.15ms +[2025-08-22 19:49:53] [Rank 0] step:5441/10000 train_time:474236ms 
step_avg:87.16ms +[2025-08-22 19:49:53] [Rank 0] step:5441/10000 train_time:474236ms step_avg:87.16ms +[2025-08-22 19:49:55] [Rank 0] step:5461/10000 train_time:476060ms step_avg:87.17ms +[2025-08-22 19:49:55] [Rank 0] step:5461/10000 train_time:476060ms step_avg:87.17ms +[2025-08-22 19:49:57] [Rank 0] step:5481/10000 train_time:477908ms step_avg:87.19ms +[2025-08-22 19:49:57] [Rank 0] step:5481/10000 train_time:477908ms step_avg:87.19ms +[2025-08-22 19:49:58] [Rank 0] step:5501/10000 train_time:479779ms step_avg:87.22ms +[2025-08-22 19:49:58] [Rank 0] step:5501/10000 train_time:479779ms step_avg:87.22ms +[2025-08-22 19:50:00] [Rank 0] step:5521/10000 train_time:481604ms step_avg:87.23ms +[2025-08-22 19:50:00] [Rank 0] step:5521/10000 train_time:481604ms step_avg:87.23ms +[2025-08-22 19:50:02] [Rank 0] step:5541/10000 train_time:483426ms step_avg:87.25ms +[2025-08-22 19:50:02] [Rank 0] step:5541/10000 train_time:483426ms step_avg:87.25ms +[2025-08-22 19:50:04] [Rank 0] step:5561/10000 train_time:485251ms step_avg:87.26ms +[2025-08-22 19:50:04] [Rank 0] step:5561/10000 train_time:485251ms step_avg:87.26ms +[2025-08-22 19:50:06] [Rank 0] step:5581/10000 train_time:487076ms step_avg:87.27ms +[2025-08-22 19:50:06] [Rank 0] step:5581/10000 train_time:487076ms step_avg:87.27ms +[2025-08-22 19:50:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:50:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:50:21] [Rank 0] PRINT: step:5600/10000 val_loss:4.1001 svd_entropy: attn_qk:H=0.8589,top10E=0.11,eRank=318.3,q75/q25=91.33 attn_vo:H=0.5255,top10E=0.59,eRank=45.8,q75/q25=108.85 mlp_w1:H=0.7288,top10E=0.32,eRank=161.9,q75/q25=13.92 mlp_w2:H=0.8576,top10E=0.17,eRank=318.2,q75/q25=10.32 vo_prod:H=0.4317,top10E=0.75,eRank=23.5,q75/q25=8232.80 train_time:488993ms step_avg:87.32ms +[2025-08-22 19:50:21] [Rank 0] PRINT: step:5600/10000 val_loss:4.1001 svd_entropy: attn_qk:H=0.8589,top10E=0.11,eRank=318.3,q75/q25=91.33 attn_vo:H=0.5255,top10E=0.59,eRank=45.8,q75/q25=108.85 mlp_w1:H=0.7288,top10E=0.32,eRank=161.9,q75/q25=13.92 mlp_w2:H=0.8576,top10E=0.17,eRank=318.2,q75/q25=10.32 vo_prod:H=0.4317,top10E=0.75,eRank=23.5,q75/q25=8232.80 train_time:488993ms step_avg:87.32ms +[2025-08-22 19:50:21] [Rank 0] step:5601/10000 train_time:489008ms step_avg:87.31ms +[2025-08-22 19:50:21] [Rank 0] step:5601/10000 train_time:489008ms step_avg:87.31ms +[2025-08-22 19:50:23] [Rank 0] step:5621/10000 train_time:490750ms step_avg:87.31ms +[2025-08-22 19:50:23] [Rank 0] step:5621/10000 train_time:490750ms step_avg:87.31ms +[2025-08-22 19:50:25] [Rank 0] step:5641/10000 train_time:492570ms step_avg:87.32ms +[2025-08-22 19:50:25] [Rank 0] step:5641/10000 train_time:492570ms step_avg:87.32ms +[2025-08-22 19:50:27] [Rank 0] step:5661/10000 train_time:494391ms step_avg:87.33ms +[2025-08-22 19:50:27] [Rank 0] step:5661/10000 train_time:494391ms step_avg:87.33ms +[2025-08-22 19:50:28] [Rank 0] step:5681/10000 train_time:496219ms step_avg:87.35ms +[2025-08-22 19:50:28] [Rank 0] step:5681/10000 train_time:496219ms step_avg:87.35ms +[2025-08-22 19:50:30] [Rank 0] step:5701/10000 train_time:498043ms step_avg:87.36ms +[2025-08-22 19:50:30] [Rank 0] step:5701/10000 train_time:498043ms step_avg:87.36ms +[2025-08-22 19:50:32] [Rank 0] step:5721/10000 train_time:499873ms step_avg:87.38ms +[2025-08-22 19:50:32] [Rank 0] step:5721/10000 train_time:499873ms step_avg:87.38ms +[2025-08-22 
19:50:34] [Rank 0] step:5741/10000 train_time:501700ms step_avg:87.39ms +[2025-08-22 19:50:34] [Rank 0] step:5741/10000 train_time:501700ms step_avg:87.39ms +[2025-08-22 19:50:36] [Rank 0] step:5761/10000 train_time:503528ms step_avg:87.40ms +[2025-08-22 19:50:36] [Rank 0] step:5761/10000 train_time:503528ms step_avg:87.40ms +[2025-08-22 19:50:38] [Rank 0] step:5781/10000 train_time:505359ms step_avg:87.42ms +[2025-08-22 19:50:38] [Rank 0] step:5781/10000 train_time:505359ms step_avg:87.42ms +[2025-08-22 19:50:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:50:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:50:53] [Rank 0] PRINT: step:5800/10000 val_loss:4.0939 svd_entropy: attn_qk:H=0.8598,top10E=0.10,eRank=320.0,q75/q25=91.47 attn_vo:H=0.5284,top10E=0.59,eRank=46.9,q75/q25=110.06 mlp_w1:H=0.7308,top10E=0.31,eRank=164.0,q75/q25=13.94 mlp_w2:H=0.8591,top10E=0.17,eRank=321.1,q75/q25=10.22 vo_prod:H=0.4345,top10E=0.74,eRank=24.0,q75/q25=8401.29 train_time:507281ms step_avg:87.46ms +[2025-08-22 19:50:53] [Rank 0] PRINT: step:5800/10000 val_loss:4.0939 svd_entropy: attn_qk:H=0.8598,top10E=0.10,eRank=320.0,q75/q25=91.47 attn_vo:H=0.5284,top10E=0.59,eRank=46.9,q75/q25=110.06 mlp_w1:H=0.7308,top10E=0.31,eRank=164.0,q75/q25=13.94 mlp_w2:H=0.8591,top10E=0.17,eRank=321.1,q75/q25=10.22 vo_prod:H=0.4345,top10E=0.74,eRank=24.0,q75/q25=8401.29 train_time:507281ms step_avg:87.46ms +[2025-08-22 19:50:53] [Rank 0] step:5801/10000 train_time:507296ms step_avg:87.45ms +[2025-08-22 19:50:53] [Rank 0] step:5801/10000 train_time:507296ms step_avg:87.45ms +[2025-08-22 19:50:55] [Rank 0] step:5821/10000 train_time:509041ms step_avg:87.45ms +[2025-08-22 19:50:55] [Rank 0] step:5821/10000 train_time:509041ms step_avg:87.45ms +[2025-08-22 19:50:57] [Rank 0] step:5841/10000 train_time:510860ms 
step_avg:87.46ms +[2025-08-22 19:50:57] [Rank 0] step:5841/10000 train_time:510860ms step_avg:87.46ms +[2025-08-22 19:50:59] [Rank 0] step:5861/10000 train_time:512748ms step_avg:87.48ms +[2025-08-22 19:50:59] [Rank 0] step:5861/10000 train_time:512748ms step_avg:87.48ms +[2025-08-22 19:51:00] [Rank 0] step:5881/10000 train_time:514549ms step_avg:87.49ms +[2025-08-22 19:51:00] [Rank 0] step:5881/10000 train_time:514549ms step_avg:87.49ms +[2025-08-22 19:51:02] [Rank 0] step:5901/10000 train_time:516413ms step_avg:87.51ms +[2025-08-22 19:51:02] [Rank 0] step:5901/10000 train_time:516413ms step_avg:87.51ms +[2025-08-22 19:51:04] [Rank 0] step:5921/10000 train_time:518238ms step_avg:87.53ms +[2025-08-22 19:51:04] [Rank 0] step:5921/10000 train_time:518238ms step_avg:87.53ms +[2025-08-22 19:51:06] [Rank 0] step:5941/10000 train_time:520069ms step_avg:87.54ms +[2025-08-22 19:51:06] [Rank 0] step:5941/10000 train_time:520069ms step_avg:87.54ms +[2025-08-22 19:51:08] [Rank 0] step:5961/10000 train_time:521895ms step_avg:87.55ms +[2025-08-22 19:51:08] [Rank 0] step:5961/10000 train_time:521895ms step_avg:87.55ms +[2025-08-22 19:51:10] [Rank 0] step:5981/10000 train_time:523723ms step_avg:87.56ms +[2025-08-22 19:51:10] [Rank 0] step:5981/10000 train_time:523723ms step_avg:87.56ms +[2025-08-22 19:51:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:51:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:51:25] [Rank 0] PRINT: step:6000/10000 val_loss:4.0660 svd_entropy: attn_qk:H=0.8606,top10E=0.10,eRank=321.5,q75/q25=92.43 attn_vo:H=0.5313,top10E=0.58,eRank=48.0,q75/q25=111.07 mlp_w1:H=0.7326,top10E=0.31,eRank=166.1,q75/q25=13.97 mlp_w2:H=0.8605,top10E=0.17,eRank=323.8,q75/q25=10.18 vo_prod:H=0.4374,top10E=0.74,eRank=24.6,q75/q25=8692.28 train_time:525638ms step_avg:87.61ms +[2025-08-22 19:51:25] [Rank 0] PRINT: step:6000/10000 val_loss:4.0660 svd_entropy: attn_qk:H=0.8606,top10E=0.10,eRank=321.5,q75/q25=92.43 attn_vo:H=0.5313,top10E=0.58,eRank=48.0,q75/q25=111.07 mlp_w1:H=0.7326,top10E=0.31,eRank=166.1,q75/q25=13.97 mlp_w2:H=0.8605,top10E=0.17,eRank=323.8,q75/q25=10.18 vo_prod:H=0.4374,top10E=0.74,eRank=24.6,q75/q25=8692.28 train_time:525638ms step_avg:87.61ms +[2025-08-22 19:51:25] [Rank 0] step:6001/10000 train_time:525653ms step_avg:87.59ms +[2025-08-22 19:51:25] [Rank 0] step:6001/10000 train_time:525653ms step_avg:87.59ms +[2025-08-22 19:51:27] [Rank 0] step:6021/10000 train_time:527398ms step_avg:87.59ms +[2025-08-22 19:51:27] [Rank 0] step:6021/10000 train_time:527398ms step_avg:87.59ms +[2025-08-22 19:51:29] [Rank 0] step:6041/10000 train_time:529225ms step_avg:87.61ms +[2025-08-22 19:51:29] [Rank 0] step:6041/10000 train_time:529225ms step_avg:87.61ms +[2025-08-22 19:51:30] [Rank 0] step:6061/10000 train_time:531054ms step_avg:87.62ms +[2025-08-22 19:51:30] [Rank 0] step:6061/10000 train_time:531054ms step_avg:87.62ms +[2025-08-22 19:51:32] [Rank 0] step:6081/10000 train_time:532879ms step_avg:87.63ms +[2025-08-22 19:51:32] [Rank 0] step:6081/10000 train_time:532879ms step_avg:87.63ms +[2025-08-22 19:51:34] [Rank 0] step:6101/10000 train_time:534710ms step_avg:87.64ms +[2025-08-22 19:51:34] [Rank 0] step:6101/10000 train_time:534710ms step_avg:87.64ms +[2025-08-22 19:51:36] [Rank 0] step:6121/10000 train_time:536803ms step_avg:87.70ms +[2025-08-22 19:51:36] [Rank 0] step:6121/10000 train_time:536803ms step_avg:87.70ms +[2025-08-22 
19:51:38] [Rank 0] step:6141/10000 train_time:538642ms step_avg:87.71ms +[2025-08-22 19:51:38] [Rank 0] step:6141/10000 train_time:538642ms step_avg:87.71ms +[2025-08-22 19:51:40] [Rank 0] step:6161/10000 train_time:540475ms step_avg:87.73ms +[2025-08-22 19:51:40] [Rank 0] step:6161/10000 train_time:540475ms step_avg:87.73ms +[2025-08-22 19:51:42] [Rank 0] step:6181/10000 train_time:542303ms step_avg:87.74ms +[2025-08-22 19:51:42] [Rank 0] step:6181/10000 train_time:542303ms step_avg:87.74ms +[2025-08-22 19:51:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:51:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:51:57] [Rank 0] PRINT: step:6200/10000 val_loss:4.0518 svd_entropy: attn_qk:H=0.8614,top10E=0.10,eRank=322.9,q75/q25=91.95 attn_vo:H=0.5342,top10E=0.57,eRank=49.1,q75/q25=111.81 mlp_w1:H=0.7343,top10E=0.31,eRank=168.0,q75/q25=14.00 mlp_w2:H=0.8619,top10E=0.17,eRank=326.5,q75/q25=10.12 vo_prod:H=0.4401,top10E=0.73,eRank=25.1,q75/q25=8842.28 train_time:544227ms step_avg:87.78ms +[2025-08-22 19:51:57] [Rank 0] PRINT: step:6200/10000 val_loss:4.0518 svd_entropy: attn_qk:H=0.8614,top10E=0.10,eRank=322.9,q75/q25=91.95 attn_vo:H=0.5342,top10E=0.57,eRank=49.1,q75/q25=111.81 mlp_w1:H=0.7343,top10E=0.31,eRank=168.0,q75/q25=14.00 mlp_w2:H=0.8619,top10E=0.17,eRank=326.5,q75/q25=10.12 vo_prod:H=0.4401,top10E=0.73,eRank=25.1,q75/q25=8842.28 train_time:544227ms step_avg:87.78ms +[2025-08-22 19:51:57] [Rank 0] step:6201/10000 train_time:544241ms step_avg:87.77ms +[2025-08-22 19:51:57] [Rank 0] step:6201/10000 train_time:544241ms step_avg:87.77ms +[2025-08-22 19:51:59] [Rank 0] step:6221/10000 train_time:545983ms step_avg:87.76ms +[2025-08-22 19:51:59] [Rank 0] step:6221/10000 train_time:545983ms step_avg:87.76ms +[2025-08-22 19:52:01] [Rank 0] step:6241/10000 train_time:547809ms 
step_avg:87.78ms +[2025-08-22 19:52:01] [Rank 0] step:6241/10000 train_time:547809ms step_avg:87.78ms +[2025-08-22 19:52:03] [Rank 0] step:6261/10000 train_time:549640ms step_avg:87.79ms +[2025-08-22 19:52:03] [Rank 0] step:6261/10000 train_time:549640ms step_avg:87.79ms +[2025-08-22 19:52:04] [Rank 0] step:6281/10000 train_time:551475ms step_avg:87.80ms +[2025-08-22 19:52:04] [Rank 0] step:6281/10000 train_time:551475ms step_avg:87.80ms +[2025-08-22 19:52:06] [Rank 0] step:6301/10000 train_time:553310ms step_avg:87.81ms +[2025-08-22 19:52:06] [Rank 0] step:6301/10000 train_time:553310ms step_avg:87.81ms +[2025-08-22 19:52:08] [Rank 0] step:6321/10000 train_time:555141ms step_avg:87.82ms +[2025-08-22 19:52:08] [Rank 0] step:6321/10000 train_time:555141ms step_avg:87.82ms +[2025-08-22 19:52:10] [Rank 0] step:6341/10000 train_time:556977ms step_avg:87.84ms +[2025-08-22 19:52:10] [Rank 0] step:6341/10000 train_time:556977ms step_avg:87.84ms +[2025-08-22 19:52:12] [Rank 0] step:6361/10000 train_time:558816ms step_avg:87.85ms +[2025-08-22 19:52:12] [Rank 0] step:6361/10000 train_time:558816ms step_avg:87.85ms +[2025-08-22 19:52:14] [Rank 0] step:6381/10000 train_time:560653ms step_avg:87.86ms +[2025-08-22 19:52:14] [Rank 0] step:6381/10000 train_time:560653ms step_avg:87.86ms +[2025-08-22 19:52:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:52:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:52:29] [Rank 0] PRINT: step:6400/10000 val_loss:4.0314 svd_entropy: attn_qk:H=0.8621,top10E=0.10,eRank=324.3,q75/q25=91.00 attn_vo:H=0.5367,top10E=0.57,eRank=50.1,q75/q25=112.32 mlp_w1:H=0.7359,top10E=0.31,eRank=169.9,q75/q25=14.04 mlp_w2:H=0.8631,top10E=0.17,eRank=328.8,q75/q25=10.03 vo_prod:H=0.4425,top10E=0.73,eRank=25.5,q75/q25=9151.93 train_time:562575ms step_avg:87.90ms +[2025-08-22 19:52:29] [Rank 0] PRINT: step:6400/10000 val_loss:4.0314 svd_entropy: attn_qk:H=0.8621,top10E=0.10,eRank=324.3,q75/q25=91.00 attn_vo:H=0.5367,top10E=0.57,eRank=50.1,q75/q25=112.32 mlp_w1:H=0.7359,top10E=0.31,eRank=169.9,q75/q25=14.04 mlp_w2:H=0.8631,top10E=0.17,eRank=328.8,q75/q25=10.03 vo_prod:H=0.4425,top10E=0.73,eRank=25.5,q75/q25=9151.93 train_time:562575ms step_avg:87.90ms +[2025-08-22 19:52:29] [Rank 0] step:6401/10000 train_time:562590ms step_avg:87.89ms +[2025-08-22 19:52:29] [Rank 0] step:6401/10000 train_time:562590ms step_avg:87.89ms +[2025-08-22 19:52:31] [Rank 0] step:6421/10000 train_time:564335ms step_avg:87.89ms +[2025-08-22 19:52:31] [Rank 0] step:6421/10000 train_time:564335ms step_avg:87.89ms +[2025-08-22 19:52:33] [Rank 0] step:6441/10000 train_time:566163ms step_avg:87.90ms +[2025-08-22 19:52:33] [Rank 0] step:6441/10000 train_time:566163ms step_avg:87.90ms +[2025-08-22 19:52:34] [Rank 0] step:6461/10000 train_time:567992ms step_avg:87.91ms +[2025-08-22 19:52:34] [Rank 0] step:6461/10000 train_time:567992ms step_avg:87.91ms +[2025-08-22 19:52:36] [Rank 0] step:6481/10000 train_time:569828ms step_avg:87.92ms +[2025-08-22 19:52:36] [Rank 0] step:6481/10000 train_time:569828ms step_avg:87.92ms +[2025-08-22 19:52:38] [Rank 0] step:6501/10000 train_time:571652ms step_avg:87.93ms +[2025-08-22 19:52:38] [Rank 0] step:6501/10000 train_time:571652ms step_avg:87.93ms +[2025-08-22 19:52:40] [Rank 0] step:6521/10000 train_time:573476ms step_avg:87.94ms +[2025-08-22 19:52:40] [Rank 0] step:6521/10000 train_time:573476ms step_avg:87.94ms +[2025-08-22 
19:52:42] [Rank 0] step:6541/10000 train_time:575309ms step_avg:87.95ms +[2025-08-22 19:52:42] [Rank 0] step:6541/10000 train_time:575309ms step_avg:87.95ms +[2025-08-22 19:52:44] [Rank 0] step:6561/10000 train_time:577140ms step_avg:87.97ms +[2025-08-22 19:52:44] [Rank 0] step:6561/10000 train_time:577140ms step_avg:87.97ms +[2025-08-22 19:52:45] [Rank 0] step:6581/10000 train_time:578966ms step_avg:87.98ms +[2025-08-22 19:52:45] [Rank 0] step:6581/10000 train_time:578966ms step_avg:87.98ms +[2025-08-22 19:52:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:52:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:53:01] [Rank 0] PRINT: step:6600/10000 val_loss:4.0184 svd_entropy: attn_qk:H=0.8627,top10E=0.10,eRank=325.4,q75/q25=91.36 attn_vo:H=0.5391,top10E=0.56,eRank=51.0,q75/q25=113.06 mlp_w1:H=0.7373,top10E=0.30,eRank=171.5,q75/q25=14.03 mlp_w2:H=0.8642,top10E=0.16,eRank=330.9,q75/q25=10.03 vo_prod:H=0.4448,top10E=0.72,eRank=26.0,q75/q25=9301.62 train_time:580888ms step_avg:88.01ms +[2025-08-22 19:53:01] [Rank 0] PRINT: step:6600/10000 val_loss:4.0184 svd_entropy: attn_qk:H=0.8627,top10E=0.10,eRank=325.4,q75/q25=91.36 attn_vo:H=0.5391,top10E=0.56,eRank=51.0,q75/q25=113.06 mlp_w1:H=0.7373,top10E=0.30,eRank=171.5,q75/q25=14.03 mlp_w2:H=0.8642,top10E=0.16,eRank=330.9,q75/q25=10.03 vo_prod:H=0.4448,top10E=0.72,eRank=26.0,q75/q25=9301.62 train_time:580888ms step_avg:88.01ms +[2025-08-22 19:53:01] [Rank 0] step:6601/10000 train_time:580903ms step_avg:88.00ms +[2025-08-22 19:53:01] [Rank 0] step:6601/10000 train_time:580903ms step_avg:88.00ms +[2025-08-22 19:53:03] [Rank 0] step:6621/10000 train_time:582656ms step_avg:88.00ms +[2025-08-22 19:53:03] [Rank 0] step:6621/10000 train_time:582656ms step_avg:88.00ms +[2025-08-22 19:53:05] [Rank 0] step:6641/10000 train_time:584487ms 
step_avg:88.01ms +[2025-08-22 19:53:05] [Rank 0] step:6641/10000 train_time:584487ms step_avg:88.01ms +[2025-08-22 19:53:06] [Rank 0] step:6661/10000 train_time:586325ms step_avg:88.02ms +[2025-08-22 19:53:06] [Rank 0] step:6661/10000 train_time:586325ms step_avg:88.02ms +[2025-08-22 19:53:08] [Rank 0] step:6681/10000 train_time:588169ms step_avg:88.04ms +[2025-08-22 19:53:08] [Rank 0] step:6681/10000 train_time:588169ms step_avg:88.04ms +[2025-08-22 19:53:10] [Rank 0] step:6701/10000 train_time:590033ms step_avg:88.05ms +[2025-08-22 19:53:10] [Rank 0] step:6701/10000 train_time:590033ms step_avg:88.05ms +[2025-08-22 19:53:12] [Rank 0] step:6721/10000 train_time:591893ms step_avg:88.07ms +[2025-08-22 19:53:12] [Rank 0] step:6721/10000 train_time:591893ms step_avg:88.07ms +[2025-08-22 19:53:14] [Rank 0] step:6741/10000 train_time:593750ms step_avg:88.08ms +[2025-08-22 19:53:14] [Rank 0] step:6741/10000 train_time:593750ms step_avg:88.08ms +[2025-08-22 19:53:16] [Rank 0] step:6761/10000 train_time:595602ms step_avg:88.09ms +[2025-08-22 19:53:16] [Rank 0] step:6761/10000 train_time:595602ms step_avg:88.09ms +[2025-08-22 19:53:18] [Rank 0] step:6781/10000 train_time:597461ms step_avg:88.11ms +[2025-08-22 19:53:18] [Rank 0] step:6781/10000 train_time:597461ms step_avg:88.11ms +[2025-08-22 19:53:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:53:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:53:33] [Rank 0] PRINT: step:6800/10000 val_loss:3.9997 svd_entropy: attn_qk:H=0.8633,top10E=0.10,eRank=326.5,q75/q25=90.82 attn_vo:H=0.5413,top10E=0.56,eRank=51.9,q75/q25=114.07 mlp_w1:H=0.7386,top10E=0.30,eRank=173.1,q75/q25=14.04 mlp_w2:H=0.8652,top10E=0.16,eRank=332.9,q75/q25=9.96 vo_prod:H=0.4469,top10E=0.72,eRank=26.3,q75/q25=9532.00 train_time:599414ms step_avg:88.15ms +[2025-08-22 19:53:33] [Rank 0] PRINT: step:6800/10000 val_loss:3.9997 svd_entropy: attn_qk:H=0.8633,top10E=0.10,eRank=326.5,q75/q25=90.82 attn_vo:H=0.5413,top10E=0.56,eRank=51.9,q75/q25=114.07 mlp_w1:H=0.7386,top10E=0.30,eRank=173.1,q75/q25=14.04 mlp_w2:H=0.8652,top10E=0.16,eRank=332.9,q75/q25=9.96 vo_prod:H=0.4469,top10E=0.72,eRank=26.3,q75/q25=9532.00 train_time:599414ms step_avg:88.15ms +[2025-08-22 19:53:33] [Rank 0] step:6801/10000 train_time:599430ms step_avg:88.14ms +[2025-08-22 19:53:33] [Rank 0] step:6801/10000 train_time:599430ms step_avg:88.14ms +[2025-08-22 19:53:35] [Rank 0] step:6821/10000 train_time:601204ms step_avg:88.14ms +[2025-08-22 19:53:35] [Rank 0] step:6821/10000 train_time:601204ms step_avg:88.14ms +[2025-08-22 19:53:37] [Rank 0] step:6841/10000 train_time:603054ms step_avg:88.15ms +[2025-08-22 19:53:37] [Rank 0] step:6841/10000 train_time:603054ms step_avg:88.15ms +[2025-08-22 19:53:39] [Rank 0] step:6861/10000 train_time:604905ms step_avg:88.17ms +[2025-08-22 19:53:39] [Rank 0] step:6861/10000 train_time:604905ms step_avg:88.17ms +[2025-08-22 19:53:40] [Rank 0] step:6881/10000 train_time:606759ms step_avg:88.18ms +[2025-08-22 19:53:40] [Rank 0] step:6881/10000 train_time:606759ms step_avg:88.18ms +[2025-08-22 19:53:42] [Rank 0] step:6901/10000 train_time:608612ms step_avg:88.19ms +[2025-08-22 19:53:42] [Rank 0] step:6901/10000 train_time:608612ms step_avg:88.19ms +[2025-08-22 19:53:44] [Rank 0] step:6921/10000 train_time:610466ms step_avg:88.20ms +[2025-08-22 19:53:44] [Rank 0] step:6921/10000 train_time:610466ms step_avg:88.20ms +[2025-08-22 
19:53:46] [Rank 0] step:6941/10000 train_time:612326ms step_avg:88.22ms +[2025-08-22 19:53:46] [Rank 0] step:6941/10000 train_time:612326ms step_avg:88.22ms +[2025-08-22 19:53:48] [Rank 0] step:6961/10000 train_time:614200ms step_avg:88.23ms +[2025-08-22 19:53:48] [Rank 0] step:6961/10000 train_time:614200ms step_avg:88.23ms +[2025-08-22 19:53:50] [Rank 0] step:6981/10000 train_time:616064ms step_avg:88.25ms +[2025-08-22 19:53:50] [Rank 0] step:6981/10000 train_time:616064ms step_avg:88.25ms +[2025-08-22 19:53:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:53:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:54:05] [Rank 0] PRINT: step:7000/10000 val_loss:3.9828 svd_entropy: attn_qk:H=0.8638,top10E=0.10,eRank=327.4,q75/q25=91.24 attn_vo:H=0.5434,top10E=0.56,eRank=52.7,q75/q25=114.64 mlp_w1:H=0.7398,top10E=0.30,eRank=174.5,q75/q25=14.02 mlp_w2:H=0.8662,top10E=0.16,eRank=334.7,q75/q25=9.95 vo_prod:H=0.4488,top10E=0.72,eRank=26.7,q75/q25=9679.77 train_time:618021ms step_avg:88.29ms +[2025-08-22 19:54:05] [Rank 0] PRINT: step:7000/10000 val_loss:3.9828 svd_entropy: attn_qk:H=0.8638,top10E=0.10,eRank=327.4,q75/q25=91.24 attn_vo:H=0.5434,top10E=0.56,eRank=52.7,q75/q25=114.64 mlp_w1:H=0.7398,top10E=0.30,eRank=174.5,q75/q25=14.02 mlp_w2:H=0.8662,top10E=0.16,eRank=334.7,q75/q25=9.95 vo_prod:H=0.4488,top10E=0.72,eRank=26.7,q75/q25=9679.77 train_time:618021ms step_avg:88.29ms +[2025-08-22 19:54:05] [Rank 0] step:7001/10000 train_time:618037ms step_avg:88.28ms +[2025-08-22 19:54:05] [Rank 0] step:7001/10000 train_time:618037ms step_avg:88.28ms +[2025-08-22 19:54:07] [Rank 0] step:7021/10000 train_time:619817ms step_avg:88.28ms +[2025-08-22 19:54:07] [Rank 0] step:7021/10000 train_time:619817ms step_avg:88.28ms +[2025-08-22 19:54:09] [Rank 0] step:7041/10000 train_time:621674ms 
step_avg:88.29ms +[2025-08-22 19:54:09] [Rank 0] step:7041/10000 train_time:621674ms step_avg:88.29ms +[2025-08-22 19:54:11] [Rank 0] step:7061/10000 train_time:623532ms step_avg:88.31ms +[2025-08-22 19:54:11] [Rank 0] step:7061/10000 train_time:623532ms step_avg:88.31ms +[2025-08-22 19:54:13] [Rank 0] step:7081/10000 train_time:625389ms step_avg:88.32ms +[2025-08-22 19:54:13] [Rank 0] step:7081/10000 train_time:625389ms step_avg:88.32ms +[2025-08-22 19:54:15] [Rank 0] step:7101/10000 train_time:627257ms step_avg:88.33ms +[2025-08-22 19:54:15] [Rank 0] step:7101/10000 train_time:627257ms step_avg:88.33ms +[2025-08-22 19:54:17] [Rank 0] step:7121/10000 train_time:629116ms step_avg:88.35ms +[2025-08-22 19:54:17] [Rank 0] step:7121/10000 train_time:629116ms step_avg:88.35ms +[2025-08-22 19:54:19] [Rank 0] step:7141/10000 train_time:630980ms step_avg:88.36ms +[2025-08-22 19:54:19] [Rank 0] step:7141/10000 train_time:630980ms step_avg:88.36ms +[2025-08-22 19:54:20] [Rank 0] step:7161/10000 train_time:632844ms step_avg:88.37ms +[2025-08-22 19:54:20] [Rank 0] step:7161/10000 train_time:632844ms step_avg:88.37ms +[2025-08-22 19:54:22] [Rank 0] step:7181/10000 train_time:634707ms step_avg:88.39ms +[2025-08-22 19:54:22] [Rank 0] step:7181/10000 train_time:634707ms step_avg:88.39ms +[2025-08-22 19:54:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:54:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:54:38] [Rank 0] PRINT: step:7200/10000 val_loss:3.9715 svd_entropy: attn_qk:H=0.8643,top10E=0.10,eRank=328.3,q75/q25=90.82 attn_vo:H=0.5452,top10E=0.55,eRank=53.5,q75/q25=115.62 mlp_w1:H=0.7409,top10E=0.30,eRank=175.9,q75/q25=13.98 mlp_w2:H=0.8671,top10E=0.16,eRank=336.5,q75/q25=9.92 vo_prod:H=0.4502,top10E=0.71,eRank=27.0,q75/q25=9702.12 train_time:636666ms step_avg:88.43ms +[2025-08-22 19:54:38] [Rank 0] PRINT: step:7200/10000 val_loss:3.9715 svd_entropy: attn_qk:H=0.8643,top10E=0.10,eRank=328.3,q75/q25=90.82 attn_vo:H=0.5452,top10E=0.55,eRank=53.5,q75/q25=115.62 mlp_w1:H=0.7409,top10E=0.30,eRank=175.9,q75/q25=13.98 mlp_w2:H=0.8671,top10E=0.16,eRank=336.5,q75/q25=9.92 vo_prod:H=0.4502,top10E=0.71,eRank=27.0,q75/q25=9702.12 train_time:636666ms step_avg:88.43ms +[2025-08-22 19:54:38] [Rank 0] step:7201/10000 train_time:636681ms step_avg:88.42ms +[2025-08-22 19:54:38] [Rank 0] step:7201/10000 train_time:636681ms step_avg:88.42ms +[2025-08-22 19:54:40] [Rank 0] step:7221/10000 train_time:638469ms step_avg:88.42ms +[2025-08-22 19:54:40] [Rank 0] step:7221/10000 train_time:638469ms step_avg:88.42ms +[2025-08-22 19:54:42] [Rank 0] step:7241/10000 train_time:640322ms step_avg:88.43ms +[2025-08-22 19:54:42] [Rank 0] step:7241/10000 train_time:640322ms step_avg:88.43ms +[2025-08-22 19:54:44] [Rank 0] step:7261/10000 train_time:642175ms step_avg:88.44ms +[2025-08-22 19:54:44] [Rank 0] step:7261/10000 train_time:642175ms step_avg:88.44ms +[2025-08-22 19:54:45] [Rank 0] step:7281/10000 train_time:644040ms step_avg:88.45ms +[2025-08-22 19:54:45] [Rank 0] step:7281/10000 train_time:644040ms step_avg:88.45ms +[2025-08-22 19:54:47] [Rank 0] step:7301/10000 train_time:645897ms step_avg:88.47ms +[2025-08-22 19:54:47] [Rank 0] step:7301/10000 train_time:645897ms step_avg:88.47ms +[2025-08-22 19:54:49] [Rank 0] step:7321/10000 train_time:647767ms step_avg:88.48ms +[2025-08-22 19:54:49] [Rank 0] step:7321/10000 train_time:647767ms step_avg:88.48ms +[2025-08-22 
19:54:51] [Rank 0] step:7341/10000 train_time:649625ms step_avg:88.49ms +[2025-08-22 19:54:51] [Rank 0] step:7341/10000 train_time:649625ms step_avg:88.49ms +[2025-08-22 19:54:53] [Rank 0] step:7361/10000 train_time:651491ms step_avg:88.51ms +[2025-08-22 19:54:53] [Rank 0] step:7361/10000 train_time:651491ms step_avg:88.51ms +[2025-08-22 19:54:55] [Rank 0] step:7381/10000 train_time:653356ms step_avg:88.52ms +[2025-08-22 19:54:55] [Rank 0] step:7381/10000 train_time:653356ms step_avg:88.52ms +[2025-08-22 19:54:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:54:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:55:10] [Rank 0] PRINT: step:7400/10000 val_loss:3.9431 svd_entropy: attn_qk:H=0.8647,top10E=0.10,eRank=329.0,q75/q25=90.91 attn_vo:H=0.5470,top10E=0.55,eRank=54.2,q75/q25=115.03 mlp_w1:H=0.7419,top10E=0.30,eRank=177.1,q75/q25=13.96 mlp_w2:H=0.8679,top10E=0.16,eRank=338.1,q75/q25=9.89 vo_prod:H=0.4522,top10E=0.71,eRank=27.4,q75/q25=9887.03 train_time:655293ms step_avg:88.55ms +[2025-08-22 19:55:10] [Rank 0] PRINT: step:7400/10000 val_loss:3.9431 svd_entropy: attn_qk:H=0.8647,top10E=0.10,eRank=329.0,q75/q25=90.91 attn_vo:H=0.5470,top10E=0.55,eRank=54.2,q75/q25=115.03 mlp_w1:H=0.7419,top10E=0.30,eRank=177.1,q75/q25=13.96 mlp_w2:H=0.8679,top10E=0.16,eRank=338.1,q75/q25=9.89 vo_prod:H=0.4522,top10E=0.71,eRank=27.4,q75/q25=9887.03 train_time:655293ms step_avg:88.55ms +[2025-08-22 19:55:10] [Rank 0] step:7401/10000 train_time:655308ms step_avg:88.54ms +[2025-08-22 19:55:10] [Rank 0] step:7401/10000 train_time:655308ms step_avg:88.54ms +[2025-08-22 19:55:12] [Rank 0] step:7421/10000 train_time:657090ms step_avg:88.54ms +[2025-08-22 19:55:12] [Rank 0] step:7421/10000 train_time:657090ms step_avg:88.54ms +[2025-08-22 19:55:14] [Rank 0] step:7441/10000 train_time:658945ms 
step_avg:88.56ms +[2025-08-22 19:55:14] [Rank 0] step:7441/10000 train_time:658945ms step_avg:88.56ms +[2025-08-22 19:55:16] [Rank 0] step:7461/10000 train_time:660802ms step_avg:88.57ms +[2025-08-22 19:55:16] [Rank 0] step:7461/10000 train_time:660802ms step_avg:88.57ms +[2025-08-22 19:55:18] [Rank 0] step:7481/10000 train_time:662667ms step_avg:88.58ms +[2025-08-22 19:55:18] [Rank 0] step:7481/10000 train_time:662667ms step_avg:88.58ms +[2025-08-22 19:55:20] [Rank 0] step:7501/10000 train_time:664531ms step_avg:88.59ms +[2025-08-22 19:55:20] [Rank 0] step:7501/10000 train_time:664531ms step_avg:88.59ms +[2025-08-22 19:55:22] [Rank 0] step:7521/10000 train_time:666394ms step_avg:88.60ms +[2025-08-22 19:55:22] [Rank 0] step:7521/10000 train_time:666394ms step_avg:88.60ms +[2025-08-22 19:55:23] [Rank 0] step:7541/10000 train_time:668273ms step_avg:88.62ms +[2025-08-22 19:55:23] [Rank 0] step:7541/10000 train_time:668273ms step_avg:88.62ms +[2025-08-22 19:55:25] [Rank 0] step:7561/10000 train_time:670127ms step_avg:88.63ms +[2025-08-22 19:55:25] [Rank 0] step:7561/10000 train_time:670127ms step_avg:88.63ms +[2025-08-22 19:55:27] [Rank 0] step:7581/10000 train_time:672001ms step_avg:88.64ms +[2025-08-22 19:55:27] [Rank 0] step:7581/10000 train_time:672001ms step_avg:88.64ms +[2025-08-22 19:55:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:55:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:55:43] [Rank 0] PRINT: step:7600/10000 val_loss:3.9436 svd_entropy: attn_qk:H=0.8650,top10E=0.10,eRank=329.7,q75/q25=90.52 attn_vo:H=0.5485,top10E=0.55,eRank=54.8,q75/q25=113.85 mlp_w1:H=0.7428,top10E=0.30,eRank=178.2,q75/q25=13.94 mlp_w2:H=0.8686,top10E=0.16,eRank=339.5,q75/q25=9.82 vo_prod:H=0.4534,top10E=0.71,eRank=27.7,q75/q25=9751.99 train_time:673964ms step_avg:88.68ms +[2025-08-22 19:55:43] [Rank 0] PRINT: step:7600/10000 val_loss:3.9436 svd_entropy: attn_qk:H=0.8650,top10E=0.10,eRank=329.7,q75/q25=90.52 attn_vo:H=0.5485,top10E=0.55,eRank=54.8,q75/q25=113.85 mlp_w1:H=0.7428,top10E=0.30,eRank=178.2,q75/q25=13.94 mlp_w2:H=0.8686,top10E=0.16,eRank=339.5,q75/q25=9.82 vo_prod:H=0.4534,top10E=0.71,eRank=27.7,q75/q25=9751.99 train_time:673964ms step_avg:88.68ms +[2025-08-22 19:55:43] [Rank 0] step:7601/10000 train_time:673979ms step_avg:88.67ms +[2025-08-22 19:55:43] [Rank 0] step:7601/10000 train_time:673979ms step_avg:88.67ms +[2025-08-22 19:55:45] [Rank 0] step:7621/10000 train_time:675761ms step_avg:88.67ms +[2025-08-22 19:55:45] [Rank 0] step:7621/10000 train_time:675761ms step_avg:88.67ms +[2025-08-22 19:55:47] [Rank 0] step:7641/10000 train_time:677617ms step_avg:88.68ms +[2025-08-22 19:55:47] [Rank 0] step:7641/10000 train_time:677617ms step_avg:88.68ms +[2025-08-22 19:55:49] [Rank 0] step:7661/10000 train_time:679478ms step_avg:88.69ms +[2025-08-22 19:55:49] [Rank 0] step:7661/10000 train_time:679478ms step_avg:88.69ms +[2025-08-22 19:55:50] [Rank 0] step:7681/10000 train_time:681333ms step_avg:88.70ms +[2025-08-22 19:55:50] [Rank 0] step:7681/10000 train_time:681333ms step_avg:88.70ms +[2025-08-22 19:55:52] [Rank 0] step:7701/10000 train_time:683193ms step_avg:88.71ms +[2025-08-22 19:55:52] [Rank 0] step:7701/10000 train_time:683193ms step_avg:88.71ms +[2025-08-22 19:55:54] [Rank 0] step:7721/10000 train_time:685067ms step_avg:88.73ms +[2025-08-22 19:55:54] [Rank 0] step:7721/10000 train_time:685067ms step_avg:88.73ms +[2025-08-22 
19:55:56] [Rank 0] step:7741/10000 train_time:686930ms step_avg:88.74ms +[2025-08-22 19:55:56] [Rank 0] step:7741/10000 train_time:686930ms step_avg:88.74ms +[2025-08-22 19:55:58] [Rank 0] step:7761/10000 train_time:688803ms step_avg:88.75ms +[2025-08-22 19:55:58] [Rank 0] step:7761/10000 train_time:688803ms step_avg:88.75ms +[2025-08-22 19:56:00] [Rank 0] step:7781/10000 train_time:690670ms step_avg:88.76ms +[2025-08-22 19:56:00] [Rank 0] step:7781/10000 train_time:690670ms step_avg:88.76ms +[2025-08-22 19:56:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:56:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:56:15] [Rank 0] PRINT: step:7800/10000 val_loss:3.9227 svd_entropy: attn_qk:H=0.8654,top10E=0.10,eRank=330.3,q75/q25=90.38 attn_vo:H=0.5499,top10E=0.54,eRank=55.4,q75/q25=114.51 mlp_w1:H=0.7437,top10E=0.30,eRank=179.2,q75/q25=13.92 mlp_w2:H=0.8693,top10E=0.16,eRank=340.9,q75/q25=9.81 vo_prod:H=0.4547,top10E=0.71,eRank=27.9,q75/q25=10016.86 train_time:692639ms step_avg:88.80ms +[2025-08-22 19:56:15] [Rank 0] PRINT: step:7800/10000 val_loss:3.9227 svd_entropy: attn_qk:H=0.8654,top10E=0.10,eRank=330.3,q75/q25=90.38 attn_vo:H=0.5499,top10E=0.54,eRank=55.4,q75/q25=114.51 mlp_w1:H=0.7437,top10E=0.30,eRank=179.2,q75/q25=13.92 mlp_w2:H=0.8693,top10E=0.16,eRank=340.9,q75/q25=9.81 vo_prod:H=0.4547,top10E=0.71,eRank=27.9,q75/q25=10016.86 train_time:692639ms step_avg:88.80ms +[2025-08-22 19:56:15] [Rank 0] step:7801/10000 train_time:692653ms step_avg:88.79ms +[2025-08-22 19:56:15] [Rank 0] step:7801/10000 train_time:692653ms step_avg:88.79ms +[2025-08-22 19:56:17] [Rank 0] step:7821/10000 train_time:694426ms step_avg:88.79ms +[2025-08-22 19:56:17] [Rank 0] step:7821/10000 train_time:694426ms step_avg:88.79ms +[2025-08-22 19:56:19] [Rank 0] step:7841/10000 train_time:696284ms 
step_avg:88.80ms +[2025-08-22 19:56:19] [Rank 0] step:7841/10000 train_time:696284ms step_avg:88.80ms +[2025-08-22 19:56:21] [Rank 0] step:7861/10000 train_time:698150ms step_avg:88.81ms +[2025-08-22 19:56:21] [Rank 0] step:7861/10000 train_time:698150ms step_avg:88.81ms +[2025-08-22 19:56:23] [Rank 0] step:7881/10000 train_time:700021ms step_avg:88.82ms +[2025-08-22 19:56:23] [Rank 0] step:7881/10000 train_time:700021ms step_avg:88.82ms +[2025-08-22 19:56:25] [Rank 0] step:7901/10000 train_time:701885ms step_avg:88.83ms +[2025-08-22 19:56:25] [Rank 0] step:7901/10000 train_time:701885ms step_avg:88.83ms +[2025-08-22 19:56:26] [Rank 0] step:7921/10000 train_time:703755ms step_avg:88.85ms +[2025-08-22 19:56:26] [Rank 0] step:7921/10000 train_time:703755ms step_avg:88.85ms +[2025-08-22 19:56:28] [Rank 0] step:7941/10000 train_time:705630ms step_avg:88.86ms +[2025-08-22 19:56:28] [Rank 0] step:7941/10000 train_time:705630ms step_avg:88.86ms +[2025-08-22 19:56:30] [Rank 0] step:7961/10000 train_time:707500ms step_avg:88.87ms +[2025-08-22 19:56:30] [Rank 0] step:7961/10000 train_time:707500ms step_avg:88.87ms +[2025-08-22 19:56:32] [Rank 0] step:7981/10000 train_time:709361ms step_avg:88.88ms +[2025-08-22 19:56:32] [Rank 0] step:7981/10000 train_time:709361ms step_avg:88.88ms +[2025-08-22 19:56:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:56:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:56:48] [Rank 0] PRINT: step:8000/10000 val_loss:3.9025 svd_entropy: attn_qk:H=0.8657,top10E=0.10,eRank=330.9,q75/q25=89.72 attn_vo:H=0.5513,top10E=0.54,eRank=56.0,q75/q25=114.94 mlp_w1:H=0.7444,top10E=0.30,eRank=180.1,q75/q25=13.90 mlp_w2:H=0.8699,top10E=0.16,eRank=342.1,q75/q25=9.75 vo_prod:H=0.4562,top10E=0.70,eRank=28.2,q75/q25=10003.11 train_time:711327ms step_avg:88.92ms +[2025-08-22 19:56:48] [Rank 0] PRINT: step:8000/10000 val_loss:3.9025 svd_entropy: attn_qk:H=0.8657,top10E=0.10,eRank=330.9,q75/q25=89.72 attn_vo:H=0.5513,top10E=0.54,eRank=56.0,q75/q25=114.94 mlp_w1:H=0.7444,top10E=0.30,eRank=180.1,q75/q25=13.90 mlp_w2:H=0.8699,top10E=0.16,eRank=342.1,q75/q25=9.75 vo_prod:H=0.4562,top10E=0.70,eRank=28.2,q75/q25=10003.11 train_time:711327ms step_avg:88.92ms +[2025-08-22 19:56:48] [Rank 0] step:8001/10000 train_time:711342ms step_avg:88.91ms +[2025-08-22 19:56:48] [Rank 0] step:8001/10000 train_time:711342ms step_avg:88.91ms +[2025-08-22 19:56:50] [Rank 0] step:8021/10000 train_time:713109ms step_avg:88.91ms +[2025-08-22 19:56:50] [Rank 0] step:8021/10000 train_time:713109ms step_avg:88.91ms +[2025-08-22 19:56:52] [Rank 0] step:8041/10000 train_time:714975ms step_avg:88.92ms +[2025-08-22 19:56:52] [Rank 0] step:8041/10000 train_time:714975ms step_avg:88.92ms +[2025-08-22 19:56:53] [Rank 0] step:8061/10000 train_time:716838ms step_avg:88.93ms +[2025-08-22 19:56:53] [Rank 0] step:8061/10000 train_time:716838ms step_avg:88.93ms +[2025-08-22 19:56:55] [Rank 0] step:8081/10000 train_time:718691ms step_avg:88.94ms +[2025-08-22 19:56:55] [Rank 0] step:8081/10000 train_time:718691ms step_avg:88.94ms +[2025-08-22 19:56:57] [Rank 0] step:8101/10000 train_time:720562ms step_avg:88.95ms +[2025-08-22 19:56:57] [Rank 0] step:8101/10000 train_time:720562ms step_avg:88.95ms +[2025-08-22 19:56:59] [Rank 0] step:8121/10000 train_time:722423ms step_avg:88.96ms +[2025-08-22 19:56:59] [Rank 0] step:8121/10000 train_time:722423ms step_avg:88.96ms +[2025-08-22 
19:57:01] [Rank 0] step:8141/10000 train_time:724857ms step_avg:89.04ms +[2025-08-22 19:57:01] [Rank 0] step:8141/10000 train_time:724857ms step_avg:89.04ms +[2025-08-22 19:57:03] [Rank 0] step:8161/10000 train_time:726733ms step_avg:89.05ms +[2025-08-22 19:57:03] [Rank 0] step:8161/10000 train_time:726733ms step_avg:89.05ms +[2025-08-22 19:57:05] [Rank 0] step:8181/10000 train_time:728622ms step_avg:89.06ms +[2025-08-22 19:57:05] [Rank 0] step:8181/10000 train_time:728622ms step_avg:89.06ms +[2025-08-22 19:57:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:57:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:57:21] [Rank 0] PRINT: step:8200/10000 val_loss:3.8925 svd_entropy: attn_qk:H=0.8659,top10E=0.10,eRank=331.4,q75/q25=89.74 attn_vo:H=0.5525,top10E=0.54,eRank=56.5,q75/q25=115.43 mlp_w1:H=0.7451,top10E=0.29,eRank=181.0,q75/q25=13.90 mlp_w2:H=0.8704,top10E=0.16,eRank=343.3,q75/q25=9.75 vo_prod:H=0.4570,top10E=0.70,eRank=28.4,q75/q25=10155.01 train_time:730632ms step_avg:89.10ms +[2025-08-22 19:57:21] [Rank 0] PRINT: step:8200/10000 val_loss:3.8925 svd_entropy: attn_qk:H=0.8659,top10E=0.10,eRank=331.4,q75/q25=89.74 attn_vo:H=0.5525,top10E=0.54,eRank=56.5,q75/q25=115.43 mlp_w1:H=0.7451,top10E=0.29,eRank=181.0,q75/q25=13.90 mlp_w2:H=0.8704,top10E=0.16,eRank=343.3,q75/q25=9.75 vo_prod:H=0.4570,top10E=0.70,eRank=28.4,q75/q25=10155.01 train_time:730632ms step_avg:89.10ms +[2025-08-22 19:57:21] [Rank 0] step:8201/10000 train_time:730649ms step_avg:89.09ms +[2025-08-22 19:57:21] [Rank 0] step:8201/10000 train_time:730649ms step_avg:89.09ms +[2025-08-22 19:57:23] [Rank 0] step:8221/10000 train_time:732468ms step_avg:89.10ms +[2025-08-22 19:57:23] [Rank 0] step:8221/10000 train_time:732468ms step_avg:89.10ms +[2025-08-22 19:57:25] [Rank 0] step:8241/10000 train_time:734363ms 
step_avg:89.11ms +[2025-08-22 19:57:25] [Rank 0] step:8241/10000 train_time:734363ms step_avg:89.11ms +[2025-08-22 19:57:27] [Rank 0] step:8261/10000 train_time:736257ms step_avg:89.12ms +[2025-08-22 19:57:27] [Rank 0] step:8261/10000 train_time:736257ms step_avg:89.12ms +[2025-08-22 19:57:29] [Rank 0] step:8281/10000 train_time:738146ms step_avg:89.14ms +[2025-08-22 19:57:29] [Rank 0] step:8281/10000 train_time:738146ms step_avg:89.14ms +[2025-08-22 19:57:30] [Rank 0] step:8301/10000 train_time:740036ms step_avg:89.15ms +[2025-08-22 19:57:30] [Rank 0] step:8301/10000 train_time:740036ms step_avg:89.15ms +[2025-08-22 19:57:32] [Rank 0] step:8321/10000 train_time:741921ms step_avg:89.16ms +[2025-08-22 19:57:32] [Rank 0] step:8321/10000 train_time:741921ms step_avg:89.16ms +[2025-08-22 19:57:34] [Rank 0] step:8341/10000 train_time:743816ms step_avg:89.18ms +[2025-08-22 19:57:34] [Rank 0] step:8341/10000 train_time:743816ms step_avg:89.18ms +[2025-08-22 19:57:36] [Rank 0] step:8361/10000 train_time:745709ms step_avg:89.19ms +[2025-08-22 19:57:36] [Rank 0] step:8361/10000 train_time:745709ms step_avg:89.19ms +[2025-08-22 19:57:38] [Rank 0] step:8381/10000 train_time:747597ms step_avg:89.20ms +[2025-08-22 19:57:38] [Rank 0] step:8381/10000 train_time:747597ms step_avg:89.20ms +[2025-08-22 19:57:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:57:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:57:54] [Rank 0] PRINT: step:8400/10000 val_loss:3.8772 svd_entropy: attn_qk:H=0.8662,top10E=0.10,eRank=331.8,q75/q25=90.17 attn_vo:H=0.5535,top10E=0.54,eRank=56.9,q75/q25=115.80 mlp_w1:H=0.7457,top10E=0.29,eRank=181.8,q75/q25=13.89 mlp_w2:H=0.8709,top10E=0.16,eRank=344.3,q75/q25=9.75 vo_prod:H=0.4579,top10E=0.70,eRank=28.6,q75/q25=10426.01 train_time:749577ms step_avg:89.24ms +[2025-08-22 19:57:54] [Rank 0] PRINT: step:8400/10000 val_loss:3.8772 svd_entropy: attn_qk:H=0.8662,top10E=0.10,eRank=331.8,q75/q25=90.17 attn_vo:H=0.5535,top10E=0.54,eRank=56.9,q75/q25=115.80 mlp_w1:H=0.7457,top10E=0.29,eRank=181.8,q75/q25=13.89 mlp_w2:H=0.8709,top10E=0.16,eRank=344.3,q75/q25=9.75 vo_prod:H=0.4579,top10E=0.70,eRank=28.6,q75/q25=10426.01 train_time:749577ms step_avg:89.24ms +[2025-08-22 19:57:54] [Rank 0] step:8401/10000 train_time:749592ms step_avg:89.23ms +[2025-08-22 19:57:54] [Rank 0] step:8401/10000 train_time:749592ms step_avg:89.23ms +[2025-08-22 19:57:56] [Rank 0] step:8421/10000 train_time:751380ms step_avg:89.23ms +[2025-08-22 19:57:56] [Rank 0] step:8421/10000 train_time:751380ms step_avg:89.23ms +[2025-08-22 19:57:58] [Rank 0] step:8441/10000 train_time:753263ms step_avg:89.24ms +[2025-08-22 19:57:58] [Rank 0] step:8441/10000 train_time:753263ms step_avg:89.24ms +[2025-08-22 19:57:59] [Rank 0] step:8461/10000 train_time:755147ms step_avg:89.25ms +[2025-08-22 19:57:59] [Rank 0] step:8461/10000 train_time:755147ms step_avg:89.25ms +[2025-08-22 19:58:01] [Rank 0] step:8481/10000 train_time:757040ms step_avg:89.26ms +[2025-08-22 19:58:01] [Rank 0] step:8481/10000 train_time:757040ms step_avg:89.26ms +[2025-08-22 19:58:03] [Rank 0] step:8501/10000 train_time:758950ms step_avg:89.28ms +[2025-08-22 19:58:03] [Rank 0] step:8501/10000 train_time:758950ms step_avg:89.28ms +[2025-08-22 19:58:05] [Rank 0] step:8521/10000 train_time:760847ms step_avg:89.29ms +[2025-08-22 19:58:05] [Rank 0] step:8521/10000 train_time:760847ms step_avg:89.29ms +[2025-08-22 
19:58:07] [Rank 0] step:8541/10000 train_time:762751ms step_avg:89.30ms +[2025-08-22 19:58:07] [Rank 0] step:8541/10000 train_time:762751ms step_avg:89.30ms +[2025-08-22 19:58:09] [Rank 0] step:8561/10000 train_time:764646ms step_avg:89.32ms +[2025-08-22 19:58:09] [Rank 0] step:8561/10000 train_time:764646ms step_avg:89.32ms +[2025-08-22 19:58:11] [Rank 0] step:8581/10000 train_time:766545ms step_avg:89.33ms +[2025-08-22 19:58:11] [Rank 0] step:8581/10000 train_time:766545ms step_avg:89.33ms +[2025-08-22 19:58:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:58:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:58:26] [Rank 0] PRINT: step:8600/10000 val_loss:3.8673 svd_entropy: attn_qk:H=0.8663,top10E=0.10,eRank=332.2,q75/q25=90.23 attn_vo:H=0.5544,top10E=0.54,eRank=57.3,q75/q25=115.27 mlp_w1:H=0.7462,top10E=0.29,eRank=182.4,q75/q25=13.85 mlp_w2:H=0.8714,top10E=0.16,eRank=345.2,q75/q25=9.70 vo_prod:H=0.4587,top10E=0.70,eRank=28.8,q75/q25=10200.62 train_time:768524ms step_avg:89.36ms +[2025-08-22 19:58:26] [Rank 0] PRINT: step:8600/10000 val_loss:3.8673 svd_entropy: attn_qk:H=0.8663,top10E=0.10,eRank=332.2,q75/q25=90.23 attn_vo:H=0.5544,top10E=0.54,eRank=57.3,q75/q25=115.27 mlp_w1:H=0.7462,top10E=0.29,eRank=182.4,q75/q25=13.85 mlp_w2:H=0.8714,top10E=0.16,eRank=345.2,q75/q25=9.70 vo_prod:H=0.4587,top10E=0.70,eRank=28.8,q75/q25=10200.62 train_time:768524ms step_avg:89.36ms +[2025-08-22 19:58:27] [Rank 0] step:8601/10000 train_time:768540ms step_avg:89.35ms +[2025-08-22 19:58:27] [Rank 0] step:8601/10000 train_time:768540ms step_avg:89.35ms +[2025-08-22 19:58:29] [Rank 0] step:8621/10000 train_time:770347ms step_avg:89.36ms +[2025-08-22 19:58:29] [Rank 0] step:8621/10000 train_time:770347ms step_avg:89.36ms +[2025-08-22 19:58:30] [Rank 0] step:8641/10000 train_time:772234ms 
step_avg:89.37ms +[2025-08-22 19:58:30] [Rank 0] step:8641/10000 train_time:772234ms step_avg:89.37ms +[2025-08-22 19:58:32] [Rank 0] step:8661/10000 train_time:774129ms step_avg:89.38ms +[2025-08-22 19:58:32] [Rank 0] step:8661/10000 train_time:774129ms step_avg:89.38ms +[2025-08-22 19:58:34] [Rank 0] step:8681/10000 train_time:776021ms step_avg:89.39ms +[2025-08-22 19:58:34] [Rank 0] step:8681/10000 train_time:776021ms step_avg:89.39ms +[2025-08-22 19:58:36] [Rank 0] step:8701/10000 train_time:777910ms step_avg:89.40ms +[2025-08-22 19:58:36] [Rank 0] step:8701/10000 train_time:777910ms step_avg:89.40ms +[2025-08-22 19:58:38] [Rank 0] step:8721/10000 train_time:779808ms step_avg:89.42ms +[2025-08-22 19:58:38] [Rank 0] step:8721/10000 train_time:779808ms step_avg:89.42ms +[2025-08-22 19:58:40] [Rank 0] step:8741/10000 train_time:781692ms step_avg:89.43ms +[2025-08-22 19:58:40] [Rank 0] step:8741/10000 train_time:781692ms step_avg:89.43ms +[2025-08-22 19:58:42] [Rank 0] step:8761/10000 train_time:783586ms step_avg:89.44ms +[2025-08-22 19:58:42] [Rank 0] step:8761/10000 train_time:783586ms step_avg:89.44ms +[2025-08-22 19:58:44] [Rank 0] step:8781/10000 train_time:785482ms step_avg:89.45ms +[2025-08-22 19:58:44] [Rank 0] step:8781/10000 train_time:785482ms step_avg:89.45ms +[2025-08-22 19:58:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:58:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 19:58:59] [Rank 0] PRINT: step:8800/10000 val_loss:3.8526 svd_entropy: attn_qk:H=0.8665,top10E=0.10,eRank=332.5,q75/q25=88.97 attn_vo:H=0.5553,top10E=0.53,eRank=57.7,q75/q25=115.88 mlp_w1:H=0.7467,top10E=0.29,eRank=183.1,q75/q25=13.84 mlp_w2:H=0.8718,top10E=0.16,eRank=346.1,q75/q25=9.67 vo_prod:H=0.4596,top10E=0.70,eRank=29.0,q75/q25=10313.01 train_time:787473ms step_avg:89.49ms +[2025-08-22 19:58:59] [Rank 0] PRINT: step:8800/10000 val_loss:3.8526 svd_entropy: attn_qk:H=0.8665,top10E=0.10,eRank=332.5,q75/q25=88.97 attn_vo:H=0.5553,top10E=0.53,eRank=57.7,q75/q25=115.88 mlp_w1:H=0.7467,top10E=0.29,eRank=183.1,q75/q25=13.84 mlp_w2:H=0.8718,top10E=0.16,eRank=346.1,q75/q25=9.67 vo_prod:H=0.4596,top10E=0.70,eRank=29.0,q75/q25=10313.01 train_time:787473ms step_avg:89.49ms +[2025-08-22 19:58:59] [Rank 0] step:8801/10000 train_time:787489ms step_avg:89.48ms +[2025-08-22 19:58:59] [Rank 0] step:8801/10000 train_time:787489ms step_avg:89.48ms +[2025-08-22 19:59:01] [Rank 0] step:8821/10000 train_time:789300ms step_avg:89.48ms +[2025-08-22 19:59:01] [Rank 0] step:8821/10000 train_time:789300ms step_avg:89.48ms +[2025-08-22 19:59:03] [Rank 0] step:8841/10000 train_time:791205ms step_avg:89.49ms +[2025-08-22 19:59:03] [Rank 0] step:8841/10000 train_time:791205ms step_avg:89.49ms +[2025-08-22 19:59:05] [Rank 0] step:8861/10000 train_time:793087ms step_avg:89.50ms +[2025-08-22 19:59:05] [Rank 0] step:8861/10000 train_time:793087ms step_avg:89.50ms +[2025-08-22 19:59:07] [Rank 0] step:8881/10000 train_time:794976ms step_avg:89.51ms +[2025-08-22 19:59:07] [Rank 0] step:8881/10000 train_time:794976ms step_avg:89.51ms +[2025-08-22 19:59:09] [Rank 0] step:8901/10000 train_time:796866ms step_avg:89.53ms +[2025-08-22 19:59:09] [Rank 0] step:8901/10000 train_time:796866ms step_avg:89.53ms +[2025-08-22 19:59:11] [Rank 0] step:8921/10000 train_time:798768ms step_avg:89.54ms +[2025-08-22 19:59:11] [Rank 0] step:8921/10000 train_time:798768ms step_avg:89.54ms +[2025-08-22 
19:59:13] [Rank 0] step:8941/10000 train_time:800669ms step_avg:89.55ms +[2025-08-22 19:59:13] [Rank 0] step:8941/10000 train_time:800669ms step_avg:89.55ms +[2025-08-22 19:59:15] [Rank 0] step:8961/10000 train_time:802558ms step_avg:89.56ms +[2025-08-22 19:59:15] [Rank 0] step:8961/10000 train_time:802558ms step_avg:89.56ms +[2025-08-22 19:59:16] [Rank 0] step:8981/10000 train_time:804446ms step_avg:89.57ms +[2025-08-22 19:59:16] [Rank 0] step:8981/10000 train_time:804446ms step_avg:89.57ms +[2025-08-22 19:59:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:59:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:59:32] [Rank 0] PRINT: step:9000/10000 val_loss:3.8419 svd_entropy: attn_qk:H=0.8667,top10E=0.10,eRank=332.8,q75/q25=88.42 attn_vo:H=0.5561,top10E=0.53,eRank=58.0,q75/q25=116.05 mlp_w1:H=0.7471,top10E=0.29,eRank=183.6,q75/q25=13.83 mlp_w2:H=0.8722,top10E=0.15,eRank=346.9,q75/q25=9.66 vo_prod:H=0.4603,top10E=0.70,eRank=29.1,q75/q25=10384.49 train_time:806429ms step_avg:89.60ms +[2025-08-22 19:59:32] [Rank 0] PRINT: step:9000/10000 val_loss:3.8419 svd_entropy: attn_qk:H=0.8667,top10E=0.10,eRank=332.8,q75/q25=88.42 attn_vo:H=0.5561,top10E=0.53,eRank=58.0,q75/q25=116.05 mlp_w1:H=0.7471,top10E=0.29,eRank=183.6,q75/q25=13.83 mlp_w2:H=0.8722,top10E=0.15,eRank=346.9,q75/q25=9.66 vo_prod:H=0.4603,top10E=0.70,eRank=29.1,q75/q25=10384.49 train_time:806429ms step_avg:89.60ms +[2025-08-22 19:59:32] [Rank 0] step:9001/10000 train_time:806445ms step_avg:89.60ms +[2025-08-22 19:59:32] [Rank 0] step:9001/10000 train_time:806445ms step_avg:89.60ms +[2025-08-22 19:59:34] [Rank 0] step:9021/10000 train_time:808239ms step_avg:89.60ms +[2025-08-22 19:59:34] [Rank 0] step:9021/10000 train_time:808239ms step_avg:89.60ms +[2025-08-22 19:59:36] [Rank 0] step:9041/10000 train_time:810129ms 
step_avg:89.61ms +[2025-08-22 19:59:36] [Rank 0] step:9041/10000 train_time:810129ms step_avg:89.61ms +[2025-08-22 19:59:38] [Rank 0] step:9061/10000 train_time:812028ms step_avg:89.62ms +[2025-08-22 19:59:38] [Rank 0] step:9061/10000 train_time:812028ms step_avg:89.62ms +[2025-08-22 19:59:40] [Rank 0] step:9081/10000 train_time:813926ms step_avg:89.63ms +[2025-08-22 19:59:40] [Rank 0] step:9081/10000 train_time:813926ms step_avg:89.63ms +[2025-08-22 19:59:42] [Rank 0] step:9101/10000 train_time:815840ms step_avg:89.64ms +[2025-08-22 19:59:42] [Rank 0] step:9101/10000 train_time:815840ms step_avg:89.64ms +[2025-08-22 19:59:44] [Rank 0] step:9121/10000 train_time:817733ms step_avg:89.65ms +[2025-08-22 19:59:44] [Rank 0] step:9121/10000 train_time:817733ms step_avg:89.65ms +[2025-08-22 19:59:45] [Rank 0] step:9141/10000 train_time:819612ms step_avg:89.66ms +[2025-08-22 19:59:45] [Rank 0] step:9141/10000 train_time:819612ms step_avg:89.66ms +[2025-08-22 19:59:47] [Rank 0] step:9161/10000 train_time:821497ms step_avg:89.67ms +[2025-08-22 19:59:47] [Rank 0] step:9161/10000 train_time:821497ms step_avg:89.67ms +[2025-08-22 19:59:49] [Rank 0] step:9181/10000 train_time:823421ms step_avg:89.69ms +[2025-08-22 19:59:49] [Rank 0] step:9181/10000 train_time:823421ms step_avg:89.69ms +[2025-08-22 19:59:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 19:59:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:00:05] [Rank 0] PRINT: step:9200/10000 val_loss:3.8313 svd_entropy: attn_qk:H=0.8668,top10E=0.10,eRank=333.0,q75/q25=87.97 attn_vo:H=0.5567,top10E=0.53,eRank=58.3,q75/q25=115.95 mlp_w1:H=0.7475,top10E=0.29,eRank=184.1,q75/q25=13.83 mlp_w2:H=0.8726,top10E=0.15,eRank=347.6,q75/q25=9.63 vo_prod:H=0.4609,top10E=0.70,eRank=29.2,q75/q25=10492.66 train_time:825403ms step_avg:89.72ms +[2025-08-22 20:00:05] [Rank 0] PRINT: step:9200/10000 val_loss:3.8313 svd_entropy: attn_qk:H=0.8668,top10E=0.10,eRank=333.0,q75/q25=87.97 attn_vo:H=0.5567,top10E=0.53,eRank=58.3,q75/q25=115.95 mlp_w1:H=0.7475,top10E=0.29,eRank=184.1,q75/q25=13.83 mlp_w2:H=0.8726,top10E=0.15,eRank=347.6,q75/q25=9.63 vo_prod:H=0.4609,top10E=0.70,eRank=29.2,q75/q25=10492.66 train_time:825403ms step_avg:89.72ms +[2025-08-22 20:00:05] [Rank 0] step:9201/10000 train_time:825417ms step_avg:89.71ms +[2025-08-22 20:00:05] [Rank 0] step:9201/10000 train_time:825417ms step_avg:89.71ms +[2025-08-22 20:00:07] [Rank 0] step:9221/10000 train_time:827231ms step_avg:89.71ms +[2025-08-22 20:00:07] [Rank 0] step:9221/10000 train_time:827231ms step_avg:89.71ms +[2025-08-22 20:00:09] [Rank 0] step:9241/10000 train_time:829130ms step_avg:89.72ms +[2025-08-22 20:00:09] [Rank 0] step:9241/10000 train_time:829130ms step_avg:89.72ms +[2025-08-22 20:00:11] [Rank 0] step:9261/10000 train_time:831032ms step_avg:89.73ms +[2025-08-22 20:00:11] [Rank 0] step:9261/10000 train_time:831032ms step_avg:89.73ms +[2025-08-22 20:00:13] [Rank 0] step:9281/10000 train_time:832915ms step_avg:89.74ms +[2025-08-22 20:00:13] [Rank 0] step:9281/10000 train_time:832915ms step_avg:89.74ms +[2025-08-22 20:00:15] [Rank 0] step:9301/10000 train_time:834802ms step_avg:89.75ms +[2025-08-22 20:00:15] [Rank 0] step:9301/10000 train_time:834802ms step_avg:89.75ms +[2025-08-22 20:00:16] [Rank 0] step:9321/10000 train_time:836701ms step_avg:89.77ms +[2025-08-22 20:00:16] [Rank 0] step:9321/10000 train_time:836701ms step_avg:89.77ms +[2025-08-22 
20:00:18] [Rank 0] step:9341/10000 train_time:838593ms step_avg:89.78ms +[2025-08-22 20:00:18] [Rank 0] step:9341/10000 train_time:838593ms step_avg:89.78ms +[2025-08-22 20:00:20] [Rank 0] step:9361/10000 train_time:840493ms step_avg:89.79ms +[2025-08-22 20:00:20] [Rank 0] step:9361/10000 train_time:840493ms step_avg:89.79ms +[2025-08-22 20:00:22] [Rank 0] step:9381/10000 train_time:842403ms step_avg:89.80ms +[2025-08-22 20:00:22] [Rank 0] step:9381/10000 train_time:842403ms step_avg:89.80ms +[2025-08-22 20:00:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:00:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:00:38] [Rank 0] PRINT: step:9400/10000 val_loss:3.8219 svd_entropy: attn_qk:H=0.8669,top10E=0.10,eRank=333.2,q75/q25=88.18 attn_vo:H=0.5573,top10E=0.53,eRank=58.5,q75/q25=116.31 mlp_w1:H=0.7478,top10E=0.29,eRank=184.5,q75/q25=13.81 mlp_w2:H=0.8729,top10E=0.15,eRank=348.2,q75/q25=9.60 vo_prod:H=0.4613,top10E=0.69,eRank=29.3,q75/q25=10562.58 train_time:844401ms step_avg:89.83ms +[2025-08-22 20:00:38] [Rank 0] PRINT: step:9400/10000 val_loss:3.8219 svd_entropy: attn_qk:H=0.8669,top10E=0.10,eRank=333.2,q75/q25=88.18 attn_vo:H=0.5573,top10E=0.53,eRank=58.5,q75/q25=116.31 mlp_w1:H=0.7478,top10E=0.29,eRank=184.5,q75/q25=13.81 mlp_w2:H=0.8729,top10E=0.15,eRank=348.2,q75/q25=9.60 vo_prod:H=0.4613,top10E=0.69,eRank=29.3,q75/q25=10562.58 train_time:844401ms step_avg:89.83ms +[2025-08-22 20:00:38] [Rank 0] step:9401/10000 train_time:844416ms step_avg:89.82ms +[2025-08-22 20:00:38] [Rank 0] step:9401/10000 train_time:844416ms step_avg:89.82ms +[2025-08-22 20:00:40] [Rank 0] step:9421/10000 train_time:846219ms step_avg:89.82ms +[2025-08-22 20:00:40] [Rank 0] step:9421/10000 train_time:846219ms step_avg:89.82ms +[2025-08-22 20:00:42] [Rank 0] step:9441/10000 train_time:848116ms 
step_avg:89.83ms +[2025-08-22 20:00:42] [Rank 0] step:9441/10000 train_time:848116ms step_avg:89.83ms +[2025-08-22 20:00:44] [Rank 0] step:9461/10000 train_time:850018ms step_avg:89.84ms +[2025-08-22 20:00:44] [Rank 0] step:9461/10000 train_time:850018ms step_avg:89.84ms +[2025-08-22 20:00:46] [Rank 0] step:9481/10000 train_time:851916ms step_avg:89.86ms +[2025-08-22 20:00:46] [Rank 0] step:9481/10000 train_time:851916ms step_avg:89.86ms +[2025-08-22 20:00:47] [Rank 0] step:9501/10000 train_time:853826ms step_avg:89.87ms +[2025-08-22 20:00:47] [Rank 0] step:9501/10000 train_time:853826ms step_avg:89.87ms +[2025-08-22 20:00:49] [Rank 0] step:9521/10000 train_time:855717ms step_avg:89.88ms +[2025-08-22 20:00:49] [Rank 0] step:9521/10000 train_time:855717ms step_avg:89.88ms +[2025-08-22 20:00:51] [Rank 0] step:9541/10000 train_time:857615ms step_avg:89.89ms +[2025-08-22 20:00:51] [Rank 0] step:9541/10000 train_time:857615ms step_avg:89.89ms +[2025-08-22 20:00:53] [Rank 0] step:9561/10000 train_time:859507ms step_avg:89.90ms +[2025-08-22 20:00:53] [Rank 0] step:9561/10000 train_time:859507ms step_avg:89.90ms +[2025-08-22 20:00:55] [Rank 0] step:9581/10000 train_time:861407ms step_avg:89.91ms +[2025-08-22 20:00:55] [Rank 0] step:9581/10000 train_time:861407ms step_avg:89.91ms +[2025-08-22 20:00:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:00:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:01:11] [Rank 0] PRINT: step:9600/10000 val_loss:3.8125 svd_entropy: attn_qk:H=0.8670,top10E=0.10,eRank=333.4,q75/q25=88.11 attn_vo:H=0.5578,top10E=0.53,eRank=58.7,q75/q25=116.56 mlp_w1:H=0.7480,top10E=0.29,eRank=184.8,q75/q25=13.80 mlp_w2:H=0.8731,top10E=0.15,eRank=348.7,q75/q25=9.58 vo_prod:H=0.4618,top10E=0.69,eRank=29.4,q75/q25=10633.68 train_time:863411ms step_avg:89.94ms +[2025-08-22 20:01:11] [Rank 0] PRINT: step:9600/10000 val_loss:3.8125 svd_entropy: attn_qk:H=0.8670,top10E=0.10,eRank=333.4,q75/q25=88.11 attn_vo:H=0.5578,top10E=0.53,eRank=58.7,q75/q25=116.56 mlp_w1:H=0.7480,top10E=0.29,eRank=184.8,q75/q25=13.80 mlp_w2:H=0.8731,top10E=0.15,eRank=348.7,q75/q25=9.58 vo_prod:H=0.4618,top10E=0.69,eRank=29.4,q75/q25=10633.68 train_time:863411ms step_avg:89.94ms +[2025-08-22 20:01:11] [Rank 0] step:9601/10000 train_time:863426ms step_avg:89.93ms +[2025-08-22 20:01:11] [Rank 0] step:9601/10000 train_time:863426ms step_avg:89.93ms +[2025-08-22 20:01:13] [Rank 0] step:9621/10000 train_time:865236ms step_avg:89.93ms +[2025-08-22 20:01:13] [Rank 0] step:9621/10000 train_time:865236ms step_avg:89.93ms +[2025-08-22 20:01:15] [Rank 0] step:9641/10000 train_time:867132ms step_avg:89.94ms +[2025-08-22 20:01:15] [Rank 0] step:9641/10000 train_time:867132ms step_avg:89.94ms +[2025-08-22 20:01:17] [Rank 0] step:9661/10000 train_time:869057ms step_avg:89.96ms +[2025-08-22 20:01:17] [Rank 0] step:9661/10000 train_time:869057ms step_avg:89.96ms +[2025-08-22 20:01:18] [Rank 0] step:9681/10000 train_time:870972ms step_avg:89.97ms +[2025-08-22 20:01:18] [Rank 0] step:9681/10000 train_time:870972ms step_avg:89.97ms +[2025-08-22 20:01:20] [Rank 0] step:9701/10000 train_time:872902ms step_avg:89.98ms +[2025-08-22 20:01:20] [Rank 0] step:9701/10000 train_time:872902ms step_avg:89.98ms +[2025-08-22 20:01:22] [Rank 0] step:9721/10000 train_time:874812ms step_avg:89.99ms +[2025-08-22 20:01:22] [Rank 0] step:9721/10000 train_time:874812ms step_avg:89.99ms +[2025-08-22 
20:01:24] [Rank 0] step:9741/10000 train_time:876752ms step_avg:90.01ms +[2025-08-22 20:01:24] [Rank 0] step:9741/10000 train_time:876752ms step_avg:90.01ms +[2025-08-22 20:01:26] [Rank 0] step:9761/10000 train_time:878675ms step_avg:90.02ms +[2025-08-22 20:01:26] [Rank 0] step:9761/10000 train_time:878675ms step_avg:90.02ms +[2025-08-22 20:01:28] [Rank 0] step:9781/10000 train_time:880602ms step_avg:90.03ms +[2025-08-22 20:01:28] [Rank 0] step:9781/10000 train_time:880602ms step_avg:90.03ms +[2025-08-22 20:01:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:01:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:01:44] [Rank 0] PRINT: step:9800/10000 val_loss:3.8049 svd_entropy: attn_qk:H=0.8670,top10E=0.10,eRank=333.5,q75/q25=88.15 attn_vo:H=0.5582,top10E=0.53,eRank=58.9,q75/q25=116.73 mlp_w1:H=0.7482,top10E=0.29,eRank=185.1,q75/q25=13.79 mlp_w2:H=0.8733,top10E=0.15,eRank=349.1,q75/q25=9.58 vo_prod:H=0.4621,top10E=0.69,eRank=29.5,q75/q25=10747.63 train_time:882638ms step_avg:90.07ms +[2025-08-22 20:01:44] [Rank 0] PRINT: step:9800/10000 val_loss:3.8049 svd_entropy: attn_qk:H=0.8670,top10E=0.10,eRank=333.5,q75/q25=88.15 attn_vo:H=0.5582,top10E=0.53,eRank=58.9,q75/q25=116.73 mlp_w1:H=0.7482,top10E=0.29,eRank=185.1,q75/q25=13.79 mlp_w2:H=0.8733,top10E=0.15,eRank=349.1,q75/q25=9.58 vo_prod:H=0.4621,top10E=0.69,eRank=29.5,q75/q25=10747.63 train_time:882638ms step_avg:90.07ms +[2025-08-22 20:01:44] [Rank 0] step:9801/10000 train_time:882651ms step_avg:90.06ms +[2025-08-22 20:01:44] [Rank 0] step:9801/10000 train_time:882651ms step_avg:90.06ms +[2025-08-22 20:01:46] [Rank 0] step:9821/10000 train_time:884473ms step_avg:90.06ms +[2025-08-22 20:01:46] [Rank 0] step:9821/10000 train_time:884473ms step_avg:90.06ms +[2025-08-22 20:01:48] [Rank 0] step:9841/10000 train_time:886404ms 
step_avg:90.07ms +[2025-08-22 20:01:48] [Rank 0] step:9841/10000 train_time:886404ms step_avg:90.07ms +[2025-08-22 20:01:50] [Rank 0] step:9861/10000 train_time:888312ms step_avg:90.08ms +[2025-08-22 20:01:50] [Rank 0] step:9861/10000 train_time:888312ms step_avg:90.08ms +[2025-08-22 20:01:52] [Rank 0] step:9881/10000 train_time:890222ms step_avg:90.09ms +[2025-08-22 20:01:52] [Rank 0] step:9881/10000 train_time:890222ms step_avg:90.09ms +[2025-08-22 20:01:54] [Rank 0] step:9901/10000 train_time:892147ms step_avg:90.11ms +[2025-08-22 20:01:54] [Rank 0] step:9901/10000 train_time:892147ms step_avg:90.11ms +[2025-08-22 20:01:55] [Rank 0] step:9921/10000 train_time:894068ms step_avg:90.12ms +[2025-08-22 20:01:55] [Rank 0] step:9921/10000 train_time:894068ms step_avg:90.12ms +[2025-08-22 20:01:57] [Rank 0] step:9941/10000 train_time:895989ms step_avg:90.13ms +[2025-08-22 20:01:57] [Rank 0] step:9941/10000 train_time:895989ms step_avg:90.13ms +[2025-08-22 20:01:59] [Rank 0] step:9961/10000 train_time:897910ms step_avg:90.14ms +[2025-08-22 20:01:59] [Rank 0] step:9961/10000 train_time:897910ms step_avg:90.14ms +[2025-08-22 20:02:01] [Rank 0] step:9981/10000 train_time:899834ms step_avg:90.15ms +[2025-08-22 20:02:01] [Rank 0] step:9981/10000 train_time:899834ms step_avg:90.15ms +[2025-08-22 20:02:03] [Rank 0] step:10000/10000 train_time:901665ms step_avg:90.17ms +[2025-08-22 20:02:03] [Rank 0] step:10000/10000 train_time:901665ms step_avg:90.17ms +[2025-08-22 20:02:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:02:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:02:17] [Rank 0] PRINT: step:10000/10000 val_loss:3.7963 svd_entropy: attn_qk:H=0.8671,top10E=0.10,eRank=333.6,q75/q25=88.19 attn_vo:H=0.5584,top10E=0.53,eRank=59.0,q75/q25=116.90 mlp_w1:H=0.7484,top10E=0.29,eRank=185.2,q75/q25=13.78 mlp_w2:H=0.8734,top10E=0.15,eRank=349.4,q75/q25=9.56 vo_prod:H=0.4623,top10E=0.69,eRank=29.5,q75/q25=10790.55 train_time:901865ms step_avg:90.19ms +[2025-08-22 20:02:17] [Rank 0] PRINT: step:10000/10000 val_loss:3.7963 svd_entropy: attn_qk:H=0.8671,top10E=0.10,eRank=333.6,q75/q25=88.19 attn_vo:H=0.5584,top10E=0.53,eRank=59.0,q75/q25=116.90 mlp_w1:H=0.7484,top10E=0.29,eRank=185.2,q75/q25=13.78 mlp_w2:H=0.8734,top10E=0.15,eRank=349.4,q75/q25=9.56 vo_prod:H=0.4623,top10E=0.69,eRank=29.5,q75/q25=10790.55 train_time:901865ms step_avg:90.19ms +[2025-08-22 20:02:17] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 20:02:17 2025 --- +[2025-08-22 20:02:17] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 20:02:17 2025 --- +[2025-08-22 20:02:17] [Rank 0] PRINT: Peak memory allocated: 11530 MiB reserved: 15796 MiB +[2025-08-22 20:02:17] [Rank 0] PRINT: Peak memory allocated: 11530 MiB reserved: 15796 MiB diff --git a/logs_svd_gated/mode_2_param_gated_seed_41/config.json b/logs_svd_gated/mode_2_param_gated_seed_41/config.json new file mode 100644 index 0000000000000000000000000000000000000000..6cd2c952131c8b0aff5066dcf2785989f3e94b0f --- /dev/null +++ b/logs_svd_gated/mode_2_param_gated_seed_41/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 41, + "optimizer_mode": 2, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "d15196ea-f6c3-4dc5-b8ca-5796fa4e65ee", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_2_param_gated_seed_41/training_log_d15196ea-f6c3-4dc5-b8ca-5796fa4e65ee.txt b/logs_svd_gated/mode_2_param_gated_seed_41/training_log_d15196ea-f6c3-4dc5-b8ca-5796fa4e65ee.txt new file mode 100644 index 0000000000000000000000000000000000000000..aff55d25a34d15bca579df741d80782601b0e208 --- /dev/null +++ b/logs_svd_gated/mode_2_param_gated_seed_41/training_log_d15196ea-f6c3-4dc5-b8ca-5796fa4e65ee.txt @@ -0,0 +1,2926 @@ +[2025-08-22 09:32:50] [Rank 0] PRINT: --- Script Start: Fri Aug 22 09:32:50 2025 --- +[2025-08-22 09:32:50] [Rank 0] PRINT: --- Script Start: Fri Aug 22 09:32:50 2025 --- +[2025-08-22 09:32:50] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=2, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 09:32:50] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=2, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 09:32:50] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 09:32:50] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 09:32:50] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 09:32:50] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 09:32:50] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_2_param_gated_seed_41 +[2025-08-22 09:32:50] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_2_param_gated_seed_41 +[2025-08-22 09:32:50] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 09:32:50] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 09:32:50] [Rank 0] PRINT: Constructing model... +[2025-08-22 09:32:50] [Rank 0] PRINT: Constructing model... +[2025-08-22 09:32:52] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 09:32:52] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 09:32:52] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 09:32:52] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 09:32:52] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 09:32:52] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 09:32:52] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 2 +[2025-08-22 09:32:52] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 2 +[2025-08-22 09:32:52] [Rank 0] PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: 0.05). +[2025-08-22 09:32:52] [Rank 0] PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: 0.05). +[2025-08-22 09:32:52] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 09:32:52] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 09:32:52] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-08-22 09:32:52] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-08-22 09:32:52] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 09:32:52] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 09:32:52] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 09:32:52] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 09:32:52] [Rank 0] PRINT: Starting warmup... +[2025-08-22 09:32:52] [Rank 0] PRINT: Starting warmup... +[2025-08-22 09:34:17] [Rank 0] PRINT: Warmup complete. +[2025-08-22 09:34:17] [Rank 0] PRINT: Warmup complete. +[2025-08-22 09:34:17] [Rank 0] PRINT: Starting training... +[2025-08-22 09:34:17] [Rank 0] PRINT: Starting training... 
+[2025-08-22 09:34:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:34:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:35:43] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 09:35:43] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 09:35:44] [Rank 0] step:21/10000 train_time:1631ms step_avg:77.67ms +[2025-08-22 09:35:44] [Rank 0] step:21/10000 train_time:1631ms step_avg:77.67ms +[2025-08-22 09:35:46] [Rank 0] step:41/10000 train_time:3301ms step_avg:80.51ms +[2025-08-22 09:35:46] [Rank 0] step:41/10000 train_time:3301ms step_avg:80.51ms +[2025-08-22 09:35:48] [Rank 0] step:61/10000 train_time:4971ms step_avg:81.49ms +[2025-08-22 09:35:48] [Rank 0] step:61/10000 train_time:4971ms step_avg:81.49ms +[2025-08-22 09:35:49] [Rank 0] step:81/10000 train_time:6642ms step_avg:82.00ms +[2025-08-22 09:35:49] [Rank 0] step:81/10000 train_time:6642ms step_avg:82.00ms +[2025-08-22 09:35:51] [Rank 0] step:101/10000 train_time:8313ms step_avg:82.31ms +[2025-08-22 09:35:51] [Rank 0] step:101/10000 train_time:8313ms step_avg:82.31ms +[2025-08-22 09:35:53] [Rank 0] step:121/10000 train_time:9987ms step_avg:82.54ms +[2025-08-22 09:35:53] [Rank 0] step:121/10000 
train_time:9987ms step_avg:82.54ms +[2025-08-22 09:35:54] [Rank 0] step:141/10000 train_time:11661ms step_avg:82.70ms +[2025-08-22 09:35:54] [Rank 0] step:141/10000 train_time:11661ms step_avg:82.70ms +[2025-08-22 09:35:56] [Rank 0] step:161/10000 train_time:13337ms step_avg:82.84ms +[2025-08-22 09:35:56] [Rank 0] step:161/10000 train_time:13337ms step_avg:82.84ms +[2025-08-22 09:35:58] [Rank 0] step:181/10000 train_time:15012ms step_avg:82.94ms +[2025-08-22 09:35:58] [Rank 0] step:181/10000 train_time:15012ms step_avg:82.94ms +[2025-08-22 09:35:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:35:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:36:13] [Rank 0] PRINT: step:200/10000 val_loss:6.6249 svd_entropy: attn_qk:H=0.5202,top10E=0.63,eRank=107.0,q75/q25=27.20 attn_vo:H=0.6970,top10E=0.23,eRank=175.3,q75/q25=inf mlp_w1:H=0.3801,top10E=0.85,eRank=16.5,q75/q25=5.43 mlp_w2:H=0.3646,top10E=0.85,eRank=13.4,q75/q25=7.58 vo_prod:H=0.4201,top10E=0.53,eRank=34.3,q75/q25=inf train_time:16772ms step_avg:83.86ms +[2025-08-22 09:36:13] [Rank 0] PRINT: step:200/10000 val_loss:6.6249 svd_entropy: attn_qk:H=0.5202,top10E=0.63,eRank=107.0,q75/q25=27.20 attn_vo:H=0.6970,top10E=0.23,eRank=175.3,q75/q25=inf mlp_w1:H=0.3801,top10E=0.85,eRank=16.5,q75/q25=5.43 mlp_w2:H=0.3646,top10E=0.85,eRank=13.4,q75/q25=7.58 vo_prod:H=0.4201,top10E=0.53,eRank=34.3,q75/q25=inf train_time:16772ms step_avg:83.86ms +[2025-08-22 09:36:13] [Rank 0] step:201/10000 train_time:16787ms step_avg:83.52ms +[2025-08-22 09:36:13] [Rank 0] step:201/10000 train_time:16787ms step_avg:83.52ms +[2025-08-22 09:36:15] [Rank 0] step:221/10000 train_time:18387ms step_avg:83.20ms +[2025-08-22 09:36:15] [Rank 0] step:221/10000 train_time:18387ms step_avg:83.20ms +[2025-08-22 09:36:17] [Rank 0] step:241/10000 train_time:20060ms 
step_avg:83.24ms +[2025-08-22 09:36:17] [Rank 0] step:241/10000 train_time:20060ms step_avg:83.24ms +[2025-08-22 09:36:18] [Rank 0] step:261/10000 train_time:21734ms step_avg:83.27ms +[2025-08-22 09:36:18] [Rank 0] step:261/10000 train_time:21734ms step_avg:83.27ms +[2025-08-22 09:36:20] [Rank 0] step:281/10000 train_time:23409ms step_avg:83.31ms +[2025-08-22 09:36:20] [Rank 0] step:281/10000 train_time:23409ms step_avg:83.31ms +[2025-08-22 09:36:22] [Rank 0] step:301/10000 train_time:25084ms step_avg:83.34ms +[2025-08-22 09:36:22] [Rank 0] step:301/10000 train_time:25084ms step_avg:83.34ms +[2025-08-22 09:36:23] [Rank 0] step:321/10000 train_time:26811ms step_avg:83.52ms +[2025-08-22 09:36:23] [Rank 0] step:321/10000 train_time:26811ms step_avg:83.52ms +[2025-08-22 09:36:25] [Rank 0] step:341/10000 train_time:28487ms step_avg:83.54ms +[2025-08-22 09:36:25] [Rank 0] step:341/10000 train_time:28487ms step_avg:83.54ms +[2025-08-22 09:36:27] [Rank 0] step:361/10000 train_time:30389ms step_avg:84.18ms +[2025-08-22 09:36:27] [Rank 0] step:361/10000 train_time:30389ms step_avg:84.18ms +[2025-08-22 09:36:28] [Rank 0] step:381/10000 train_time:31899ms step_avg:83.72ms +[2025-08-22 09:36:28] [Rank 0] step:381/10000 train_time:31899ms step_avg:83.72ms +[2025-08-22 09:36:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:36:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:36:44] [Rank 0] PRINT: step:400/10000 val_loss:5.9548 svd_entropy: attn_qk:H=0.6333,top10E=0.48,eRank=123.7,q75/q25=52.37 attn_vo:H=0.6575,top10E=0.25,eRank=146.8,q75/q25=inf mlp_w1:H=0.5517,top10E=0.60,eRank=48.1,q75/q25=8.79 mlp_w2:H=0.5408,top10E=0.61,eRank=41.9,q75/q25=9.79 vo_prod:H=0.4389,top10E=0.47,eRank=41.8,q75/q25=inf train_time:33680ms step_avg:84.20ms +[2025-08-22 09:36:44] [Rank 0] PRINT: step:400/10000 val_loss:5.9548 svd_entropy: attn_qk:H=0.6333,top10E=0.48,eRank=123.7,q75/q25=52.37 attn_vo:H=0.6575,top10E=0.25,eRank=146.8,q75/q25=inf mlp_w1:H=0.5517,top10E=0.60,eRank=48.1,q75/q25=8.79 mlp_w2:H=0.5408,top10E=0.61,eRank=41.9,q75/q25=9.79 vo_prod:H=0.4389,top10E=0.47,eRank=41.8,q75/q25=inf train_time:33680ms step_avg:84.20ms +[2025-08-22 09:36:44] [Rank 0] step:401/10000 train_time:33695ms step_avg:84.03ms +[2025-08-22 09:36:44] [Rank 0] step:401/10000 train_time:33695ms step_avg:84.03ms +[2025-08-22 09:36:45] [Rank 0] step:421/10000 train_time:35284ms step_avg:83.81ms +[2025-08-22 09:36:45] [Rank 0] step:421/10000 train_time:35284ms step_avg:83.81ms +[2025-08-22 09:36:47] [Rank 0] step:441/10000 train_time:36956ms step_avg:83.80ms +[2025-08-22 09:36:47] [Rank 0] step:441/10000 train_time:36956ms step_avg:83.80ms +[2025-08-22 09:36:49] [Rank 0] step:461/10000 train_time:38631ms step_avg:83.80ms +[2025-08-22 09:36:49] [Rank 0] step:461/10000 train_time:38631ms step_avg:83.80ms +[2025-08-22 09:36:50] [Rank 0] step:481/10000 train_time:40303ms step_avg:83.79ms +[2025-08-22 09:36:50] [Rank 0] step:481/10000 train_time:40303ms step_avg:83.79ms +[2025-08-22 09:36:52] [Rank 0] step:501/10000 train_time:41977ms step_avg:83.79ms +[2025-08-22 09:36:52] [Rank 0] step:501/10000 train_time:41977ms step_avg:83.79ms +[2025-08-22 09:36:54] [Rank 0] step:521/10000 train_time:43651ms step_avg:83.78ms +[2025-08-22 09:36:54] [Rank 0] step:521/10000 train_time:43651ms step_avg:83.78ms +[2025-08-22 09:36:55] [Rank 0] step:541/10000 train_time:45325ms 
step_avg:83.78ms +[2025-08-22 09:36:55] [Rank 0] step:541/10000 train_time:45325ms step_avg:83.78ms +[2025-08-22 09:36:57] [Rank 0] step:561/10000 train_time:47000ms step_avg:83.78ms +[2025-08-22 09:36:57] [Rank 0] step:561/10000 train_time:47000ms step_avg:83.78ms +[2025-08-22 09:36:59] [Rank 0] step:581/10000 train_time:48675ms step_avg:83.78ms +[2025-08-22 09:36:59] [Rank 0] step:581/10000 train_time:48675ms step_avg:83.78ms +[2025-08-22 09:37:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:37:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:37:14] [Rank 0] PRINT: step:600/10000 val_loss:5.5712 svd_entropy: attn_qk:H=0.6535,top10E=0.44,eRank=128.5,q75/q25=48.58 attn_vo:H=0.6563,top10E=0.22,eRank=145.3,q75/q25=inf mlp_w1:H=0.6610,top10E=0.42,eRank=90.3,q75/q25=6.67 mlp_w2:H=0.6911,top10E=0.39,eRank=106.8,q75/q25=8.77 vo_prod:H=0.4681,top10E=0.40,eRank=48.3,q75/q25=inf train_time:50432ms step_avg:84.05ms +[2025-08-22 09:37:14] [Rank 0] PRINT: step:600/10000 val_loss:5.5712 svd_entropy: attn_qk:H=0.6535,top10E=0.44,eRank=128.5,q75/q25=48.58 attn_vo:H=0.6563,top10E=0.22,eRank=145.3,q75/q25=inf mlp_w1:H=0.6610,top10E=0.42,eRank=90.3,q75/q25=6.67 mlp_w2:H=0.6911,top10E=0.39,eRank=106.8,q75/q25=8.77 vo_prod:H=0.4681,top10E=0.40,eRank=48.3,q75/q25=inf train_time:50432ms step_avg:84.05ms +[2025-08-22 09:37:14] [Rank 0] step:601/10000 train_time:50448ms step_avg:83.94ms +[2025-08-22 09:37:14] [Rank 0] step:601/10000 train_time:50448ms step_avg:83.94ms +[2025-08-22 09:37:16] [Rank 0] step:621/10000 train_time:52050ms step_avg:83.82ms +[2025-08-22 09:37:16] [Rank 0] step:621/10000 train_time:52050ms step_avg:83.82ms +[2025-08-22 09:37:17] [Rank 0] step:641/10000 train_time:53720ms step_avg:83.81ms +[2025-08-22 09:37:17] [Rank 0] step:641/10000 train_time:53720ms step_avg:83.81ms 
+[2025-08-22 09:37:19] [Rank 0] step:661/10000 train_time:55389ms step_avg:83.80ms +[2025-08-22 09:37:19] [Rank 0] step:661/10000 train_time:55389ms step_avg:83.80ms +[2025-08-22 09:37:21] [Rank 0] step:681/10000 train_time:57058ms step_avg:83.79ms +[2025-08-22 09:37:21] [Rank 0] step:681/10000 train_time:57058ms step_avg:83.79ms +[2025-08-22 09:37:22] [Rank 0] step:701/10000 train_time:58728ms step_avg:83.78ms +[2025-08-22 09:37:22] [Rank 0] step:701/10000 train_time:58728ms step_avg:83.78ms +[2025-08-22 09:37:24] [Rank 0] step:721/10000 train_time:60400ms step_avg:83.77ms +[2025-08-22 09:37:24] [Rank 0] step:721/10000 train_time:60400ms step_avg:83.77ms +[2025-08-22 09:37:26] [Rank 0] step:741/10000 train_time:62069ms step_avg:83.76ms +[2025-08-22 09:37:26] [Rank 0] step:741/10000 train_time:62069ms step_avg:83.76ms +[2025-08-22 09:37:28] [Rank 0] step:761/10000 train_time:63749ms step_avg:83.77ms +[2025-08-22 09:37:28] [Rank 0] step:761/10000 train_time:63749ms step_avg:83.77ms +[2025-08-22 09:37:29] [Rank 0] step:781/10000 train_time:65432ms step_avg:83.78ms +[2025-08-22 09:37:29] [Rank 0] step:781/10000 train_time:65432ms step_avg:83.78ms +[2025-08-22 09:37:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:37:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:37:44] [Rank 0] PRINT: step:800/10000 val_loss:5.2897 svd_entropy: attn_qk:H=0.6693,top10E=0.41,eRank=133.6,q75/q25=47.82 attn_vo:H=0.6696,top10E=0.19,eRank=153.7,q75/q25=inf mlp_w1:H=0.7084,top10E=0.34,eRank=119.7,q75/q25=6.97 mlp_w2:H=0.7648,top10E=0.28,eRank=165.0,q75/q25=9.02 vo_prod:H=0.4923,top10E=0.35,eRank=55.2,q75/q25=inf train_time:67250ms step_avg:84.06ms +[2025-08-22 09:37:44] [Rank 0] PRINT: step:800/10000 val_loss:5.2897 svd_entropy: attn_qk:H=0.6693,top10E=0.41,eRank=133.6,q75/q25=47.82 attn_vo:H=0.6696,top10E=0.19,eRank=153.7,q75/q25=inf mlp_w1:H=0.7084,top10E=0.34,eRank=119.7,q75/q25=6.97 mlp_w2:H=0.7648,top10E=0.28,eRank=165.0,q75/q25=9.02 vo_prod:H=0.4923,top10E=0.35,eRank=55.2,q75/q25=inf train_time:67250ms step_avg:84.06ms +[2025-08-22 09:37:45] [Rank 0] step:801/10000 train_time:67264ms step_avg:83.98ms +[2025-08-22 09:37:45] [Rank 0] step:801/10000 train_time:67264ms step_avg:83.98ms +[2025-08-22 09:37:46] [Rank 0] step:821/10000 train_time:68879ms step_avg:83.90ms +[2025-08-22 09:37:46] [Rank 0] step:821/10000 train_time:68879ms step_avg:83.90ms +[2025-08-22 09:37:48] [Rank 0] step:841/10000 train_time:70559ms step_avg:83.90ms +[2025-08-22 09:37:48] [Rank 0] step:841/10000 train_time:70559ms step_avg:83.90ms +[2025-08-22 09:37:50] [Rank 0] step:861/10000 train_time:72241ms step_avg:83.90ms +[2025-08-22 09:37:50] [Rank 0] step:861/10000 train_time:72241ms step_avg:83.90ms +[2025-08-22 09:37:51] [Rank 0] step:881/10000 train_time:73925ms step_avg:83.91ms +[2025-08-22 09:37:51] [Rank 0] step:881/10000 train_time:73925ms step_avg:83.91ms +[2025-08-22 09:37:53] [Rank 0] step:901/10000 train_time:75609ms step_avg:83.92ms +[2025-08-22 09:37:53] [Rank 0] step:901/10000 train_time:75609ms step_avg:83.92ms +[2025-08-22 09:37:55] [Rank 0] step:921/10000 train_time:77293ms step_avg:83.92ms +[2025-08-22 09:37:55] [Rank 0] step:921/10000 train_time:77293ms step_avg:83.92ms +[2025-08-22 09:37:56] [Rank 0] step:941/10000 train_time:78977ms 
step_avg:83.93ms +[2025-08-22 09:37:56] [Rank 0] step:941/10000 train_time:78977ms step_avg:83.93ms +[2025-08-22 09:37:58] [Rank 0] step:961/10000 train_time:80663ms step_avg:83.94ms +[2025-08-22 09:37:58] [Rank 0] step:961/10000 train_time:80663ms step_avg:83.94ms +[2025-08-22 09:38:00] [Rank 0] step:981/10000 train_time:82347ms step_avg:83.94ms +[2025-08-22 09:38:00] [Rank 0] step:981/10000 train_time:82347ms step_avg:83.94ms +[2025-08-22 09:38:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:38:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:38:15] [Rank 0] PRINT: step:1000/10000 val_loss:5.1169 svd_entropy: attn_qk:H=0.6818,top10E=0.38,eRank=138.5,q75/q25=49.76 attn_vo:H=0.6867,top10E=0.17,eRank=166.1,q75/q25=inf mlp_w1:H=0.7330,top10E=0.30,eRank=139.6,q75/q25=7.56 mlp_w2:H=0.8027,top10E=0.23,eRank=209.4,q75/q25=9.49 vo_prod:H=0.5117,top10E=0.31,eRank=62.0,q75/q25=inf train_time:84117ms step_avg:84.12ms +[2025-08-22 09:38:15] [Rank 0] PRINT: step:1000/10000 val_loss:5.1169 svd_entropy: attn_qk:H=0.6818,top10E=0.38,eRank=138.5,q75/q25=49.76 attn_vo:H=0.6867,top10E=0.17,eRank=166.1,q75/q25=inf mlp_w1:H=0.7330,top10E=0.30,eRank=139.6,q75/q25=7.56 mlp_w2:H=0.8027,top10E=0.23,eRank=209.4,q75/q25=9.49 vo_prod:H=0.5117,top10E=0.31,eRank=62.0,q75/q25=inf train_time:84117ms step_avg:84.12ms +[2025-08-22 09:38:15] [Rank 0] step:1001/10000 train_time:84133ms step_avg:84.05ms +[2025-08-22 09:38:15] [Rank 0] step:1001/10000 train_time:84133ms step_avg:84.05ms +[2025-08-22 09:38:17] [Rank 0] step:1021/10000 train_time:85747ms step_avg:83.98ms +[2025-08-22 09:38:17] [Rank 0] step:1021/10000 train_time:85747ms step_avg:83.98ms +[2025-08-22 09:38:18] [Rank 0] step:1041/10000 train_time:87430ms step_avg:83.99ms +[2025-08-22 09:38:18] [Rank 0] step:1041/10000 train_time:87430ms 
step_avg:83.99ms +[2025-08-22 09:38:20] [Rank 0] step:1061/10000 train_time:89115ms step_avg:83.99ms +[2025-08-22 09:38:20] [Rank 0] step:1061/10000 train_time:89115ms step_avg:83.99ms +[2025-08-22 09:38:22] [Rank 0] step:1081/10000 train_time:90800ms step_avg:84.00ms +[2025-08-22 09:38:22] [Rank 0] step:1081/10000 train_time:90800ms step_avg:84.00ms +[2025-08-22 09:38:23] [Rank 0] step:1101/10000 train_time:92486ms step_avg:84.00ms +[2025-08-22 09:38:23] [Rank 0] step:1101/10000 train_time:92486ms step_avg:84.00ms +[2025-08-22 09:38:25] [Rank 0] step:1121/10000 train_time:94174ms step_avg:84.01ms +[2025-08-22 09:38:25] [Rank 0] step:1121/10000 train_time:94174ms step_avg:84.01ms +[2025-08-22 09:38:27] [Rank 0] step:1141/10000 train_time:95863ms step_avg:84.02ms +[2025-08-22 09:38:27] [Rank 0] step:1141/10000 train_time:95863ms step_avg:84.02ms +[2025-08-22 09:38:29] [Rank 0] step:1161/10000 train_time:97552ms step_avg:84.02ms +[2025-08-22 09:38:29] [Rank 0] step:1161/10000 train_time:97552ms step_avg:84.02ms +[2025-08-22 09:38:30] [Rank 0] step:1181/10000 train_time:99243ms step_avg:84.03ms +[2025-08-22 09:38:30] [Rank 0] step:1181/10000 train_time:99243ms step_avg:84.03ms +[2025-08-22 09:38:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:38:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:38:45] [Rank 0] PRINT: step:1200/10000 val_loss:4.9766 svd_entropy: attn_qk:H=0.6914,top10E=0.36,eRank=142.8,q75/q25=54.71 attn_vo:H=0.7034,top10E=0.15,eRank=180.2,q75/q25=inf mlp_w1:H=0.7507,top10E=0.28,eRank=155.8,q75/q25=8.17 mlp_w2:H=0.8262,top10E=0.20,eRank=243.5,q75/q25=9.90 vo_prod:H=0.5284,top10E=0.28,eRank=69.1,q75/q25=inf train_time:101017ms step_avg:84.18ms +[2025-08-22 09:38:45] [Rank 0] PRINT: step:1200/10000 val_loss:4.9766 svd_entropy: attn_qk:H=0.6914,top10E=0.36,eRank=142.8,q75/q25=54.71 attn_vo:H=0.7034,top10E=0.15,eRank=180.2,q75/q25=inf mlp_w1:H=0.7507,top10E=0.28,eRank=155.8,q75/q25=8.17 mlp_w2:H=0.8262,top10E=0.20,eRank=243.5,q75/q25=9.90 vo_prod:H=0.5284,top10E=0.28,eRank=69.1,q75/q25=inf train_time:101017ms step_avg:84.18ms +[2025-08-22 09:38:46] [Rank 0] step:1201/10000 train_time:101032ms step_avg:84.12ms +[2025-08-22 09:38:46] [Rank 0] step:1201/10000 train_time:101032ms step_avg:84.12ms +[2025-08-22 09:38:47] [Rank 0] step:1221/10000 train_time:102637ms step_avg:84.06ms +[2025-08-22 09:38:47] [Rank 0] step:1221/10000 train_time:102637ms step_avg:84.06ms +[2025-08-22 09:38:49] [Rank 0] step:1241/10000 train_time:104320ms step_avg:84.06ms +[2025-08-22 09:38:49] [Rank 0] step:1241/10000 train_time:104320ms step_avg:84.06ms +[2025-08-22 09:38:51] [Rank 0] step:1261/10000 train_time:106005ms step_avg:84.06ms +[2025-08-22 09:38:51] [Rank 0] step:1261/10000 train_time:106005ms step_avg:84.06ms +[2025-08-22 09:38:52] [Rank 0] step:1281/10000 train_time:107691ms step_avg:84.07ms +[2025-08-22 09:38:52] [Rank 0] step:1281/10000 train_time:107691ms step_avg:84.07ms +[2025-08-22 09:38:54] [Rank 0] step:1301/10000 train_time:109377ms step_avg:84.07ms +[2025-08-22 09:38:54] [Rank 0] step:1301/10000 train_time:109377ms step_avg:84.07ms +[2025-08-22 09:38:56] [Rank 0] step:1321/10000 train_time:111063ms step_avg:84.07ms +[2025-08-22 09:38:56] [Rank 0] step:1321/10000 train_time:111063ms step_avg:84.07ms +[2025-08-22 09:38:57] [Rank 0] 
step:1341/10000 train_time:112749ms step_avg:84.08ms +[2025-08-22 09:38:57] [Rank 0] step:1341/10000 train_time:112749ms step_avg:84.08ms +[2025-08-22 09:38:59] [Rank 0] step:1361/10000 train_time:114436ms step_avg:84.08ms +[2025-08-22 09:38:59] [Rank 0] step:1361/10000 train_time:114436ms step_avg:84.08ms +[2025-08-22 09:39:01] [Rank 0] step:1381/10000 train_time:116122ms step_avg:84.09ms +[2025-08-22 09:39:01] [Rank 0] step:1381/10000 train_time:116122ms step_avg:84.09ms +[2025-08-22 09:39:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:39:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:39:16] [Rank 0] PRINT: step:1400/10000 val_loss:4.8897 svd_entropy: attn_qk:H=0.6999,top10E=0.35,eRank=146.8,q75/q25=58.44 attn_vo:H=0.7180,top10E=0.14,eRank=194.5,q75/q25=inf mlp_w1:H=0.7646,top10E=0.26,eRank=169.9,q75/q25=8.77 mlp_w2:H=0.8432,top10E=0.18,eRank=272.1,q75/q25=9.99 vo_prod:H=0.5435,top10E=0.26,eRank=76.4,q75/q25=inf train_time:117892ms step_avg:84.21ms +[2025-08-22 09:39:16] [Rank 0] PRINT: step:1400/10000 val_loss:4.8897 svd_entropy: attn_qk:H=0.6999,top10E=0.35,eRank=146.8,q75/q25=58.44 attn_vo:H=0.7180,top10E=0.14,eRank=194.5,q75/q25=inf mlp_w1:H=0.7646,top10E=0.26,eRank=169.9,q75/q25=8.77 mlp_w2:H=0.8432,top10E=0.18,eRank=272.1,q75/q25=9.99 vo_prod:H=0.5435,top10E=0.26,eRank=76.4,q75/q25=inf train_time:117892ms step_avg:84.21ms +[2025-08-22 09:39:16] [Rank 0] step:1401/10000 train_time:117907ms step_avg:84.16ms +[2025-08-22 09:39:16] [Rank 0] step:1401/10000 train_time:117907ms step_avg:84.16ms +[2025-08-22 09:39:18] [Rank 0] step:1421/10000 train_time:119512ms step_avg:84.10ms +[2025-08-22 09:39:18] [Rank 0] step:1421/10000 train_time:119512ms step_avg:84.10ms +[2025-08-22 09:39:19] [Rank 0] step:1441/10000 train_time:121193ms step_avg:84.10ms +[2025-08-22 09:39:19] 
[Rank 0] step:1441/10000 train_time:121193ms step_avg:84.10ms +[2025-08-22 09:39:21] [Rank 0] step:1461/10000 train_time:122877ms step_avg:84.10ms +[2025-08-22 09:39:21] [Rank 0] step:1461/10000 train_time:122877ms step_avg:84.10ms +[2025-08-22 09:39:23] [Rank 0] step:1481/10000 train_time:124560ms step_avg:84.11ms +[2025-08-22 09:39:23] [Rank 0] step:1481/10000 train_time:124560ms step_avg:84.11ms +[2025-08-22 09:39:24] [Rank 0] step:1501/10000 train_time:126251ms step_avg:84.11ms +[2025-08-22 09:39:24] [Rank 0] step:1501/10000 train_time:126251ms step_avg:84.11ms +[2025-08-22 09:39:26] [Rank 0] step:1521/10000 train_time:127945ms step_avg:84.12ms +[2025-08-22 09:39:26] [Rank 0] step:1521/10000 train_time:127945ms step_avg:84.12ms +[2025-08-22 09:39:28] [Rank 0] step:1541/10000 train_time:129640ms step_avg:84.13ms +[2025-08-22 09:39:28] [Rank 0] step:1541/10000 train_time:129640ms step_avg:84.13ms +[2025-08-22 09:39:30] [Rank 0] step:1561/10000 train_time:131336ms step_avg:84.14ms +[2025-08-22 09:39:30] [Rank 0] step:1561/10000 train_time:131336ms step_avg:84.14ms +[2025-08-22 09:39:31] [Rank 0] step:1581/10000 train_time:133032ms step_avg:84.14ms +[2025-08-22 09:39:31] [Rank 0] step:1581/10000 train_time:133032ms step_avg:84.14ms +[2025-08-22 09:39:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:39:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:39:46] [Rank 0] PRINT: step:1600/10000 val_loss:4.7835 svd_entropy: attn_qk:H=0.7069,top10E=0.34,eRank=150.4,q75/q25=62.68 attn_vo:H=0.7305,top10E=0.13,eRank=208.5,q75/q25=inf mlp_w1:H=0.7760,top10E=0.25,eRank=182.4,q75/q25=9.25 mlp_w2:H=0.8561,top10E=0.16,eRank=296.2,q75/q25=9.90 vo_prod:H=0.5568,top10E=0.25,eRank=83.7,q75/q25=inf train_time:134814ms step_avg:84.26ms +[2025-08-22 09:39:46] [Rank 0] PRINT: step:1600/10000 val_loss:4.7835 svd_entropy: attn_qk:H=0.7069,top10E=0.34,eRank=150.4,q75/q25=62.68 attn_vo:H=0.7305,top10E=0.13,eRank=208.5,q75/q25=inf mlp_w1:H=0.7760,top10E=0.25,eRank=182.4,q75/q25=9.25 mlp_w2:H=0.8561,top10E=0.16,eRank=296.2,q75/q25=9.90 vo_prod:H=0.5568,top10E=0.25,eRank=83.7,q75/q25=inf train_time:134814ms step_avg:84.26ms +[2025-08-22 09:39:46] [Rank 0] step:1601/10000 train_time:134830ms step_avg:84.22ms +[2025-08-22 09:39:46] [Rank 0] step:1601/10000 train_time:134830ms step_avg:84.22ms +[2025-08-22 09:39:48] [Rank 0] step:1621/10000 train_time:136437ms step_avg:84.17ms +[2025-08-22 09:39:48] [Rank 0] step:1621/10000 train_time:136437ms step_avg:84.17ms +[2025-08-22 09:39:50] [Rank 0] step:1641/10000 train_time:138132ms step_avg:84.18ms +[2025-08-22 09:39:50] [Rank 0] step:1641/10000 train_time:138132ms step_avg:84.18ms +[2025-08-22 09:39:51] [Rank 0] step:1661/10000 train_time:139828ms step_avg:84.18ms +[2025-08-22 09:39:51] [Rank 0] step:1661/10000 train_time:139828ms step_avg:84.18ms +[2025-08-22 09:39:53] [Rank 0] step:1681/10000 train_time:141524ms step_avg:84.19ms +[2025-08-22 09:39:53] [Rank 0] step:1681/10000 train_time:141524ms step_avg:84.19ms +[2025-08-22 09:39:55] [Rank 0] step:1701/10000 train_time:143222ms step_avg:84.20ms +[2025-08-22 09:39:55] [Rank 0] step:1701/10000 train_time:143222ms step_avg:84.20ms +[2025-08-22 09:39:56] [Rank 0] step:1721/10000 train_time:144920ms step_avg:84.21ms +[2025-08-22 09:39:56] [Rank 0] step:1721/10000 train_time:144920ms step_avg:84.21ms +[2025-08-22 09:39:58] [Rank 0] 
step:1741/10000 train_time:146619ms step_avg:84.22ms +[2025-08-22 09:39:58] [Rank 0] step:1741/10000 train_time:146619ms step_avg:84.22ms +[2025-08-22 09:40:00] [Rank 0] step:1761/10000 train_time:148318ms step_avg:84.22ms +[2025-08-22 09:40:00] [Rank 0] step:1761/10000 train_time:148318ms step_avg:84.22ms +[2025-08-22 09:40:02] [Rank 0] step:1781/10000 train_time:150019ms step_avg:84.23ms +[2025-08-22 09:40:02] [Rank 0] step:1781/10000 train_time:150019ms step_avg:84.23ms +[2025-08-22 09:40:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:40:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:40:17] [Rank 0] PRINT: step:1800/10000 val_loss:4.6648 svd_entropy: attn_qk:H=0.7131,top10E=0.33,eRank=153.8,q75/q25=66.34 attn_vo:H=0.7411,top10E=0.12,eRank=221.7,q75/q25=inf mlp_w1:H=0.7856,top10E=0.23,eRank=193.7,q75/q25=9.66 mlp_w2:H=0.8666,top10E=0.15,eRank=317.4,q75/q25=9.58 vo_prod:H=0.5681,top10E=0.23,eRank=90.9,q75/q25=inf train_time:151804ms step_avg:84.34ms +[2025-08-22 09:40:17] [Rank 0] PRINT: step:1800/10000 val_loss:4.6648 svd_entropy: attn_qk:H=0.7131,top10E=0.33,eRank=153.8,q75/q25=66.34 attn_vo:H=0.7411,top10E=0.12,eRank=221.7,q75/q25=inf mlp_w1:H=0.7856,top10E=0.23,eRank=193.7,q75/q25=9.66 mlp_w2:H=0.8666,top10E=0.15,eRank=317.4,q75/q25=9.58 vo_prod:H=0.5681,top10E=0.23,eRank=90.9,q75/q25=inf train_time:151804ms step_avg:84.34ms +[2025-08-22 09:40:17] [Rank 0] step:1801/10000 train_time:151819ms step_avg:84.30ms +[2025-08-22 09:40:17] [Rank 0] step:1801/10000 train_time:151819ms step_avg:84.30ms +[2025-08-22 09:40:19] [Rank 0] step:1821/10000 train_time:153440ms step_avg:84.26ms +[2025-08-22 09:40:19] [Rank 0] step:1821/10000 train_time:153440ms step_avg:84.26ms +[2025-08-22 09:40:20] [Rank 0] step:1841/10000 train_time:155138ms step_avg:84.27ms +[2025-08-22 09:40:20] 
[Rank 0] step:1841/10000 train_time:155138ms step_avg:84.27ms +[2025-08-22 09:40:22] [Rank 0] step:1861/10000 train_time:156834ms step_avg:84.27ms +[2025-08-22 09:40:22] [Rank 0] step:1861/10000 train_time:156834ms step_avg:84.27ms +[2025-08-22 09:40:24] [Rank 0] step:1881/10000 train_time:158530ms step_avg:84.28ms +[2025-08-22 09:40:24] [Rank 0] step:1881/10000 train_time:158530ms step_avg:84.28ms +[2025-08-22 09:40:25] [Rank 0] step:1901/10000 train_time:160226ms step_avg:84.29ms +[2025-08-22 09:40:25] [Rank 0] step:1901/10000 train_time:160226ms step_avg:84.29ms +[2025-08-22 09:40:27] [Rank 0] step:1921/10000 train_time:161923ms step_avg:84.29ms +[2025-08-22 09:40:27] [Rank 0] step:1921/10000 train_time:161923ms step_avg:84.29ms +[2025-08-22 09:40:29] [Rank 0] step:1941/10000 train_time:163627ms step_avg:84.30ms +[2025-08-22 09:40:29] [Rank 0] step:1941/10000 train_time:163627ms step_avg:84.30ms +[2025-08-22 09:40:30] [Rank 0] step:1961/10000 train_time:165324ms step_avg:84.31ms +[2025-08-22 09:40:30] [Rank 0] step:1961/10000 train_time:165324ms step_avg:84.31ms +[2025-08-22 09:40:32] [Rank 0] step:1981/10000 train_time:167021ms step_avg:84.31ms +[2025-08-22 09:40:32] [Rank 0] step:1981/10000 train_time:167021ms step_avg:84.31ms +[2025-08-22 09:40:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:40:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:40:47] [Rank 0] PRINT: step:2000/10000 val_loss:4.5574 svd_entropy: attn_qk:H=0.7185,top10E=0.32,eRank=156.9,q75/q25=69.39 attn_vo:H=0.7497,top10E=0.12,eRank=233.7,q75/q25=inf mlp_w1:H=0.7940,top10E=0.23,eRank=204.1,q75/q25=9.96 mlp_w2:H=0.8750,top10E=0.14,eRank=335.5,q75/q25=9.23 vo_prod:H=0.5770,top10E=0.23,eRank=97.3,q75/q25=inf train_time:168802ms step_avg:84.40ms +[2025-08-22 09:40:47] [Rank 0] PRINT: step:2000/10000 val_loss:4.5574 svd_entropy: attn_qk:H=0.7185,top10E=0.32,eRank=156.9,q75/q25=69.39 attn_vo:H=0.7497,top10E=0.12,eRank=233.7,q75/q25=inf mlp_w1:H=0.7940,top10E=0.23,eRank=204.1,q75/q25=9.96 mlp_w2:H=0.8750,top10E=0.14,eRank=335.5,q75/q25=9.23 vo_prod:H=0.5770,top10E=0.23,eRank=97.3,q75/q25=inf train_time:168802ms step_avg:84.40ms +[2025-08-22 09:40:48] [Rank 0] step:2001/10000 train_time:168818ms step_avg:84.37ms +[2025-08-22 09:40:48] [Rank 0] step:2001/10000 train_time:168818ms step_avg:84.37ms +[2025-08-22 09:40:49] [Rank 0] step:2021/10000 train_time:170446ms step_avg:84.34ms +[2025-08-22 09:40:49] [Rank 0] step:2021/10000 train_time:170446ms step_avg:84.34ms +[2025-08-22 09:40:51] [Rank 0] step:2041/10000 train_time:172138ms step_avg:84.34ms +[2025-08-22 09:40:51] [Rank 0] step:2041/10000 train_time:172138ms step_avg:84.34ms +[2025-08-22 09:40:53] [Rank 0] step:2061/10000 train_time:173832ms step_avg:84.34ms +[2025-08-22 09:40:53] [Rank 0] step:2061/10000 train_time:173832ms step_avg:84.34ms +[2025-08-22 09:40:54] [Rank 0] step:2081/10000 train_time:175530ms step_avg:84.35ms +[2025-08-22 09:40:54] [Rank 0] step:2081/10000 train_time:175530ms step_avg:84.35ms +[2025-08-22 09:40:56] [Rank 0] step:2101/10000 train_time:177225ms step_avg:84.35ms +[2025-08-22 09:40:56] [Rank 0] step:2101/10000 train_time:177225ms step_avg:84.35ms +[2025-08-22 09:40:58] [Rank 0] step:2121/10000 train_time:178921ms step_avg:84.36ms +[2025-08-22 09:40:58] [Rank 0] step:2121/10000 train_time:178921ms step_avg:84.36ms +[2025-08-22 09:40:59] [Rank 0] 
step:2141/10000 train_time:180618ms step_avg:84.36ms +[2025-08-22 09:40:59] [Rank 0] step:2141/10000 train_time:180618ms step_avg:84.36ms +[2025-08-22 09:41:01] [Rank 0] step:2161/10000 train_time:182316ms step_avg:84.37ms +[2025-08-22 09:41:01] [Rank 0] step:2161/10000 train_time:182316ms step_avg:84.37ms +[2025-08-22 09:41:03] [Rank 0] step:2181/10000 train_time:184013ms step_avg:84.37ms +[2025-08-22 09:41:03] [Rank 0] step:2181/10000 train_time:184013ms step_avg:84.37ms +[2025-08-22 09:41:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:41:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:41:18] [Rank 0] PRINT: step:2200/10000 val_loss:4.4393 svd_entropy: attn_qk:H=0.7232,top10E=0.31,eRank=159.7,q75/q25=71.20 attn_vo:H=0.7564,top10E=0.11,eRank=243.7,q75/q25=inf mlp_w1:H=0.8012,top10E=0.22,eRank=213.6,q75/q25=10.21 mlp_w2:H=0.8819,top10E=0.14,eRank=351.3,q75/q25=8.83 vo_prod:H=0.5844,top10E=0.22,eRank=102.9,q75/q25=inf train_time:185796ms step_avg:84.45ms +[2025-08-22 09:41:18] [Rank 0] PRINT: step:2200/10000 val_loss:4.4393 svd_entropy: attn_qk:H=0.7232,top10E=0.31,eRank=159.7,q75/q25=71.20 attn_vo:H=0.7564,top10E=0.11,eRank=243.7,q75/q25=inf mlp_w1:H=0.8012,top10E=0.22,eRank=213.6,q75/q25=10.21 mlp_w2:H=0.8819,top10E=0.14,eRank=351.3,q75/q25=8.83 vo_prod:H=0.5844,top10E=0.22,eRank=102.9,q75/q25=inf train_time:185796ms step_avg:84.45ms +[2025-08-22 09:41:18] [Rank 0] step:2201/10000 train_time:185812ms step_avg:84.42ms +[2025-08-22 09:41:18] [Rank 0] step:2201/10000 train_time:185812ms step_avg:84.42ms +[2025-08-22 09:41:20] [Rank 0] step:2221/10000 train_time:187422ms step_avg:84.39ms +[2025-08-22 09:41:20] [Rank 0] step:2221/10000 train_time:187422ms step_avg:84.39ms +[2025-08-22 09:41:22] [Rank 0] step:2241/10000 train_time:189153ms step_avg:84.41ms +[2025-08-22 
09:41:22] [Rank 0] step:2241/10000 train_time:189153ms step_avg:84.41ms +[2025-08-22 09:41:23] [Rank 0] step:2261/10000 train_time:190892ms step_avg:84.43ms +[2025-08-22 09:41:23] [Rank 0] step:2261/10000 train_time:190892ms step_avg:84.43ms +[2025-08-22 09:41:25] [Rank 0] step:2281/10000 train_time:192633ms step_avg:84.45ms +[2025-08-22 09:41:25] [Rank 0] step:2281/10000 train_time:192633ms step_avg:84.45ms +[2025-08-22 09:41:27] [Rank 0] step:2301/10000 train_time:194375ms step_avg:84.47ms +[2025-08-22 09:41:27] [Rank 0] step:2301/10000 train_time:194375ms step_avg:84.47ms +[2025-08-22 09:41:29] [Rank 0] step:2321/10000 train_time:196115ms step_avg:84.50ms +[2025-08-22 09:41:29] [Rank 0] step:2321/10000 train_time:196115ms step_avg:84.50ms +[2025-08-22 09:41:30] [Rank 0] step:2341/10000 train_time:197858ms step_avg:84.52ms +[2025-08-22 09:41:30] [Rank 0] step:2341/10000 train_time:197858ms step_avg:84.52ms +[2025-08-22 09:41:32] [Rank 0] step:2361/10000 train_time:199601ms step_avg:84.54ms +[2025-08-22 09:41:32] [Rank 0] step:2361/10000 train_time:199601ms step_avg:84.54ms +[2025-08-22 09:41:34] [Rank 0] step:2381/10000 train_time:201347ms step_avg:84.56ms +[2025-08-22 09:41:34] [Rank 0] step:2381/10000 train_time:201347ms step_avg:84.56ms +[2025-08-22 09:41:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:41:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:41:49] [Rank 0] PRINT: step:2400/10000 val_loss:4.3510 svd_entropy: attn_qk:H=0.7270,top10E=0.31,eRank=162.1,q75/q25=73.60 attn_vo:H=0.7621,top10E=0.11,eRank=252.6,q75/q25=inf mlp_w1:H=0.8077,top10E=0.21,eRank=222.6,q75/q25=10.34 mlp_w2:H=0.8876,top10E=0.13,eRank=365.0,q75/q25=8.41 vo_prod:H=0.5911,top10E=0.21,eRank=108.3,q75/q25=inf train_time:203179ms step_avg:84.66ms +[2025-08-22 09:41:49] [Rank 0] PRINT: step:2400/10000 val_loss:4.3510 svd_entropy: attn_qk:H=0.7270,top10E=0.31,eRank=162.1,q75/q25=73.60 attn_vo:H=0.7621,top10E=0.11,eRank=252.6,q75/q25=inf mlp_w1:H=0.8077,top10E=0.21,eRank=222.6,q75/q25=10.34 mlp_w2:H=0.8876,top10E=0.13,eRank=365.0,q75/q25=8.41 vo_prod:H=0.5911,top10E=0.21,eRank=108.3,q75/q25=inf train_time:203179ms step_avg:84.66ms +[2025-08-22 09:41:49] [Rank 0] step:2401/10000 train_time:203193ms step_avg:84.63ms +[2025-08-22 09:41:49] [Rank 0] step:2401/10000 train_time:203193ms step_avg:84.63ms +[2025-08-22 09:41:51] [Rank 0] step:2421/10000 train_time:204857ms step_avg:84.62ms +[2025-08-22 09:41:51] [Rank 0] step:2421/10000 train_time:204857ms step_avg:84.62ms +[2025-08-22 09:41:53] [Rank 0] step:2441/10000 train_time:206598ms step_avg:84.64ms +[2025-08-22 09:41:53] [Rank 0] step:2441/10000 train_time:206598ms step_avg:84.64ms +[2025-08-22 09:41:54] [Rank 0] step:2461/10000 train_time:208338ms step_avg:84.66ms +[2025-08-22 09:41:54] [Rank 0] step:2461/10000 train_time:208338ms step_avg:84.66ms +[2025-08-22 09:41:56] [Rank 0] step:2481/10000 train_time:210080ms step_avg:84.68ms +[2025-08-22 09:41:56] [Rank 0] step:2481/10000 train_time:210080ms step_avg:84.68ms +[2025-08-22 09:41:58] [Rank 0] step:2501/10000 train_time:211820ms step_avg:84.69ms +[2025-08-22 09:41:58] [Rank 0] step:2501/10000 train_time:211820ms step_avg:84.69ms +[2025-08-22 09:42:00] [Rank 0] step:2521/10000 train_time:213561ms step_avg:84.71ms +[2025-08-22 09:42:00] [Rank 0] step:2521/10000 train_time:213561ms step_avg:84.71ms +[2025-08-22 09:42:01] [Rank 0] 
step:2541/10000 train_time:215303ms step_avg:84.73ms +[2025-08-22 09:42:01] [Rank 0] step:2541/10000 train_time:215303ms step_avg:84.73ms +[2025-08-22 09:42:03] [Rank 0] step:2561/10000 train_time:217047ms step_avg:84.75ms +[2025-08-22 09:42:03] [Rank 0] step:2561/10000 train_time:217047ms step_avg:84.75ms +[2025-08-22 09:42:05] [Rank 0] step:2581/10000 train_time:218787ms step_avg:84.77ms +[2025-08-22 09:42:05] [Rank 0] step:2581/10000 train_time:218787ms step_avg:84.77ms +[2025-08-22 09:42:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:42:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:42:20] [Rank 0] PRINT: step:2600/10000 val_loss:4.2839 svd_entropy: attn_qk:H=0.7308,top10E=0.30,eRank=164.5,q75/q25=75.62 attn_vo:H=0.7669,top10E=0.10,eRank=260.4,q75/q25=inf mlp_w1:H=0.8134,top10E=0.21,eRank=230.9,q75/q25=10.41 mlp_w2:H=0.8925,top10E=0.13,eRank=377.1,q75/q25=7.99 vo_prod:H=0.5973,top10E=0.20,eRank=113.5,q75/q25=inf train_time:220617ms step_avg:84.85ms +[2025-08-22 09:42:20] [Rank 0] PRINT: step:2600/10000 val_loss:4.2839 svd_entropy: attn_qk:H=0.7308,top10E=0.30,eRank=164.5,q75/q25=75.62 attn_vo:H=0.7669,top10E=0.10,eRank=260.4,q75/q25=inf mlp_w1:H=0.8134,top10E=0.21,eRank=230.9,q75/q25=10.41 mlp_w2:H=0.8925,top10E=0.13,eRank=377.1,q75/q25=7.99 vo_prod:H=0.5973,top10E=0.20,eRank=113.5,q75/q25=inf train_time:220617ms step_avg:84.85ms +[2025-08-22 09:42:20] [Rank 0] step:2601/10000 train_time:220633ms step_avg:84.83ms +[2025-08-22 09:42:20] [Rank 0] step:2601/10000 train_time:220633ms step_avg:84.83ms +[2025-08-22 09:42:22] [Rank 0] step:2621/10000 train_time:222289ms step_avg:84.81ms +[2025-08-22 09:42:22] [Rank 0] step:2621/10000 train_time:222289ms step_avg:84.81ms +[2025-08-22 09:42:24] [Rank 0] step:2641/10000 train_time:224026ms step_avg:84.83ms +[2025-08-22 
09:42:24] [Rank 0] step:2641/10000 train_time:224026ms step_avg:84.83ms +[2025-08-22 09:42:25] [Rank 0] step:2661/10000 train_time:225763ms step_avg:84.84ms +[2025-08-22 09:42:25] [Rank 0] step:2661/10000 train_time:225763ms step_avg:84.84ms +[2025-08-22 09:42:27] [Rank 0] step:2681/10000 train_time:227500ms step_avg:84.86ms +[2025-08-22 09:42:27] [Rank 0] step:2681/10000 train_time:227500ms step_avg:84.86ms +[2025-08-22 09:42:29] [Rank 0] step:2701/10000 train_time:229238ms step_avg:84.87ms +[2025-08-22 09:42:29] [Rank 0] step:2701/10000 train_time:229238ms step_avg:84.87ms +[2025-08-22 09:42:31] [Rank 0] step:2721/10000 train_time:230976ms step_avg:84.89ms +[2025-08-22 09:42:31] [Rank 0] step:2721/10000 train_time:230976ms step_avg:84.89ms +[2025-08-22 09:42:32] [Rank 0] step:2741/10000 train_time:232716ms step_avg:84.90ms +[2025-08-22 09:42:32] [Rank 0] step:2741/10000 train_time:232716ms step_avg:84.90ms +[2025-08-22 09:42:34] [Rank 0] step:2761/10000 train_time:234457ms step_avg:84.92ms +[2025-08-22 09:42:34] [Rank 0] step:2761/10000 train_time:234457ms step_avg:84.92ms +[2025-08-22 09:42:36] [Rank 0] step:2781/10000 train_time:236198ms step_avg:84.93ms +[2025-08-22 09:42:36] [Rank 0] step:2781/10000 train_time:236198ms step_avg:84.93ms +[2025-08-22 09:42:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:42:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:42:51] [Rank 0] PRINT: step:2800/10000 val_loss:4.2432 svd_entropy: attn_qk:H=0.7345,top10E=0.29,eRank=166.9,q75/q25=77.22 attn_vo:H=0.7710,top10E=0.10,eRank=267.3,q75/q25=inf mlp_w1:H=0.8186,top10E=0.20,eRank=238.6,q75/q25=10.45 mlp_w2:H=0.8967,top10E=0.12,eRank=387.8,q75/q25=7.63 vo_prod:H=0.6030,top10E=0.20,eRank=118.4,q75/q25=inf train_time:238029ms step_avg:85.01ms +[2025-08-22 09:42:51] [Rank 0] PRINT: step:2800/10000 val_loss:4.2432 svd_entropy: attn_qk:H=0.7345,top10E=0.29,eRank=166.9,q75/q25=77.22 attn_vo:H=0.7710,top10E=0.10,eRank=267.3,q75/q25=inf mlp_w1:H=0.8186,top10E=0.20,eRank=238.6,q75/q25=10.45 mlp_w2:H=0.8967,top10E=0.12,eRank=387.8,q75/q25=7.63 vo_prod:H=0.6030,top10E=0.20,eRank=118.4,q75/q25=inf train_time:238029ms step_avg:85.01ms +[2025-08-22 09:42:51] [Rank 0] step:2801/10000 train_time:238045ms step_avg:84.99ms +[2025-08-22 09:42:51] [Rank 0] step:2801/10000 train_time:238045ms step_avg:84.99ms +[2025-08-22 09:42:53] [Rank 0] step:2821/10000 train_time:239704ms step_avg:84.97ms +[2025-08-22 09:42:53] [Rank 0] step:2821/10000 train_time:239704ms step_avg:84.97ms +[2025-08-22 09:42:55] [Rank 0] step:2841/10000 train_time:241443ms step_avg:84.99ms +[2025-08-22 09:42:55] [Rank 0] step:2841/10000 train_time:241443ms step_avg:84.99ms +[2025-08-22 09:42:57] [Rank 0] step:2861/10000 train_time:243186ms step_avg:85.00ms +[2025-08-22 09:42:57] [Rank 0] step:2861/10000 train_time:243186ms step_avg:85.00ms +[2025-08-22 09:42:58] [Rank 0] step:2881/10000 train_time:244928ms step_avg:85.01ms +[2025-08-22 09:42:58] [Rank 0] step:2881/10000 train_time:244928ms step_avg:85.01ms +[2025-08-22 09:43:00] [Rank 0] step:2901/10000 train_time:246670ms step_avg:85.03ms +[2025-08-22 09:43:00] [Rank 0] step:2901/10000 train_time:246670ms step_avg:85.03ms +[2025-08-22 09:43:02] [Rank 0] step:2921/10000 train_time:248411ms step_avg:85.04ms +[2025-08-22 09:43:02] [Rank 0] step:2921/10000 train_time:248411ms step_avg:85.04ms +[2025-08-22 09:43:03] [Rank 0] 
step:2941/10000 train_time:250156ms step_avg:85.06ms +[2025-08-22 09:43:03] [Rank 0] step:2941/10000 train_time:250156ms step_avg:85.06ms +[2025-08-22 09:43:05] [Rank 0] step:2961/10000 train_time:251901ms step_avg:85.07ms +[2025-08-22 09:43:05] [Rank 0] step:2961/10000 train_time:251901ms step_avg:85.07ms +[2025-08-22 09:43:07] [Rank 0] step:2981/10000 train_time:253654ms step_avg:85.09ms +[2025-08-22 09:43:07] [Rank 0] step:2981/10000 train_time:253654ms step_avg:85.09ms +[2025-08-22 09:43:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:43:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:43:22] [Rank 0] PRINT: step:3000/10000 val_loss:4.1921 svd_entropy: attn_qk:H=0.7378,top10E=0.29,eRank=169.2,q75/q25=78.23 attn_vo:H=0.7747,top10E=0.10,eRank=273.6,q75/q25=inf mlp_w1:H=0.8233,top10E=0.20,eRank=245.9,q75/q25=10.43 mlp_w2:H=0.9002,top10E=0.12,eRank=397.2,q75/q25=7.32 vo_prod:H=0.6082,top10E=0.19,eRank=123.0,q75/q25=inf train_time:255494ms step_avg:85.16ms +[2025-08-22 09:43:22] [Rank 0] PRINT: step:3000/10000 val_loss:4.1921 svd_entropy: attn_qk:H=0.7378,top10E=0.29,eRank=169.2,q75/q25=78.23 attn_vo:H=0.7747,top10E=0.10,eRank=273.6,q75/q25=inf mlp_w1:H=0.8233,top10E=0.20,eRank=245.9,q75/q25=10.43 mlp_w2:H=0.9002,top10E=0.12,eRank=397.2,q75/q25=7.32 vo_prod:H=0.6082,top10E=0.19,eRank=123.0,q75/q25=inf train_time:255494ms step_avg:85.16ms +[2025-08-22 09:43:22] [Rank 0] step:3001/10000 train_time:255509ms step_avg:85.14ms +[2025-08-22 09:43:22] [Rank 0] step:3001/10000 train_time:255509ms step_avg:85.14ms +[2025-08-22 09:43:24] [Rank 0] step:3021/10000 train_time:257174ms step_avg:85.13ms +[2025-08-22 09:43:24] [Rank 0] step:3021/10000 train_time:257174ms step_avg:85.13ms +[2025-08-22 09:43:26] [Rank 0] step:3041/10000 train_time:258920ms step_avg:85.14ms +[2025-08-22 
09:43:26] [Rank 0] step:3041/10000 train_time:258920ms step_avg:85.14ms +[2025-08-22 09:43:28] [Rank 0] step:3061/10000 train_time:260667ms step_avg:85.16ms +[2025-08-22 09:43:28] [Rank 0] step:3061/10000 train_time:260667ms step_avg:85.16ms +[2025-08-22 09:43:29] [Rank 0] step:3081/10000 train_time:262416ms step_avg:85.17ms +[2025-08-22 09:43:29] [Rank 0] step:3081/10000 train_time:262416ms step_avg:85.17ms +[2025-08-22 09:43:31] [Rank 0] step:3101/10000 train_time:264166ms step_avg:85.19ms +[2025-08-22 09:43:31] [Rank 0] step:3101/10000 train_time:264166ms step_avg:85.19ms +[2025-08-22 09:43:33] [Rank 0] step:3121/10000 train_time:265913ms step_avg:85.20ms +[2025-08-22 09:43:33] [Rank 0] step:3121/10000 train_time:265913ms step_avg:85.20ms +[2025-08-22 09:43:35] [Rank 0] step:3141/10000 train_time:267662ms step_avg:85.22ms +[2025-08-22 09:43:35] [Rank 0] step:3141/10000 train_time:267662ms step_avg:85.22ms +[2025-08-22 09:43:36] [Rank 0] step:3161/10000 train_time:269411ms step_avg:85.23ms +[2025-08-22 09:43:36] [Rank 0] step:3161/10000 train_time:269411ms step_avg:85.23ms +[2025-08-22 09:43:38] [Rank 0] step:3181/10000 train_time:271161ms step_avg:85.24ms +[2025-08-22 09:43:38] [Rank 0] step:3181/10000 train_time:271161ms step_avg:85.24ms +[2025-08-22 09:43:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:43:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:43:53] [Rank 0] PRINT: step:3200/10000 val_loss:4.1674 svd_entropy: attn_qk:H=0.7410,top10E=0.28,eRank=171.4,q75/q25=79.23 attn_vo:H=0.7779,top10E=0.09,eRank=279.2,q75/q25=inf mlp_w1:H=0.8274,top10E=0.19,eRank=252.6,q75/q25=10.38 mlp_w2:H=0.9035,top10E=0.12,eRank=406.1,q75/q25=7.02 vo_prod:H=0.6129,top10E=0.18,eRank=127.3,q75/q25=inf train_time:272998ms step_avg:85.31ms +[2025-08-22 09:43:53] [Rank 0] PRINT: step:3200/10000 val_loss:4.1674 svd_entropy: attn_qk:H=0.7410,top10E=0.28,eRank=171.4,q75/q25=79.23 attn_vo:H=0.7779,top10E=0.09,eRank=279.2,q75/q25=inf mlp_w1:H=0.8274,top10E=0.19,eRank=252.6,q75/q25=10.38 mlp_w2:H=0.9035,top10E=0.12,eRank=406.1,q75/q25=7.02 vo_prod:H=0.6129,top10E=0.18,eRank=127.3,q75/q25=inf train_time:272998ms step_avg:85.31ms +[2025-08-22 09:43:54] [Rank 0] step:3201/10000 train_time:273013ms step_avg:85.29ms +[2025-08-22 09:43:54] [Rank 0] step:3201/10000 train_time:273013ms step_avg:85.29ms +[2025-08-22 09:43:55] [Rank 0] step:3221/10000 train_time:274689ms step_avg:85.28ms +[2025-08-22 09:43:55] [Rank 0] step:3221/10000 train_time:274689ms step_avg:85.28ms +[2025-08-22 09:43:57] [Rank 0] step:3241/10000 train_time:276434ms step_avg:85.29ms +[2025-08-22 09:43:57] [Rank 0] step:3241/10000 train_time:276434ms step_avg:85.29ms +[2025-08-22 09:43:59] [Rank 0] step:3261/10000 train_time:278180ms step_avg:85.31ms +[2025-08-22 09:43:59] [Rank 0] step:3261/10000 train_time:278180ms step_avg:85.31ms +[2025-08-22 09:44:01] [Rank 0] step:3281/10000 train_time:279928ms step_avg:85.32ms +[2025-08-22 09:44:01] [Rank 0] step:3281/10000 train_time:279928ms step_avg:85.32ms +[2025-08-22 09:44:02] [Rank 0] step:3301/10000 train_time:281674ms step_avg:85.33ms +[2025-08-22 09:44:02] [Rank 0] step:3301/10000 train_time:281674ms step_avg:85.33ms +[2025-08-22 09:44:04] [Rank 0] step:3321/10000 train_time:283425ms step_avg:85.34ms +[2025-08-22 09:44:04] [Rank 0] step:3321/10000 train_time:283425ms step_avg:85.34ms +[2025-08-22 09:44:06] [Rank 0] 
step:3341/10000 train_time:285170ms step_avg:85.35ms +[2025-08-22 09:44:06] [Rank 0] step:3341/10000 train_time:285170ms step_avg:85.35ms +[2025-08-22 09:44:08] [Rank 0] step:3361/10000 train_time:286920ms step_avg:85.37ms +[2025-08-22 09:44:08] [Rank 0] step:3361/10000 train_time:286920ms step_avg:85.37ms +[2025-08-22 09:44:09] [Rank 0] step:3381/10000 train_time:288669ms step_avg:85.38ms +[2025-08-22 09:44:09] [Rank 0] step:3381/10000 train_time:288669ms step_avg:85.38ms +[2025-08-22 09:44:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:44:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:44:25] [Rank 0] PRINT: step:3400/10000 val_loss:4.1762 svd_entropy: attn_qk:H=0.7443,top10E=0.28,eRank=173.8,q75/q25=80.61 attn_vo:H=0.7808,top10E=0.09,eRank=284.6,q75/q25=inf mlp_w1:H=0.8313,top10E=0.19,eRank=259.2,q75/q25=10.28 mlp_w2:H=0.9065,top10E=0.12,eRank=414.0,q75/q25=6.74 vo_prod:H=0.6175,top10E=0.18,eRank=131.7,q75/q25=inf train_time:290509ms step_avg:85.44ms +[2025-08-22 09:44:25] [Rank 0] PRINT: step:3400/10000 val_loss:4.1762 svd_entropy: attn_qk:H=0.7443,top10E=0.28,eRank=173.8,q75/q25=80.61 attn_vo:H=0.7808,top10E=0.09,eRank=284.6,q75/q25=inf mlp_w1:H=0.8313,top10E=0.19,eRank=259.2,q75/q25=10.28 mlp_w2:H=0.9065,top10E=0.12,eRank=414.0,q75/q25=6.74 vo_prod:H=0.6175,top10E=0.18,eRank=131.7,q75/q25=inf train_time:290509ms step_avg:85.44ms +[2025-08-22 09:44:25] [Rank 0] step:3401/10000 train_time:290524ms step_avg:85.42ms +[2025-08-22 09:44:25] [Rank 0] step:3401/10000 train_time:290524ms step_avg:85.42ms +[2025-08-22 09:44:27] [Rank 0] step:3421/10000 train_time:292200ms step_avg:85.41ms +[2025-08-22 09:44:27] [Rank 0] step:3421/10000 train_time:292200ms step_avg:85.41ms +[2025-08-22 09:44:29] [Rank 0] step:3441/10000 train_time:293946ms step_avg:85.42ms +[2025-08-22 
09:44:29] [Rank 0] step:3441/10000 train_time:293946ms step_avg:85.42ms +[2025-08-22 09:44:30] [Rank 0] step:3461/10000 train_time:295696ms step_avg:85.44ms +[2025-08-22 09:44:30] [Rank 0] step:3461/10000 train_time:295696ms step_avg:85.44ms +[2025-08-22 09:44:32] [Rank 0] step:3481/10000 train_time:297444ms step_avg:85.45ms +[2025-08-22 09:44:32] [Rank 0] step:3481/10000 train_time:297444ms step_avg:85.45ms +[2025-08-22 09:44:34] [Rank 0] step:3501/10000 train_time:299195ms step_avg:85.46ms +[2025-08-22 09:44:34] [Rank 0] step:3501/10000 train_time:299195ms step_avg:85.46ms +[2025-08-22 09:44:36] [Rank 0] step:3521/10000 train_time:300948ms step_avg:85.47ms +[2025-08-22 09:44:36] [Rank 0] step:3521/10000 train_time:300948ms step_avg:85.47ms +[2025-08-22 09:44:37] [Rank 0] step:3541/10000 train_time:302698ms step_avg:85.48ms +[2025-08-22 09:44:37] [Rank 0] step:3541/10000 train_time:302698ms step_avg:85.48ms +[2025-08-22 09:44:39] [Rank 0] step:3561/10000 train_time:304451ms step_avg:85.50ms +[2025-08-22 09:44:39] [Rank 0] step:3561/10000 train_time:304451ms step_avg:85.50ms +[2025-08-22 09:44:41] [Rank 0] step:3581/10000 train_time:306203ms step_avg:85.51ms +[2025-08-22 09:44:41] [Rank 0] step:3581/10000 train_time:306203ms step_avg:85.51ms +[2025-08-22 09:44:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:44:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:44:56] [Rank 0] PRINT: step:3600/10000 val_loss:4.1038 svd_entropy: attn_qk:H=0.7470,top10E=0.28,eRank=175.8,q75/q25=80.84 attn_vo:H=0.7834,top10E=0.09,eRank=289.3,q75/q25=inf mlp_w1:H=0.8350,top10E=0.18,eRank=265.4,q75/q25=10.16 mlp_w2:H=0.9090,top10E=0.11,eRank=421.2,q75/q25=6.50 vo_prod:H=0.6214,top10E=0.17,eRank=135.4,q75/q25=inf train_time:308046ms step_avg:85.57ms +[2025-08-22 09:44:56] [Rank 0] PRINT: step:3600/10000 val_loss:4.1038 svd_entropy: attn_qk:H=0.7470,top10E=0.28,eRank=175.8,q75/q25=80.84 attn_vo:H=0.7834,top10E=0.09,eRank=289.3,q75/q25=inf mlp_w1:H=0.8350,top10E=0.18,eRank=265.4,q75/q25=10.16 mlp_w2:H=0.9090,top10E=0.11,eRank=421.2,q75/q25=6.50 vo_prod:H=0.6214,top10E=0.17,eRank=135.4,q75/q25=inf train_time:308046ms step_avg:85.57ms +[2025-08-22 09:44:56] [Rank 0] step:3601/10000 train_time:308062ms step_avg:85.55ms +[2025-08-22 09:44:56] [Rank 0] step:3601/10000 train_time:308062ms step_avg:85.55ms +[2025-08-22 09:44:58] [Rank 0] step:3621/10000 train_time:309727ms step_avg:85.54ms +[2025-08-22 09:44:58] [Rank 0] step:3621/10000 train_time:309727ms step_avg:85.54ms +[2025-08-22 09:45:00] [Rank 0] step:3641/10000 train_time:311473ms step_avg:85.55ms +[2025-08-22 09:45:00] [Rank 0] step:3641/10000 train_time:311473ms step_avg:85.55ms +[2025-08-22 09:45:01] [Rank 0] step:3661/10000 train_time:313221ms step_avg:85.56ms +[2025-08-22 09:45:01] [Rank 0] step:3661/10000 train_time:313221ms step_avg:85.56ms +[2025-08-22 09:45:03] [Rank 0] step:3681/10000 train_time:314971ms step_avg:85.57ms +[2025-08-22 09:45:03] [Rank 0] step:3681/10000 train_time:314971ms step_avg:85.57ms +[2025-08-22 09:45:05] [Rank 0] step:3701/10000 train_time:316718ms step_avg:85.58ms +[2025-08-22 09:45:05] [Rank 0] step:3701/10000 train_time:316718ms step_avg:85.58ms +[2025-08-22 09:45:07] [Rank 0] step:3721/10000 train_time:318493ms step_avg:85.59ms +[2025-08-22 09:45:07] [Rank 0] step:3721/10000 train_time:318493ms step_avg:85.59ms +[2025-08-22 09:45:09] [Rank 0] 
step:3741/10000 train_time:320277ms step_avg:85.61ms +[2025-08-22 09:45:09] [Rank 0] step:3741/10000 train_time:320277ms step_avg:85.61ms +[2025-08-22 09:45:10] [Rank 0] step:3761/10000 train_time:322063ms step_avg:85.63ms +[2025-08-22 09:45:10] [Rank 0] step:3761/10000 train_time:322063ms step_avg:85.63ms +[2025-08-22 09:45:12] [Rank 0] step:3781/10000 train_time:323850ms step_avg:85.65ms +[2025-08-22 09:45:12] [Rank 0] step:3781/10000 train_time:323850ms step_avg:85.65ms +[2025-08-22 09:45:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:45:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:45:27] [Rank 0] PRINT: step:3800/10000 val_loss:4.0514 svd_entropy: attn_qk:H=0.7496,top10E=0.27,eRank=177.8,q75/q25=81.42 attn_vo:H=0.7857,top10E=0.09,eRank=293.6,q75/q25=inf mlp_w1:H=0.8382,top10E=0.18,eRank=271.2,q75/q25=10.05 mlp_w2:H=0.9113,top10E=0.11,eRank=427.6,q75/q25=6.30 vo_prod:H=0.6250,top10E=0.17,eRank=139.0,q75/q25=inf train_time:325726ms step_avg:85.72ms +[2025-08-22 09:45:27] [Rank 0] PRINT: step:3800/10000 val_loss:4.0514 svd_entropy: attn_qk:H=0.7496,top10E=0.27,eRank=177.8,q75/q25=81.42 attn_vo:H=0.7857,top10E=0.09,eRank=293.6,q75/q25=inf mlp_w1:H=0.8382,top10E=0.18,eRank=271.2,q75/q25=10.05 mlp_w2:H=0.9113,top10E=0.11,eRank=427.6,q75/q25=6.30 vo_prod:H=0.6250,top10E=0.17,eRank=139.0,q75/q25=inf train_time:325726ms step_avg:85.72ms +[2025-08-22 09:45:28] [Rank 0] step:3801/10000 train_time:325741ms step_avg:85.70ms +[2025-08-22 09:45:28] [Rank 0] step:3801/10000 train_time:325741ms step_avg:85.70ms +[2025-08-22 09:45:29] [Rank 0] step:3821/10000 train_time:327452ms step_avg:85.70ms +[2025-08-22 09:45:29] [Rank 0] step:3821/10000 train_time:327452ms step_avg:85.70ms +[2025-08-22 09:45:31] [Rank 0] step:3841/10000 train_time:329238ms step_avg:85.72ms +[2025-08-22 
09:45:31] [Rank 0] step:3841/10000 train_time:329238ms step_avg:85.72ms +[2025-08-22 09:45:33] [Rank 0] step:3861/10000 train_time:331021ms step_avg:85.73ms +[2025-08-22 09:45:33] [Rank 0] step:3861/10000 train_time:331021ms step_avg:85.73ms +[2025-08-22 09:45:35] [Rank 0] step:3881/10000 train_time:332803ms step_avg:85.75ms +[2025-08-22 09:45:35] [Rank 0] step:3881/10000 train_time:332803ms step_avg:85.75ms +[2025-08-22 09:45:37] [Rank 0] step:3901/10000 train_time:334587ms step_avg:85.77ms +[2025-08-22 09:45:37] [Rank 0] step:3901/10000 train_time:334587ms step_avg:85.77ms +[2025-08-22 09:45:38] [Rank 0] step:3921/10000 train_time:336369ms step_avg:85.79ms +[2025-08-22 09:45:38] [Rank 0] step:3921/10000 train_time:336369ms step_avg:85.79ms +[2025-08-22 09:45:40] [Rank 0] step:3941/10000 train_time:338154ms step_avg:85.80ms +[2025-08-22 09:45:40] [Rank 0] step:3941/10000 train_time:338154ms step_avg:85.80ms +[2025-08-22 09:45:42] [Rank 0] step:3961/10000 train_time:339938ms step_avg:85.82ms +[2025-08-22 09:45:42] [Rank 0] step:3961/10000 train_time:339938ms step_avg:85.82ms +[2025-08-22 09:45:44] [Rank 0] step:3981/10000 train_time:341723ms step_avg:85.84ms +[2025-08-22 09:45:44] [Rank 0] step:3981/10000 train_time:341723ms step_avg:85.84ms +[2025-08-22 09:45:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:45:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:45:59] [Rank 0] PRINT: step:4000/10000 val_loss:4.0199 svd_entropy: attn_qk:H=0.7521,top10E=0.27,eRank=179.6,q75/q25=81.72 attn_vo:H=0.7877,top10E=0.09,eRank=297.3,q75/q25=inf mlp_w1:H=0.8412,top10E=0.18,eRank=276.6,q75/q25=9.89 mlp_w2:H=0.9136,top10E=0.11,eRank=434.0,q75/q25=6.09 vo_prod:H=0.6281,top10E=0.17,eRank=142.2,q75/q25=inf train_time:343597ms step_avg:85.90ms +[2025-08-22 09:45:59] [Rank 0] PRINT: step:4000/10000 val_loss:4.0199 svd_entropy: attn_qk:H=0.7521,top10E=0.27,eRank=179.6,q75/q25=81.72 attn_vo:H=0.7877,top10E=0.09,eRank=297.3,q75/q25=inf mlp_w1:H=0.8412,top10E=0.18,eRank=276.6,q75/q25=9.89 mlp_w2:H=0.9136,top10E=0.11,eRank=434.0,q75/q25=6.09 vo_prod:H=0.6281,top10E=0.17,eRank=142.2,q75/q25=inf train_time:343597ms step_avg:85.90ms +[2025-08-22 09:45:59] [Rank 0] step:4001/10000 train_time:343611ms step_avg:85.88ms +[2025-08-22 09:45:59] [Rank 0] step:4001/10000 train_time:343611ms step_avg:85.88ms +[2025-08-22 09:46:01] [Rank 0] step:4021/10000 train_time:345332ms step_avg:85.88ms +[2025-08-22 09:46:01] [Rank 0] step:4021/10000 train_time:345332ms step_avg:85.88ms +[2025-08-22 09:46:03] [Rank 0] step:4041/10000 train_time:347117ms step_avg:85.90ms +[2025-08-22 09:46:03] [Rank 0] step:4041/10000 train_time:347117ms step_avg:85.90ms +[2025-08-22 09:46:04] [Rank 0] step:4061/10000 train_time:348904ms step_avg:85.92ms +[2025-08-22 09:46:04] [Rank 0] step:4061/10000 train_time:348904ms step_avg:85.92ms +[2025-08-22 09:46:06] [Rank 0] step:4081/10000 train_time:350860ms step_avg:85.97ms +[2025-08-22 09:46:06] [Rank 0] step:4081/10000 train_time:350860ms step_avg:85.97ms +[2025-08-22 09:46:08] [Rank 0] step:4101/10000 train_time:352647ms step_avg:85.99ms +[2025-08-22 09:46:08] [Rank 0] step:4101/10000 train_time:352647ms step_avg:85.99ms +[2025-08-22 09:46:10] [Rank 0] step:4121/10000 train_time:354434ms step_avg:86.01ms +[2025-08-22 09:46:10] [Rank 0] step:4121/10000 train_time:354434ms step_avg:86.01ms +[2025-08-22 09:46:12] [Rank 0] 
step:4141/10000 train_time:356221ms step_avg:86.02ms +[2025-08-22 09:46:12] [Rank 0] step:4141/10000 train_time:356221ms step_avg:86.02ms +[2025-08-22 09:46:13] [Rank 0] step:4161/10000 train_time:358009ms step_avg:86.04ms +[2025-08-22 09:46:13] [Rank 0] step:4161/10000 train_time:358009ms step_avg:86.04ms +[2025-08-22 09:46:15] [Rank 0] step:4181/10000 train_time:359797ms step_avg:86.06ms +[2025-08-22 09:46:15] [Rank 0] step:4181/10000 train_time:359797ms step_avg:86.06ms +[2025-08-22 09:46:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:46:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:46:31] [Rank 0] PRINT: step:4200/10000 val_loss:4.0086 svd_entropy: attn_qk:H=0.7545,top10E=0.26,eRank=181.5,q75/q25=81.07 attn_vo:H=0.7896,top10E=0.09,eRank=301.0,q75/q25=inf mlp_w1:H=0.8440,top10E=0.18,eRank=281.8,q75/q25=9.77 mlp_w2:H=0.9155,top10E=0.11,eRank=439.7,q75/q25=5.91 vo_prod:H=0.6313,top10E=0.16,eRank=145.5,q75/q25=inf train_time:361675ms step_avg:86.11ms +[2025-08-22 09:46:31] [Rank 0] PRINT: step:4200/10000 val_loss:4.0086 svd_entropy: attn_qk:H=0.7545,top10E=0.26,eRank=181.5,q75/q25=81.07 attn_vo:H=0.7896,top10E=0.09,eRank=301.0,q75/q25=inf mlp_w1:H=0.8440,top10E=0.18,eRank=281.8,q75/q25=9.77 mlp_w2:H=0.9155,top10E=0.11,eRank=439.7,q75/q25=5.91 vo_prod:H=0.6313,top10E=0.16,eRank=145.5,q75/q25=inf train_time:361675ms step_avg:86.11ms +[2025-08-22 09:46:31] [Rank 0] step:4201/10000 train_time:361690ms step_avg:86.10ms +[2025-08-22 09:46:31] [Rank 0] step:4201/10000 train_time:361690ms step_avg:86.10ms +[2025-08-22 09:46:33] [Rank 0] step:4221/10000 train_time:363397ms step_avg:86.09ms +[2025-08-22 09:46:33] [Rank 0] step:4221/10000 train_time:363397ms step_avg:86.09ms +[2025-08-22 09:46:34] [Rank 0] step:4241/10000 train_time:365182ms step_avg:86.11ms +[2025-08-22 
09:46:34] [Rank 0] step:4241/10000 train_time:365182ms step_avg:86.11ms +[2025-08-22 09:46:36] [Rank 0] step:4261/10000 train_time:366965ms step_avg:86.12ms +[2025-08-22 09:46:36] [Rank 0] step:4261/10000 train_time:366965ms step_avg:86.12ms +[2025-08-22 09:46:38] [Rank 0] step:4281/10000 train_time:368749ms step_avg:86.14ms +[2025-08-22 09:46:38] [Rank 0] step:4281/10000 train_time:368749ms step_avg:86.14ms +[2025-08-22 09:46:40] [Rank 0] step:4301/10000 train_time:370532ms step_avg:86.15ms +[2025-08-22 09:46:40] [Rank 0] step:4301/10000 train_time:370532ms step_avg:86.15ms +[2025-08-22 09:46:41] [Rank 0] step:4321/10000 train_time:372317ms step_avg:86.16ms +[2025-08-22 09:46:41] [Rank 0] step:4321/10000 train_time:372317ms step_avg:86.16ms +[2025-08-22 09:46:43] [Rank 0] step:4341/10000 train_time:374099ms step_avg:86.18ms +[2025-08-22 09:46:43] [Rank 0] step:4341/10000 train_time:374099ms step_avg:86.18ms +[2025-08-22 09:46:45] [Rank 0] step:4361/10000 train_time:375884ms step_avg:86.19ms +[2025-08-22 09:46:45] [Rank 0] step:4361/10000 train_time:375884ms step_avg:86.19ms +[2025-08-22 09:46:47] [Rank 0] step:4381/10000 train_time:377668ms step_avg:86.21ms +[2025-08-22 09:46:47] [Rank 0] step:4381/10000 train_time:377668ms step_avg:86.21ms +[2025-08-22 09:46:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:46:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:47:02] [Rank 0] PRINT: step:4400/10000 val_loss:3.9972 svd_entropy: attn_qk:H=0.7567,top10E=0.26,eRank=183.3,q75/q25=82.02 attn_vo:H=0.7914,top10E=0.08,eRank=304.5,q75/q25=inf mlp_w1:H=0.8466,top10E=0.17,eRank=286.8,q75/q25=9.62 mlp_w2:H=0.9172,top10E=0.11,eRank=444.8,q75/q25=5.75 vo_prod:H=0.6341,top10E=0.16,eRank=148.5,q75/q25=inf train_time:379541ms step_avg:86.26ms +[2025-08-22 09:47:02] [Rank 0] PRINT: step:4400/10000 val_loss:3.9972 svd_entropy: attn_qk:H=0.7567,top10E=0.26,eRank=183.3,q75/q25=82.02 attn_vo:H=0.7914,top10E=0.08,eRank=304.5,q75/q25=inf mlp_w1:H=0.8466,top10E=0.17,eRank=286.8,q75/q25=9.62 mlp_w2:H=0.9172,top10E=0.11,eRank=444.8,q75/q25=5.75 vo_prod:H=0.6341,top10E=0.16,eRank=148.5,q75/q25=inf train_time:379541ms step_avg:86.26ms +[2025-08-22 09:47:02] [Rank 0] step:4401/10000 train_time:379556ms step_avg:86.24ms +[2025-08-22 09:47:02] [Rank 0] step:4401/10000 train_time:379556ms step_avg:86.24ms +[2025-08-22 09:47:04] [Rank 0] step:4421/10000 train_time:381246ms step_avg:86.24ms +[2025-08-22 09:47:04] [Rank 0] step:4421/10000 train_time:381246ms step_avg:86.24ms +[2025-08-22 09:47:06] [Rank 0] step:4441/10000 train_time:383026ms step_avg:86.25ms +[2025-08-22 09:47:06] [Rank 0] step:4441/10000 train_time:383026ms step_avg:86.25ms +[2025-08-22 09:47:08] [Rank 0] step:4461/10000 train_time:384814ms step_avg:86.26ms +[2025-08-22 09:47:08] [Rank 0] step:4461/10000 train_time:384814ms step_avg:86.26ms +[2025-08-22 09:47:09] [Rank 0] step:4481/10000 train_time:386601ms step_avg:86.28ms +[2025-08-22 09:47:09] [Rank 0] step:4481/10000 train_time:386601ms step_avg:86.28ms +[2025-08-22 09:47:11] [Rank 0] step:4501/10000 train_time:388389ms step_avg:86.29ms +[2025-08-22 09:47:11] [Rank 0] step:4501/10000 train_time:388389ms step_avg:86.29ms +[2025-08-22 09:47:13] [Rank 0] step:4521/10000 train_time:390176ms step_avg:86.30ms +[2025-08-22 09:47:13] [Rank 0] step:4521/10000 train_time:390176ms step_avg:86.30ms +[2025-08-22 09:47:15] [Rank 0] 
step:4541/10000 train_time:391965ms step_avg:86.32ms +[2025-08-22 09:47:15] [Rank 0] step:4541/10000 train_time:391965ms step_avg:86.32ms +[2025-08-22 09:47:17] [Rank 0] step:4561/10000 train_time:393754ms step_avg:86.33ms +[2025-08-22 09:47:17] [Rank 0] step:4561/10000 train_time:393754ms step_avg:86.33ms +[2025-08-22 09:47:18] [Rank 0] step:4581/10000 train_time:395544ms step_avg:86.34ms +[2025-08-22 09:47:18] [Rank 0] step:4581/10000 train_time:395544ms step_avg:86.34ms +[2025-08-22 09:47:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:47:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:47:34] [Rank 0] PRINT: step:4600/10000 val_loss:3.9533 svd_entropy: attn_qk:H=0.7587,top10E=0.26,eRank=185.0,q75/q25=82.35 attn_vo:H=0.7931,top10E=0.08,eRank=307.8,q75/q25=inf mlp_w1:H=0.8492,top10E=0.17,eRank=291.8,q75/q25=9.48 mlp_w2:H=0.9189,top10E=0.11,eRank=449.7,q75/q25=5.61 vo_prod:H=0.6368,top10E=0.16,eRank=151.4,q75/q25=inf train_time:397424ms step_avg:86.40ms +[2025-08-22 09:47:34] [Rank 0] PRINT: step:4600/10000 val_loss:3.9533 svd_entropy: attn_qk:H=0.7587,top10E=0.26,eRank=185.0,q75/q25=82.35 attn_vo:H=0.7931,top10E=0.08,eRank=307.8,q75/q25=inf mlp_w1:H=0.8492,top10E=0.17,eRank=291.8,q75/q25=9.48 mlp_w2:H=0.9189,top10E=0.11,eRank=449.7,q75/q25=5.61 vo_prod:H=0.6368,top10E=0.16,eRank=151.4,q75/q25=inf train_time:397424ms step_avg:86.40ms +[2025-08-22 09:47:34] [Rank 0] step:4601/10000 train_time:397439ms step_avg:86.38ms +[2025-08-22 09:47:34] [Rank 0] step:4601/10000 train_time:397439ms step_avg:86.38ms +[2025-08-22 09:47:36] [Rank 0] step:4621/10000 train_time:399156ms step_avg:86.38ms +[2025-08-22 09:47:36] [Rank 0] step:4621/10000 train_time:399156ms step_avg:86.38ms +[2025-08-22 09:47:37] [Rank 0] step:4641/10000 train_time:400948ms step_avg:86.39ms +[2025-08-22 
09:47:37] [Rank 0] step:4641/10000 train_time:400948ms step_avg:86.39ms +[2025-08-22 09:47:39] [Rank 0] step:4661/10000 train_time:402736ms step_avg:86.41ms +[2025-08-22 09:47:39] [Rank 0] step:4661/10000 train_time:402736ms step_avg:86.41ms +[2025-08-22 09:47:41] [Rank 0] step:4681/10000 train_time:404524ms step_avg:86.42ms +[2025-08-22 09:47:41] [Rank 0] step:4681/10000 train_time:404524ms step_avg:86.42ms +[2025-08-22 09:47:43] [Rank 0] step:4701/10000 train_time:406314ms step_avg:86.43ms +[2025-08-22 09:47:43] [Rank 0] step:4701/10000 train_time:406314ms step_avg:86.43ms +[2025-08-22 09:47:45] [Rank 0] step:4721/10000 train_time:408102ms step_avg:86.44ms +[2025-08-22 09:47:45] [Rank 0] step:4721/10000 train_time:408102ms step_avg:86.44ms +[2025-08-22 09:47:46] [Rank 0] step:4741/10000 train_time:409892ms step_avg:86.46ms +[2025-08-22 09:47:46] [Rank 0] step:4741/10000 train_time:409892ms step_avg:86.46ms +[2025-08-22 09:47:48] [Rank 0] step:4761/10000 train_time:411683ms step_avg:86.47ms +[2025-08-22 09:47:48] [Rank 0] step:4761/10000 train_time:411683ms step_avg:86.47ms +[2025-08-22 09:47:50] [Rank 0] step:4781/10000 train_time:413473ms step_avg:86.48ms +[2025-08-22 09:47:50] [Rank 0] step:4781/10000 train_time:413473ms step_avg:86.48ms +[2025-08-22 09:47:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:47:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:48:05] [Rank 0] PRINT: step:4800/10000 val_loss:3.9418 svd_entropy: attn_qk:H=0.7607,top10E=0.26,eRank=186.6,q75/q25=82.96 attn_vo:H=0.7946,top10E=0.08,eRank=310.9,q75/q25=inf mlp_w1:H=0.8514,top10E=0.17,eRank=296.3,q75/q25=9.36 mlp_w2:H=0.9204,top10E=0.11,eRank=454.0,q75/q25=5.49 vo_prod:H=0.6393,top10E=0.15,eRank=154.1,q75/q25=inf train_time:415355ms step_avg:86.53ms +[2025-08-22 09:48:05] [Rank 0] PRINT: step:4800/10000 val_loss:3.9418 svd_entropy: attn_qk:H=0.7607,top10E=0.26,eRank=186.6,q75/q25=82.96 attn_vo:H=0.7946,top10E=0.08,eRank=310.9,q75/q25=inf mlp_w1:H=0.8514,top10E=0.17,eRank=296.3,q75/q25=9.36 mlp_w2:H=0.9204,top10E=0.11,eRank=454.0,q75/q25=5.49 vo_prod:H=0.6393,top10E=0.15,eRank=154.1,q75/q25=inf train_time:415355ms step_avg:86.53ms +[2025-08-22 09:48:05] [Rank 0] step:4801/10000 train_time:415371ms step_avg:86.52ms +[2025-08-22 09:48:05] [Rank 0] step:4801/10000 train_time:415371ms step_avg:86.52ms +[2025-08-22 09:48:07] [Rank 0] step:4821/10000 train_time:417090ms step_avg:86.52ms +[2025-08-22 09:48:07] [Rank 0] step:4821/10000 train_time:417090ms step_avg:86.52ms +[2025-08-22 09:48:09] [Rank 0] step:4841/10000 train_time:418880ms step_avg:86.53ms +[2025-08-22 09:48:09] [Rank 0] step:4841/10000 train_time:418880ms step_avg:86.53ms +[2025-08-22 09:48:11] [Rank 0] step:4861/10000 train_time:420672ms step_avg:86.54ms +[2025-08-22 09:48:11] [Rank 0] step:4861/10000 train_time:420672ms step_avg:86.54ms +[2025-08-22 09:48:13] [Rank 0] step:4881/10000 train_time:422463ms step_avg:86.55ms +[2025-08-22 09:48:13] [Rank 0] step:4881/10000 train_time:422463ms step_avg:86.55ms +[2025-08-22 09:48:14] [Rank 0] step:4901/10000 train_time:424255ms step_avg:86.57ms +[2025-08-22 09:48:14] [Rank 0] step:4901/10000 train_time:424255ms step_avg:86.57ms +[2025-08-22 09:48:16] [Rank 0] step:4921/10000 train_time:426052ms step_avg:86.58ms +[2025-08-22 09:48:16] [Rank 0] step:4921/10000 train_time:426052ms step_avg:86.58ms +[2025-08-22 09:48:18] [Rank 0] 
step:4941/10000 train_time:427847ms step_avg:86.59ms +[2025-08-22 09:48:18] [Rank 0] step:4941/10000 train_time:427847ms step_avg:86.59ms +[2025-08-22 09:48:20] [Rank 0] step:4961/10000 train_time:429642ms step_avg:86.60ms +[2025-08-22 09:48:20] [Rank 0] step:4961/10000 train_time:429642ms step_avg:86.60ms +[2025-08-22 09:48:22] [Rank 0] step:4981/10000 train_time:431449ms step_avg:86.62ms +[2025-08-22 09:48:22] [Rank 0] step:4981/10000 train_time:431449ms step_avg:86.62ms +[2025-08-22 09:48:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:48:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:48:37] [Rank 0] PRINT: step:5000/10000 val_loss:3.9228 svd_entropy: attn_qk:H=0.7624,top10E=0.25,eRank=188.1,q75/q25=82.88 attn_vo:H=0.7960,top10E=0.08,eRank=313.6,q75/q25=inf mlp_w1:H=0.8535,top10E=0.17,eRank=300.5,q75/q25=9.22 mlp_w2:H=0.9218,top10E=0.10,eRank=458.2,q75/q25=5.38 vo_prod:H=0.6413,top10E=0.15,eRank=156.3,q75/q25=inf train_time:433334ms step_avg:86.67ms +[2025-08-22 09:48:37] [Rank 0] PRINT: step:5000/10000 val_loss:3.9228 svd_entropy: attn_qk:H=0.7624,top10E=0.25,eRank=188.1,q75/q25=82.88 attn_vo:H=0.7960,top10E=0.08,eRank=313.6,q75/q25=inf mlp_w1:H=0.8535,top10E=0.17,eRank=300.5,q75/q25=9.22 mlp_w2:H=0.9218,top10E=0.10,eRank=458.2,q75/q25=5.38 vo_prod:H=0.6413,top10E=0.15,eRank=156.3,q75/q25=inf train_time:433334ms step_avg:86.67ms +[2025-08-22 09:48:37] [Rank 0] step:5001/10000 train_time:433349ms step_avg:86.65ms +[2025-08-22 09:48:37] [Rank 0] step:5001/10000 train_time:433349ms step_avg:86.65ms +[2025-08-22 09:48:39] [Rank 0] step:5021/10000 train_time:435054ms step_avg:86.65ms +[2025-08-22 09:48:39] [Rank 0] step:5021/10000 train_time:435054ms step_avg:86.65ms +[2025-08-22 09:48:41] [Rank 0] step:5041/10000 train_time:436848ms step_avg:86.66ms +[2025-08-22 
09:48:41] [Rank 0] step:5041/10000 train_time:436848ms step_avg:86.66ms +[2025-08-22 09:48:42] [Rank 0] step:5061/10000 train_time:438638ms step_avg:86.67ms +[2025-08-22 09:48:42] [Rank 0] step:5061/10000 train_time:438638ms step_avg:86.67ms +[2025-08-22 09:48:44] [Rank 0] step:5081/10000 train_time:440430ms step_avg:86.68ms +[2025-08-22 09:48:44] [Rank 0] step:5081/10000 train_time:440430ms step_avg:86.68ms +[2025-08-22 09:48:46] [Rank 0] step:5101/10000 train_time:442221ms step_avg:86.69ms +[2025-08-22 09:48:46] [Rank 0] step:5101/10000 train_time:442221ms step_avg:86.69ms +[2025-08-22 09:48:48] [Rank 0] step:5121/10000 train_time:444014ms step_avg:86.70ms +[2025-08-22 09:48:48] [Rank 0] step:5121/10000 train_time:444014ms step_avg:86.70ms +[2025-08-22 09:48:50] [Rank 0] step:5141/10000 train_time:445810ms step_avg:86.72ms +[2025-08-22 09:48:50] [Rank 0] step:5141/10000 train_time:445810ms step_avg:86.72ms +[2025-08-22 09:48:51] [Rank 0] step:5161/10000 train_time:447601ms step_avg:86.73ms +[2025-08-22 09:48:51] [Rank 0] step:5161/10000 train_time:447601ms step_avg:86.73ms +[2025-08-22 09:48:53] [Rank 0] step:5181/10000 train_time:449395ms step_avg:86.74ms +[2025-08-22 09:48:53] [Rank 0] step:5181/10000 train_time:449395ms step_avg:86.74ms +[2025-08-22 09:48:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:48:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:49:08] [Rank 0] PRINT: step:5200/10000 val_loss:3.9015 svd_entropy: attn_qk:H=0.7642,top10E=0.25,eRank=189.6,q75/q25=83.07 attn_vo:H=0.7973,top10E=0.08,eRank=316.2,q75/q25=inf mlp_w1:H=0.8555,top10E=0.17,eRank=304.6,q75/q25=9.08 mlp_w2:H=0.9230,top10E=0.10,eRank=462.0,q75/q25=5.27 vo_prod:H=0.6434,top10E=0.15,eRank=158.6,q75/q25=inf train_time:451302ms step_avg:86.79ms +[2025-08-22 09:49:08] [Rank 0] PRINT: step:5200/10000 val_loss:3.9015 svd_entropy: attn_qk:H=0.7642,top10E=0.25,eRank=189.6,q75/q25=83.07 attn_vo:H=0.7973,top10E=0.08,eRank=316.2,q75/q25=inf mlp_w1:H=0.8555,top10E=0.17,eRank=304.6,q75/q25=9.08 mlp_w2:H=0.9230,top10E=0.10,eRank=462.0,q75/q25=5.27 vo_prod:H=0.6434,top10E=0.15,eRank=158.6,q75/q25=inf train_time:451302ms step_avg:86.79ms +[2025-08-22 09:49:09] [Rank 0] step:5201/10000 train_time:451316ms step_avg:86.77ms +[2025-08-22 09:49:09] [Rank 0] step:5201/10000 train_time:451316ms step_avg:86.77ms +[2025-08-22 09:49:10] [Rank 0] step:5221/10000 train_time:453047ms step_avg:86.77ms +[2025-08-22 09:49:10] [Rank 0] step:5221/10000 train_time:453047ms step_avg:86.77ms +[2025-08-22 09:49:12] [Rank 0] step:5241/10000 train_time:454871ms step_avg:86.79ms +[2025-08-22 09:49:12] [Rank 0] step:5241/10000 train_time:454871ms step_avg:86.79ms +[2025-08-22 09:49:14] [Rank 0] step:5261/10000 train_time:456690ms step_avg:86.81ms +[2025-08-22 09:49:14] [Rank 0] step:5261/10000 train_time:456690ms step_avg:86.81ms +[2025-08-22 09:49:16] [Rank 0] step:5281/10000 train_time:458510ms step_avg:86.82ms +[2025-08-22 09:49:16] [Rank 0] step:5281/10000 train_time:458510ms step_avg:86.82ms +[2025-08-22 09:49:18] [Rank 0] step:5301/10000 train_time:460340ms step_avg:86.84ms +[2025-08-22 09:49:18] [Rank 0] step:5301/10000 train_time:460340ms step_avg:86.84ms +[2025-08-22 09:49:20] [Rank 0] step:5321/10000 train_time:462162ms step_avg:86.86ms +[2025-08-22 09:49:20] [Rank 0] step:5321/10000 train_time:462162ms step_avg:86.86ms +[2025-08-22 09:49:21] [Rank 0] 
step:5341/10000 train_time:463984ms step_avg:86.87ms +[2025-08-22 09:49:21] [Rank 0] step:5341/10000 train_time:463984ms step_avg:86.87ms +[2025-08-22 09:49:23] [Rank 0] step:5361/10000 train_time:465810ms step_avg:86.89ms +[2025-08-22 09:49:23] [Rank 0] step:5361/10000 train_time:465810ms step_avg:86.89ms +[2025-08-22 09:49:25] [Rank 0] step:5381/10000 train_time:467637ms step_avg:86.91ms +[2025-08-22 09:49:25] [Rank 0] step:5381/10000 train_time:467637ms step_avg:86.91ms +[2025-08-22 09:49:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:49:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:49:40] [Rank 0] PRINT: step:5400/10000 val_loss:3.8867 svd_entropy: attn_qk:H=0.7658,top10E=0.25,eRank=190.9,q75/q25=82.49 attn_vo:H=0.7984,top10E=0.08,eRank=318.6,q75/q25=inf mlp_w1:H=0.8574,top10E=0.16,eRank=308.5,q75/q25=8.96 mlp_w2:H=0.9241,top10E=0.10,eRank=465.4,q75/q25=5.18 vo_prod:H=0.6452,top10E=0.15,eRank=160.7,q75/q25=inf train_time:469550ms step_avg:86.95ms +[2025-08-22 09:49:40] [Rank 0] PRINT: step:5400/10000 val_loss:3.8867 svd_entropy: attn_qk:H=0.7658,top10E=0.25,eRank=190.9,q75/q25=82.49 attn_vo:H=0.7984,top10E=0.08,eRank=318.6,q75/q25=inf mlp_w1:H=0.8574,top10E=0.16,eRank=308.5,q75/q25=8.96 mlp_w2:H=0.9241,top10E=0.10,eRank=465.4,q75/q25=5.18 vo_prod:H=0.6452,top10E=0.15,eRank=160.7,q75/q25=inf train_time:469550ms step_avg:86.95ms +[2025-08-22 09:49:40] [Rank 0] step:5401/10000 train_time:469566ms step_avg:86.94ms +[2025-08-22 09:49:40] [Rank 0] step:5401/10000 train_time:469566ms step_avg:86.94ms +[2025-08-22 09:49:42] [Rank 0] step:5421/10000 train_time:471300ms step_avg:86.94ms +[2025-08-22 09:49:42] [Rank 0] step:5421/10000 train_time:471300ms step_avg:86.94ms +[2025-08-22 09:49:44] [Rank 0] step:5441/10000 train_time:473119ms step_avg:86.95ms +[2025-08-22 
09:49:44] [Rank 0] step:5441/10000 train_time:473119ms step_avg:86.95ms +[2025-08-22 09:49:46] [Rank 0] step:5461/10000 train_time:474947ms step_avg:86.97ms +[2025-08-22 09:49:46] [Rank 0] step:5461/10000 train_time:474947ms step_avg:86.97ms +[2025-08-22 09:49:48] [Rank 0] step:5481/10000 train_time:476773ms step_avg:86.99ms +[2025-08-22 09:49:48] [Rank 0] step:5481/10000 train_time:476773ms step_avg:86.99ms +[2025-08-22 09:49:49] [Rank 0] step:5501/10000 train_time:478602ms step_avg:87.00ms +[2025-08-22 09:49:49] [Rank 0] step:5501/10000 train_time:478602ms step_avg:87.00ms +[2025-08-22 09:49:51] [Rank 0] step:5521/10000 train_time:480434ms step_avg:87.02ms +[2025-08-22 09:49:51] [Rank 0] step:5521/10000 train_time:480434ms step_avg:87.02ms +[2025-08-22 09:49:53] [Rank 0] step:5541/10000 train_time:482263ms step_avg:87.04ms +[2025-08-22 09:49:53] [Rank 0] step:5541/10000 train_time:482263ms step_avg:87.04ms +[2025-08-22 09:49:55] [Rank 0] step:5561/10000 train_time:484088ms step_avg:87.05ms +[2025-08-22 09:49:55] [Rank 0] step:5561/10000 train_time:484088ms step_avg:87.05ms +[2025-08-22 09:49:57] [Rank 0] step:5581/10000 train_time:485915ms step_avg:87.07ms +[2025-08-22 09:49:57] [Rank 0] step:5581/10000 train_time:485915ms step_avg:87.07ms +[2025-08-22 09:49:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:49:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:50:12] [Rank 0] PRINT: step:5600/10000 val_loss:3.8754 svd_entropy: attn_qk:H=0.7673,top10E=0.25,eRank=192.3,q75/q25=82.63 attn_vo:H=0.7995,top10E=0.08,eRank=320.9,q75/q25=inf mlp_w1:H=0.8591,top10E=0.16,eRank=312.2,q75/q25=8.84 mlp_w2:H=0.9251,top10E=0.10,eRank=468.5,q75/q25=5.10 vo_prod:H=0.6470,top10E=0.15,eRank=162.7,q75/q25=inf train_time:487834ms step_avg:87.11ms +[2025-08-22 09:50:12] [Rank 0] PRINT: step:5600/10000 val_loss:3.8754 svd_entropy: attn_qk:H=0.7673,top10E=0.25,eRank=192.3,q75/q25=82.63 attn_vo:H=0.7995,top10E=0.08,eRank=320.9,q75/q25=inf mlp_w1:H=0.8591,top10E=0.16,eRank=312.2,q75/q25=8.84 mlp_w2:H=0.9251,top10E=0.10,eRank=468.5,q75/q25=5.10 vo_prod:H=0.6470,top10E=0.15,eRank=162.7,q75/q25=inf train_time:487834ms step_avg:87.11ms +[2025-08-22 09:50:12] [Rank 0] step:5601/10000 train_time:487849ms step_avg:87.10ms +[2025-08-22 09:50:12] [Rank 0] step:5601/10000 train_time:487849ms step_avg:87.10ms +[2025-08-22 09:50:14] [Rank 0] step:5621/10000 train_time:489589ms step_avg:87.10ms +[2025-08-22 09:50:14] [Rank 0] step:5621/10000 train_time:489589ms step_avg:87.10ms +[2025-08-22 09:50:16] [Rank 0] step:5641/10000 train_time:491410ms step_avg:87.11ms +[2025-08-22 09:50:16] [Rank 0] step:5641/10000 train_time:491410ms step_avg:87.11ms +[2025-08-22 09:50:18] [Rank 0] step:5661/10000 train_time:493229ms step_avg:87.13ms +[2025-08-22 09:50:18] [Rank 0] step:5661/10000 train_time:493229ms step_avg:87.13ms +[2025-08-22 09:50:20] [Rank 0] step:5681/10000 train_time:495055ms step_avg:87.14ms +[2025-08-22 09:50:20] [Rank 0] step:5681/10000 train_time:495055ms step_avg:87.14ms +[2025-08-22 09:50:21] [Rank 0] step:5701/10000 train_time:496877ms step_avg:87.16ms +[2025-08-22 09:50:21] [Rank 0] step:5701/10000 train_time:496877ms step_avg:87.16ms +[2025-08-22 09:50:23] [Rank 0] step:5721/10000 train_time:498705ms step_avg:87.17ms +[2025-08-22 09:50:23] [Rank 0] step:5721/10000 train_time:498705ms step_avg:87.17ms +[2025-08-22 09:50:25] [Rank 0] 
step:5741/10000 train_time:500527ms step_avg:87.18ms +[2025-08-22 09:50:25] [Rank 0] step:5741/10000 train_time:500527ms step_avg:87.18ms +[2025-08-22 09:50:27] [Rank 0] step:5761/10000 train_time:502350ms step_avg:87.20ms +[2025-08-22 09:50:27] [Rank 0] step:5761/10000 train_time:502350ms step_avg:87.20ms +[2025-08-22 09:50:29] [Rank 0] step:5781/10000 train_time:504177ms step_avg:87.21ms +[2025-08-22 09:50:29] [Rank 0] step:5781/10000 train_time:504177ms step_avg:87.21ms +[2025-08-22 09:50:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:50:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:50:44] [Rank 0] PRINT: step:5800/10000 val_loss:3.9007 svd_entropy: attn_qk:H=0.7688,top10E=0.24,eRank=193.6,q75/q25=82.35 attn_vo:H=0.8005,top10E=0.08,eRank=322.9,q75/q25=inf mlp_w1:H=0.8608,top10E=0.16,eRank=315.9,q75/q25=8.71 mlp_w2:H=0.9260,top10E=0.10,eRank=471.4,q75/q25=5.02 vo_prod:H=0.6486,top10E=0.15,eRank=164.6,q75/q25=inf train_time:506092ms step_avg:87.26ms +[2025-08-22 09:50:44] [Rank 0] PRINT: step:5800/10000 val_loss:3.9007 svd_entropy: attn_qk:H=0.7688,top10E=0.24,eRank=193.6,q75/q25=82.35 attn_vo:H=0.8005,top10E=0.08,eRank=322.9,q75/q25=inf mlp_w1:H=0.8608,top10E=0.16,eRank=315.9,q75/q25=8.71 mlp_w2:H=0.9260,top10E=0.10,eRank=471.4,q75/q25=5.02 vo_prod:H=0.6486,top10E=0.15,eRank=164.6,q75/q25=inf train_time:506092ms step_avg:87.26ms +[2025-08-22 09:50:44] [Rank 0] step:5801/10000 train_time:506106ms step_avg:87.24ms +[2025-08-22 09:50:44] [Rank 0] step:5801/10000 train_time:506106ms step_avg:87.24ms +[2025-08-22 09:50:46] [Rank 0] step:5821/10000 train_time:507835ms step_avg:87.24ms +[2025-08-22 09:50:46] [Rank 0] step:5821/10000 train_time:507835ms step_avg:87.24ms +[2025-08-22 09:50:48] [Rank 0] step:5841/10000 train_time:509657ms step_avg:87.26ms +[2025-08-22 
09:50:48] [Rank 0] step:5841/10000 train_time:509657ms step_avg:87.26ms +[2025-08-22 09:50:50] [Rank 0] step:5861/10000 train_time:511484ms step_avg:87.27ms +[2025-08-22 09:50:50] [Rank 0] step:5861/10000 train_time:511484ms step_avg:87.27ms +[2025-08-22 09:50:52] [Rank 0] step:5881/10000 train_time:513308ms step_avg:87.28ms +[2025-08-22 09:50:52] [Rank 0] step:5881/10000 train_time:513308ms step_avg:87.28ms +[2025-08-22 09:50:53] [Rank 0] step:5901/10000 train_time:515134ms step_avg:87.30ms +[2025-08-22 09:50:53] [Rank 0] step:5901/10000 train_time:515134ms step_avg:87.30ms +[2025-08-22 09:50:55] [Rank 0] step:5921/10000 train_time:516959ms step_avg:87.31ms +[2025-08-22 09:50:55] [Rank 0] step:5921/10000 train_time:516959ms step_avg:87.31ms +[2025-08-22 09:50:57] [Rank 0] step:5941/10000 train_time:518785ms step_avg:87.32ms +[2025-08-22 09:50:57] [Rank 0] step:5941/10000 train_time:518785ms step_avg:87.32ms +[2025-08-22 09:50:59] [Rank 0] step:5961/10000 train_time:520612ms step_avg:87.34ms +[2025-08-22 09:50:59] [Rank 0] step:5961/10000 train_time:520612ms step_avg:87.34ms +[2025-08-22 09:51:01] [Rank 0] step:5981/10000 train_time:522438ms step_avg:87.35ms +[2025-08-22 09:51:01] [Rank 0] step:5981/10000 train_time:522438ms step_avg:87.35ms +[2025-08-22 09:51:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:51:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:51:16] [Rank 0] PRINT: step:6000/10000 val_loss:3.8504 svd_entropy: attn_qk:H=0.7702,top10E=0.24,eRank=194.9,q75/q25=82.48 attn_vo:H=0.8015,top10E=0.08,eRank=325.0,q75/q25=inf mlp_w1:H=0.8624,top10E=0.16,eRank=319.4,q75/q25=8.60 mlp_w2:H=0.9269,top10E=0.10,eRank=474.1,q75/q25=4.96 vo_prod:H=0.6501,top10E=0.14,eRank=166.3,q75/q25=inf train_time:524351ms step_avg:87.39ms +[2025-08-22 09:51:16] [Rank 0] PRINT: step:6000/10000 val_loss:3.8504 svd_entropy: attn_qk:H=0.7702,top10E=0.24,eRank=194.9,q75/q25=82.48 attn_vo:H=0.8015,top10E=0.08,eRank=325.0,q75/q25=inf mlp_w1:H=0.8624,top10E=0.16,eRank=319.4,q75/q25=8.60 mlp_w2:H=0.9269,top10E=0.10,eRank=474.1,q75/q25=4.96 vo_prod:H=0.6501,top10E=0.14,eRank=166.3,q75/q25=inf train_time:524351ms step_avg:87.39ms +[2025-08-22 09:51:16] [Rank 0] step:6001/10000 train_time:524366ms step_avg:87.38ms +[2025-08-22 09:51:16] [Rank 0] step:6001/10000 train_time:524366ms step_avg:87.38ms +[2025-08-22 09:51:18] [Rank 0] step:6021/10000 train_time:526113ms step_avg:87.38ms +[2025-08-22 09:51:18] [Rank 0] step:6021/10000 train_time:526113ms step_avg:87.38ms +[2025-08-22 09:51:20] [Rank 0] step:6041/10000 train_time:527937ms step_avg:87.39ms +[2025-08-22 09:51:20] [Rank 0] step:6041/10000 train_time:527937ms step_avg:87.39ms +[2025-08-22 09:51:22] [Rank 0] step:6061/10000 train_time:529766ms step_avg:87.41ms +[2025-08-22 09:51:22] [Rank 0] step:6061/10000 train_time:529766ms step_avg:87.41ms +[2025-08-22 09:51:24] [Rank 0] step:6081/10000 train_time:531624ms step_avg:87.42ms +[2025-08-22 09:51:24] [Rank 0] step:6081/10000 train_time:531624ms step_avg:87.42ms +[2025-08-22 09:51:25] [Rank 0] step:6101/10000 train_time:533453ms step_avg:87.44ms +[2025-08-22 09:51:25] [Rank 0] step:6101/10000 train_time:533453ms step_avg:87.44ms +[2025-08-22 09:51:27] [Rank 0] step:6121/10000 train_time:535546ms step_avg:87.49ms +[2025-08-22 09:51:27] [Rank 0] step:6121/10000 train_time:535546ms step_avg:87.49ms +[2025-08-22 09:51:29] [Rank 0] 
step:6141/10000 train_time:537381ms step_avg:87.51ms +[2025-08-22 09:51:29] [Rank 0] step:6141/10000 train_time:537381ms step_avg:87.51ms +[2025-08-22 09:51:31] [Rank 0] step:6161/10000 train_time:539208ms step_avg:87.52ms +[2025-08-22 09:51:31] [Rank 0] step:6161/10000 train_time:539208ms step_avg:87.52ms +[2025-08-22 09:51:33] [Rank 0] step:6181/10000 train_time:541031ms step_avg:87.53ms +[2025-08-22 09:51:33] [Rank 0] step:6181/10000 train_time:541031ms step_avg:87.53ms +[2025-08-22 09:51:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:51:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:51:48] [Rank 0] PRINT: step:6200/10000 val_loss:3.8298 svd_entropy: attn_qk:H=0.7715,top10E=0.24,eRank=196.1,q75/q25=82.36 attn_vo:H=0.8024,top10E=0.08,eRank=326.9,q75/q25=inf mlp_w1:H=0.8639,top10E=0.16,eRank=322.7,q75/q25=8.50 mlp_w2:H=0.9277,top10E=0.10,eRank=476.7,q75/q25=4.90 vo_prod:H=0.6515,top10E=0.14,eRank=168.0,q75/q25=inf train_time:542953ms step_avg:87.57ms +[2025-08-22 09:51:48] [Rank 0] PRINT: step:6200/10000 val_loss:3.8298 svd_entropy: attn_qk:H=0.7715,top10E=0.24,eRank=196.1,q75/q25=82.36 attn_vo:H=0.8024,top10E=0.08,eRank=326.9,q75/q25=inf mlp_w1:H=0.8639,top10E=0.16,eRank=322.7,q75/q25=8.50 mlp_w2:H=0.9277,top10E=0.10,eRank=476.7,q75/q25=4.90 vo_prod:H=0.6515,top10E=0.14,eRank=168.0,q75/q25=inf train_time:542953ms step_avg:87.57ms +[2025-08-22 09:51:49] [Rank 0] step:6201/10000 train_time:542969ms step_avg:87.56ms +[2025-08-22 09:51:49] [Rank 0] step:6201/10000 train_time:542969ms step_avg:87.56ms +[2025-08-22 09:51:50] [Rank 0] step:6221/10000 train_time:544708ms step_avg:87.56ms +[2025-08-22 09:51:50] [Rank 0] step:6221/10000 train_time:544708ms step_avg:87.56ms +[2025-08-22 09:51:52] [Rank 0] step:6241/10000 train_time:546533ms step_avg:87.57ms +[2025-08-22 
09:51:52] [Rank 0] step:6241/10000 train_time:546533ms step_avg:87.57ms +[2025-08-22 09:51:54] [Rank 0] step:6261/10000 train_time:548361ms step_avg:87.58ms +[2025-08-22 09:51:54] [Rank 0] step:6261/10000 train_time:548361ms step_avg:87.58ms +[2025-08-22 09:51:56] [Rank 0] step:6281/10000 train_time:550193ms step_avg:87.60ms +[2025-08-22 09:51:56] [Rank 0] step:6281/10000 train_time:550193ms step_avg:87.60ms +[2025-08-22 09:51:58] [Rank 0] step:6301/10000 train_time:552023ms step_avg:87.61ms +[2025-08-22 09:51:58] [Rank 0] step:6301/10000 train_time:552023ms step_avg:87.61ms +[2025-08-22 09:51:59] [Rank 0] step:6321/10000 train_time:553853ms step_avg:87.62ms +[2025-08-22 09:51:59] [Rank 0] step:6321/10000 train_time:553853ms step_avg:87.62ms +[2025-08-22 09:52:01] [Rank 0] step:6341/10000 train_time:555687ms step_avg:87.63ms +[2025-08-22 09:52:01] [Rank 0] step:6341/10000 train_time:555687ms step_avg:87.63ms +[2025-08-22 09:52:03] [Rank 0] step:6361/10000 train_time:557525ms step_avg:87.65ms +[2025-08-22 09:52:03] [Rank 0] step:6361/10000 train_time:557525ms step_avg:87.65ms +[2025-08-22 09:52:05] [Rank 0] step:6381/10000 train_time:559360ms step_avg:87.66ms +[2025-08-22 09:52:05] [Rank 0] step:6381/10000 train_time:559360ms step_avg:87.66ms +[2025-08-22 09:52:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:52:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:52:20] [Rank 0] PRINT: step:6400/10000 val_loss:3.8121 svd_entropy: attn_qk:H=0.7727,top10E=0.24,eRank=197.2,q75/q25=82.15 attn_vo:H=0.8032,top10E=0.08,eRank=328.5,q75/q25=inf mlp_w1:H=0.8653,top10E=0.16,eRank=325.5,q75/q25=8.41 mlp_w2:H=0.9283,top10E=0.10,eRank=478.8,q75/q25=4.85 vo_prod:H=0.6528,top10E=0.14,eRank=169.5,q75/q25=inf train_time:561283ms step_avg:87.70ms +[2025-08-22 09:52:20] [Rank 0] PRINT: step:6400/10000 val_loss:3.8121 svd_entropy: attn_qk:H=0.7727,top10E=0.24,eRank=197.2,q75/q25=82.15 attn_vo:H=0.8032,top10E=0.08,eRank=328.5,q75/q25=inf mlp_w1:H=0.8653,top10E=0.16,eRank=325.5,q75/q25=8.41 mlp_w2:H=0.9283,top10E=0.10,eRank=478.8,q75/q25=4.85 vo_prod:H=0.6528,top10E=0.14,eRank=169.5,q75/q25=inf train_time:561283ms step_avg:87.70ms +[2025-08-22 09:52:21] [Rank 0] step:6401/10000 train_time:561298ms step_avg:87.69ms +[2025-08-22 09:52:21] [Rank 0] step:6401/10000 train_time:561298ms step_avg:87.69ms +[2025-08-22 09:52:22] [Rank 0] step:6421/10000 train_time:563052ms step_avg:87.69ms +[2025-08-22 09:52:22] [Rank 0] step:6421/10000 train_time:563052ms step_avg:87.69ms +[2025-08-22 09:52:24] [Rank 0] step:6441/10000 train_time:564881ms step_avg:87.70ms +[2025-08-22 09:52:24] [Rank 0] step:6441/10000 train_time:564881ms step_avg:87.70ms +[2025-08-22 09:52:26] [Rank 0] step:6461/10000 train_time:566714ms step_avg:87.71ms +[2025-08-22 09:52:26] [Rank 0] step:6461/10000 train_time:566714ms step_avg:87.71ms +[2025-08-22 09:52:28] [Rank 0] step:6481/10000 train_time:568550ms step_avg:87.73ms +[2025-08-22 09:52:28] [Rank 0] step:6481/10000 train_time:568550ms step_avg:87.73ms +[2025-08-22 09:52:30] [Rank 0] step:6501/10000 train_time:570377ms step_avg:87.74ms +[2025-08-22 09:52:30] [Rank 0] step:6501/10000 train_time:570377ms step_avg:87.74ms +[2025-08-22 09:52:32] [Rank 0] step:6521/10000 train_time:572200ms step_avg:87.75ms +[2025-08-22 09:52:32] [Rank 0] step:6521/10000 train_time:572200ms step_avg:87.75ms +[2025-08-22 09:52:33] [Rank 0] 
step:6541/10000 train_time:574032ms step_avg:87.76ms +[2025-08-22 09:52:33] [Rank 0] step:6541/10000 train_time:574032ms step_avg:87.76ms +[2025-08-22 09:52:35] [Rank 0] step:6561/10000 train_time:575864ms step_avg:87.77ms +[2025-08-22 09:52:35] [Rank 0] step:6561/10000 train_time:575864ms step_avg:87.77ms +[2025-08-22 09:52:37] [Rank 0] step:6581/10000 train_time:577690ms step_avg:87.78ms +[2025-08-22 09:52:37] [Rank 0] step:6581/10000 train_time:577690ms step_avg:87.78ms +[2025-08-22 09:52:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:52:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:52:52] [Rank 0] PRINT: step:6600/10000 val_loss:3.7964 svd_entropy: attn_qk:H=0.7738,top10E=0.24,eRank=198.2,q75/q25=82.12 attn_vo:H=0.8039,top10E=0.08,eRank=330.1,q75/q25=inf mlp_w1:H=0.8664,top10E=0.16,eRank=328.2,q75/q25=8.33 mlp_w2:H=0.9290,top10E=0.10,eRank=480.7,q75/q25=4.81 vo_prod:H=0.6538,top10E=0.14,eRank=170.8,q75/q25=inf train_time:579614ms step_avg:87.82ms +[2025-08-22 09:52:52] [Rank 0] PRINT: step:6600/10000 val_loss:3.7964 svd_entropy: attn_qk:H=0.7738,top10E=0.24,eRank=198.2,q75/q25=82.12 attn_vo:H=0.8039,top10E=0.08,eRank=330.1,q75/q25=inf mlp_w1:H=0.8664,top10E=0.16,eRank=328.2,q75/q25=8.33 mlp_w2:H=0.9290,top10E=0.10,eRank=480.7,q75/q25=4.81 vo_prod:H=0.6538,top10E=0.14,eRank=170.8,q75/q25=inf train_time:579614ms step_avg:87.82ms +[2025-08-22 09:52:53] [Rank 0] step:6601/10000 train_time:579628ms step_avg:87.81ms +[2025-08-22 09:52:53] [Rank 0] step:6601/10000 train_time:579628ms step_avg:87.81ms +[2025-08-22 09:52:54] [Rank 0] step:6621/10000 train_time:581393ms step_avg:87.81ms +[2025-08-22 09:52:54] [Rank 0] step:6621/10000 train_time:581393ms step_avg:87.81ms +[2025-08-22 09:52:56] [Rank 0] step:6641/10000 train_time:583225ms step_avg:87.82ms +[2025-08-22 
09:52:56] [Rank 0] step:6641/10000 train_time:583225ms step_avg:87.82ms +[2025-08-22 09:52:58] [Rank 0] step:6661/10000 train_time:585051ms step_avg:87.83ms +[2025-08-22 09:52:58] [Rank 0] step:6661/10000 train_time:585051ms step_avg:87.83ms +[2025-08-22 09:53:00] [Rank 0] step:6681/10000 train_time:586899ms step_avg:87.85ms +[2025-08-22 09:53:00] [Rank 0] step:6681/10000 train_time:586899ms step_avg:87.85ms +[2025-08-22 09:53:02] [Rank 0] step:6701/10000 train_time:588764ms step_avg:87.86ms +[2025-08-22 09:53:02] [Rank 0] step:6701/10000 train_time:588764ms step_avg:87.86ms +[2025-08-22 09:53:04] [Rank 0] step:6721/10000 train_time:590627ms step_avg:87.88ms +[2025-08-22 09:53:04] [Rank 0] step:6721/10000 train_time:590627ms step_avg:87.88ms +[2025-08-22 09:53:06] [Rank 0] step:6741/10000 train_time:592487ms step_avg:87.89ms +[2025-08-22 09:53:06] [Rank 0] step:6741/10000 train_time:592487ms step_avg:87.89ms +[2025-08-22 09:53:07] [Rank 0] step:6761/10000 train_time:594342ms step_avg:87.91ms +[2025-08-22 09:53:07] [Rank 0] step:6761/10000 train_time:594342ms step_avg:87.91ms +[2025-08-22 09:53:09] [Rank 0] step:6781/10000 train_time:596204ms step_avg:87.92ms +[2025-08-22 09:53:09] [Rank 0] step:6781/10000 train_time:596204ms step_avg:87.92ms +[2025-08-22 09:53:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:53:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:53:25] [Rank 0] PRINT: step:6800/10000 val_loss:3.7826 svd_entropy: attn_qk:H=0.7749,top10E=0.24,eRank=199.2,q75/q25=81.93 attn_vo:H=0.8045,top10E=0.07,eRank=331.4,q75/q25=inf mlp_w1:H=0.8676,top10E=0.15,eRank=330.7,q75/q25=8.25 mlp_w2:H=0.9295,top10E=0.10,eRank=482.5,q75/q25=4.75 vo_prod:H=0.6548,top10E=0.14,eRank=172.0,q75/q25=inf train_time:598161ms step_avg:87.96ms +[2025-08-22 09:53:25] [Rank 0] PRINT: step:6800/10000 val_loss:3.7826 svd_entropy: attn_qk:H=0.7749,top10E=0.24,eRank=199.2,q75/q25=81.93 attn_vo:H=0.8045,top10E=0.07,eRank=331.4,q75/q25=inf mlp_w1:H=0.8676,top10E=0.15,eRank=330.7,q75/q25=8.25 mlp_w2:H=0.9295,top10E=0.10,eRank=482.5,q75/q25=4.75 vo_prod:H=0.6548,top10E=0.14,eRank=172.0,q75/q25=inf train_time:598161ms step_avg:87.96ms +[2025-08-22 09:53:25] [Rank 0] step:6801/10000 train_time:598177ms step_avg:87.95ms +[2025-08-22 09:53:25] [Rank 0] step:6801/10000 train_time:598177ms step_avg:87.95ms +[2025-08-22 09:53:27] [Rank 0] step:6821/10000 train_time:599955ms step_avg:87.96ms +[2025-08-22 09:53:27] [Rank 0] step:6821/10000 train_time:599955ms step_avg:87.96ms +[2025-08-22 09:53:29] [Rank 0] step:6841/10000 train_time:601811ms step_avg:87.97ms +[2025-08-22 09:53:29] [Rank 0] step:6841/10000 train_time:601811ms step_avg:87.97ms +[2025-08-22 09:53:30] [Rank 0] step:6861/10000 train_time:603668ms step_avg:87.99ms +[2025-08-22 09:53:30] [Rank 0] step:6861/10000 train_time:603668ms step_avg:87.99ms +[2025-08-22 09:53:32] [Rank 0] step:6881/10000 train_time:605528ms step_avg:88.00ms +[2025-08-22 09:53:32] [Rank 0] step:6881/10000 train_time:605528ms step_avg:88.00ms +[2025-08-22 09:53:34] [Rank 0] step:6901/10000 train_time:607387ms step_avg:88.01ms +[2025-08-22 09:53:34] [Rank 0] step:6901/10000 train_time:607387ms step_avg:88.01ms +[2025-08-22 09:53:36] [Rank 0] step:6921/10000 train_time:609242ms step_avg:88.03ms +[2025-08-22 09:53:36] [Rank 0] step:6921/10000 train_time:609242ms step_avg:88.03ms +[2025-08-22 09:53:38] [Rank 0] 
step:6941/10000 train_time:611107ms step_avg:88.04ms +[2025-08-22 09:53:38] [Rank 0] step:6941/10000 train_time:611107ms step_avg:88.04ms +[2025-08-22 09:53:40] [Rank 0] step:6961/10000 train_time:612983ms step_avg:88.06ms +[2025-08-22 09:53:40] [Rank 0] step:6961/10000 train_time:612983ms step_avg:88.06ms +[2025-08-22 09:53:42] [Rank 0] step:6981/10000 train_time:614850ms step_avg:88.07ms +[2025-08-22 09:53:42] [Rank 0] step:6981/10000 train_time:614850ms step_avg:88.07ms +[2025-08-22 09:53:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:53:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:53:57] [Rank 0] PRINT: step:7000/10000 val_loss:3.7641 svd_entropy: attn_qk:H=0.7758,top10E=0.24,eRank=200.1,q75/q25=81.66 attn_vo:H=0.8051,top10E=0.07,eRank=332.6,q75/q25=inf mlp_w1:H=0.8686,top10E=0.15,eRank=333.1,q75/q25=8.18 mlp_w2:H=0.9300,top10E=0.10,eRank=484.1,q75/q25=4.72 vo_prod:H=0.6557,top10E=0.14,eRank=173.1,q75/q25=inf train_time:616808ms step_avg:88.12ms +[2025-08-22 09:53:57] [Rank 0] PRINT: step:7000/10000 val_loss:3.7641 svd_entropy: attn_qk:H=0.7758,top10E=0.24,eRank=200.1,q75/q25=81.66 attn_vo:H=0.8051,top10E=0.07,eRank=332.6,q75/q25=inf mlp_w1:H=0.8686,top10E=0.15,eRank=333.1,q75/q25=8.18 mlp_w2:H=0.9300,top10E=0.10,eRank=484.1,q75/q25=4.72 vo_prod:H=0.6557,top10E=0.14,eRank=173.1,q75/q25=inf train_time:616808ms step_avg:88.12ms +[2025-08-22 09:53:57] [Rank 0] step:7001/10000 train_time:616823ms step_avg:88.10ms +[2025-08-22 09:53:57] [Rank 0] step:7001/10000 train_time:616823ms step_avg:88.10ms +[2025-08-22 09:53:59] [Rank 0] step:7021/10000 train_time:618586ms step_avg:88.11ms +[2025-08-22 09:53:59] [Rank 0] step:7021/10000 train_time:618586ms step_avg:88.11ms +[2025-08-22 09:54:01] [Rank 0] step:7041/10000 train_time:620440ms step_avg:88.12ms +[2025-08-22 
09:54:01] [Rank 0] step:7041/10000 train_time:620440ms step_avg:88.12ms +[2025-08-22 09:54:03] [Rank 0] step:7061/10000 train_time:622297ms step_avg:88.13ms +[2025-08-22 09:54:03] [Rank 0] step:7061/10000 train_time:622297ms step_avg:88.13ms +[2025-08-22 09:54:05] [Rank 0] step:7081/10000 train_time:624152ms step_avg:88.14ms +[2025-08-22 09:54:05] [Rank 0] step:7081/10000 train_time:624152ms step_avg:88.14ms +[2025-08-22 09:54:07] [Rank 0] step:7101/10000 train_time:626013ms step_avg:88.16ms +[2025-08-22 09:54:07] [Rank 0] step:7101/10000 train_time:626013ms step_avg:88.16ms +[2025-08-22 09:54:08] [Rank 0] step:7121/10000 train_time:627868ms step_avg:88.17ms +[2025-08-22 09:54:08] [Rank 0] step:7121/10000 train_time:627868ms step_avg:88.17ms +[2025-08-22 09:54:10] [Rank 0] step:7141/10000 train_time:629724ms step_avg:88.18ms +[2025-08-22 09:54:10] [Rank 0] step:7141/10000 train_time:629724ms step_avg:88.18ms +[2025-08-22 09:54:12] [Rank 0] step:7161/10000 train_time:631585ms step_avg:88.20ms +[2025-08-22 09:54:12] [Rank 0] step:7161/10000 train_time:631585ms step_avg:88.20ms +[2025-08-22 09:54:14] [Rank 0] step:7181/10000 train_time:633445ms step_avg:88.21ms +[2025-08-22 09:54:14] [Rank 0] step:7181/10000 train_time:633445ms step_avg:88.21ms +[2025-08-22 09:54:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:54:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:54:30] [Rank 0] PRINT: step:7200/10000 val_loss:3.7501 svd_entropy: attn_qk:H=0.7767,top10E=0.23,eRank=200.9,q75/q25=81.88 attn_vo:H=0.8056,top10E=0.07,eRank=333.8,q75/q25=inf mlp_w1:H=0.8695,top10E=0.15,eRank=335.1,q75/q25=8.11 mlp_w2:H=0.9305,top10E=0.10,eRank=485.6,q75/q25=4.68 vo_prod:H=0.6566,top10E=0.14,eRank=174.2,q75/q25=inf train_time:635399ms step_avg:88.25ms +[2025-08-22 09:54:30] [Rank 0] PRINT: step:7200/10000 val_loss:3.7501 svd_entropy: attn_qk:H=0.7767,top10E=0.23,eRank=200.9,q75/q25=81.88 attn_vo:H=0.8056,top10E=0.07,eRank=333.8,q75/q25=inf mlp_w1:H=0.8695,top10E=0.15,eRank=335.1,q75/q25=8.11 mlp_w2:H=0.9305,top10E=0.10,eRank=485.6,q75/q25=4.68 vo_prod:H=0.6566,top10E=0.14,eRank=174.2,q75/q25=inf train_time:635399ms step_avg:88.25ms +[2025-08-22 09:54:30] [Rank 0] step:7201/10000 train_time:635414ms step_avg:88.24ms +[2025-08-22 09:54:30] [Rank 0] step:7201/10000 train_time:635414ms step_avg:88.24ms +[2025-08-22 09:54:32] [Rank 0] step:7221/10000 train_time:637202ms step_avg:88.24ms +[2025-08-22 09:54:32] [Rank 0] step:7221/10000 train_time:637202ms step_avg:88.24ms +[2025-08-22 09:54:33] [Rank 0] step:7241/10000 train_time:639052ms step_avg:88.25ms +[2025-08-22 09:54:33] [Rank 0] step:7241/10000 train_time:639052ms step_avg:88.25ms +[2025-08-22 09:54:35] [Rank 0] step:7261/10000 train_time:640902ms step_avg:88.27ms +[2025-08-22 09:54:35] [Rank 0] step:7261/10000 train_time:640902ms step_avg:88.27ms +[2025-08-22 09:54:37] [Rank 0] step:7281/10000 train_time:642766ms step_avg:88.28ms +[2025-08-22 09:54:37] [Rank 0] step:7281/10000 train_time:642766ms step_avg:88.28ms +[2025-08-22 09:54:39] [Rank 0] step:7301/10000 train_time:644622ms step_avg:88.29ms +[2025-08-22 09:54:39] [Rank 0] step:7301/10000 train_time:644622ms step_avg:88.29ms +[2025-08-22 09:54:41] [Rank 0] step:7321/10000 train_time:646490ms step_avg:88.31ms +[2025-08-22 09:54:41] [Rank 0] step:7321/10000 train_time:646490ms step_avg:88.31ms +[2025-08-22 09:54:43] [Rank 0] 
step:7341/10000 train_time:648347ms step_avg:88.32ms +[2025-08-22 09:54:43] [Rank 0] step:7341/10000 train_time:648347ms step_avg:88.32ms +[2025-08-22 09:54:45] [Rank 0] step:7361/10000 train_time:650215ms step_avg:88.33ms +[2025-08-22 09:54:45] [Rank 0] step:7361/10000 train_time:650215ms step_avg:88.33ms +[2025-08-22 09:54:46] [Rank 0] step:7381/10000 train_time:652081ms step_avg:88.35ms +[2025-08-22 09:54:46] [Rank 0] step:7381/10000 train_time:652081ms step_avg:88.35ms +[2025-08-22 09:54:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:54:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:55:02] [Rank 0] PRINT: step:7400/10000 val_loss:3.7275 svd_entropy: attn_qk:H=0.7774,top10E=0.23,eRank=201.6,q75/q25=81.48 attn_vo:H=0.8061,top10E=0.07,eRank=334.8,q75/q25=inf mlp_w1:H=0.8703,top10E=0.15,eRank=337.0,q75/q25=8.04 mlp_w2:H=0.9309,top10E=0.10,eRank=487.0,q75/q25=4.65 vo_prod:H=0.6573,top10E=0.14,eRank=175.1,q75/q25=inf train_time:654015ms step_avg:88.38ms +[2025-08-22 09:55:02] [Rank 0] PRINT: step:7400/10000 val_loss:3.7275 svd_entropy: attn_qk:H=0.7774,top10E=0.23,eRank=201.6,q75/q25=81.48 attn_vo:H=0.8061,top10E=0.07,eRank=334.8,q75/q25=inf mlp_w1:H=0.8703,top10E=0.15,eRank=337.0,q75/q25=8.04 mlp_w2:H=0.9309,top10E=0.10,eRank=487.0,q75/q25=4.65 vo_prod:H=0.6573,top10E=0.14,eRank=175.1,q75/q25=inf train_time:654015ms step_avg:88.38ms +[2025-08-22 09:55:02] [Rank 0] step:7401/10000 train_time:654031ms step_avg:88.37ms +[2025-08-22 09:55:02] [Rank 0] step:7401/10000 train_time:654031ms step_avg:88.37ms +[2025-08-22 09:55:04] [Rank 0] step:7421/10000 train_time:655798ms step_avg:88.37ms +[2025-08-22 09:55:04] [Rank 0] step:7421/10000 train_time:655798ms step_avg:88.37ms +[2025-08-22 09:55:06] [Rank 0] step:7441/10000 train_time:657651ms step_avg:88.38ms +[2025-08-22 
09:55:06] [Rank 0] step:7441/10000 train_time:657651ms step_avg:88.38ms +[2025-08-22 09:55:08] [Rank 0] step:7461/10000 train_time:659509ms step_avg:88.39ms +[2025-08-22 09:55:08] [Rank 0] step:7461/10000 train_time:659509ms step_avg:88.39ms +[2025-08-22 09:55:09] [Rank 0] step:7481/10000 train_time:661374ms step_avg:88.41ms +[2025-08-22 09:55:09] [Rank 0] step:7481/10000 train_time:661374ms step_avg:88.41ms +[2025-08-22 09:55:11] [Rank 0] step:7501/10000 train_time:663239ms step_avg:88.42ms +[2025-08-22 09:55:11] [Rank 0] step:7501/10000 train_time:663239ms step_avg:88.42ms +[2025-08-22 09:55:13] [Rank 0] step:7521/10000 train_time:665102ms step_avg:88.43ms +[2025-08-22 09:55:13] [Rank 0] step:7521/10000 train_time:665102ms step_avg:88.43ms +[2025-08-22 09:55:15] [Rank 0] step:7541/10000 train_time:666978ms step_avg:88.45ms +[2025-08-22 09:55:15] [Rank 0] step:7541/10000 train_time:666978ms step_avg:88.45ms +[2025-08-22 09:55:17] [Rank 0] step:7561/10000 train_time:668831ms step_avg:88.46ms +[2025-08-22 09:55:17] [Rank 0] step:7561/10000 train_time:668831ms step_avg:88.46ms +[2025-08-22 09:55:19] [Rank 0] step:7581/10000 train_time:670705ms step_avg:88.47ms +[2025-08-22 09:55:19] [Rank 0] step:7581/10000 train_time:670705ms step_avg:88.47ms +[2025-08-22 09:55:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:55:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:55:34] [Rank 0] PRINT: step:7600/10000 val_loss:3.7247 svd_entropy: attn_qk:H=0.7782,top10E=0.23,eRank=202.4,q75/q25=81.20 attn_vo:H=0.8065,top10E=0.07,eRank=335.7,q75/q25=inf mlp_w1:H=0.8711,top10E=0.15,eRank=338.7,q75/q25=7.97 mlp_w2:H=0.9312,top10E=0.10,eRank=488.1,q75/q25=4.62 vo_prod:H=0.6580,top10E=0.14,eRank=176.1,q75/q25=inf train_time:672671ms step_avg:88.51ms +[2025-08-22 09:55:34] [Rank 0] PRINT: step:7600/10000 val_loss:3.7247 svd_entropy: attn_qk:H=0.7782,top10E=0.23,eRank=202.4,q75/q25=81.20 attn_vo:H=0.8065,top10E=0.07,eRank=335.7,q75/q25=inf mlp_w1:H=0.8711,top10E=0.15,eRank=338.7,q75/q25=7.97 mlp_w2:H=0.9312,top10E=0.10,eRank=488.1,q75/q25=4.62 vo_prod:H=0.6580,top10E=0.14,eRank=176.1,q75/q25=inf train_time:672671ms step_avg:88.51ms +[2025-08-22 09:55:34] [Rank 0] step:7601/10000 train_time:672687ms step_avg:88.50ms +[2025-08-22 09:55:34] [Rank 0] step:7601/10000 train_time:672687ms step_avg:88.50ms +[2025-08-22 09:55:36] [Rank 0] step:7621/10000 train_time:674459ms step_avg:88.50ms +[2025-08-22 09:55:36] [Rank 0] step:7621/10000 train_time:674459ms step_avg:88.50ms +[2025-08-22 09:55:38] [Rank 0] step:7641/10000 train_time:676320ms step_avg:88.51ms +[2025-08-22 09:55:38] [Rank 0] step:7641/10000 train_time:676320ms step_avg:88.51ms +[2025-08-22 09:55:40] [Rank 0] step:7661/10000 train_time:678184ms step_avg:88.52ms +[2025-08-22 09:55:40] [Rank 0] step:7661/10000 train_time:678184ms step_avg:88.52ms +[2025-08-22 09:55:42] [Rank 0] step:7681/10000 train_time:680044ms step_avg:88.54ms +[2025-08-22 09:55:42] [Rank 0] step:7681/10000 train_time:680044ms step_avg:88.54ms +[2025-08-22 09:55:44] [Rank 0] step:7701/10000 train_time:681910ms step_avg:88.55ms +[2025-08-22 09:55:44] [Rank 0] step:7701/10000 train_time:681910ms step_avg:88.55ms +[2025-08-22 09:55:46] [Rank 0] step:7721/10000 train_time:683786ms step_avg:88.56ms +[2025-08-22 09:55:46] [Rank 0] step:7721/10000 train_time:683786ms step_avg:88.56ms +[2025-08-22 09:55:48] [Rank 0] 
step:7741/10000 train_time:685652ms step_avg:88.57ms +[2025-08-22 09:55:48] [Rank 0] step:7741/10000 train_time:685652ms step_avg:88.57ms +[2025-08-22 09:55:49] [Rank 0] step:7761/10000 train_time:687525ms step_avg:88.59ms +[2025-08-22 09:55:49] [Rank 0] step:7761/10000 train_time:687525ms step_avg:88.59ms +[2025-08-22 09:55:51] [Rank 0] step:7781/10000 train_time:689395ms step_avg:88.60ms +[2025-08-22 09:55:51] [Rank 0] step:7781/10000 train_time:689395ms step_avg:88.60ms +[2025-08-22 09:55:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:55:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:56:07] [Rank 0] PRINT: step:7800/10000 val_loss:3.7066 svd_entropy: attn_qk:H=0.7788,top10E=0.23,eRank=203.0,q75/q25=81.04 attn_vo:H=0.8069,top10E=0.07,eRank=336.6,q75/q25=inf mlp_w1:H=0.8717,top10E=0.15,eRank=340.2,q75/q25=7.93 mlp_w2:H=0.9316,top10E=0.10,eRank=489.2,q75/q25=4.59 vo_prod:H=0.6587,top10E=0.14,eRank=176.9,q75/q25=inf train_time:691369ms step_avg:88.64ms +[2025-08-22 09:56:07] [Rank 0] PRINT: step:7800/10000 val_loss:3.7066 svd_entropy: attn_qk:H=0.7788,top10E=0.23,eRank=203.0,q75/q25=81.04 attn_vo:H=0.8069,top10E=0.07,eRank=336.6,q75/q25=inf mlp_w1:H=0.8717,top10E=0.15,eRank=340.2,q75/q25=7.93 mlp_w2:H=0.9316,top10E=0.10,eRank=489.2,q75/q25=4.59 vo_prod:H=0.6587,top10E=0.14,eRank=176.9,q75/q25=inf train_time:691369ms step_avg:88.64ms +[2025-08-22 09:56:07] [Rank 0] step:7801/10000 train_time:691384ms step_avg:88.63ms +[2025-08-22 09:56:07] [Rank 0] step:7801/10000 train_time:691384ms step_avg:88.63ms +[2025-08-22 09:56:09] [Rank 0] step:7821/10000 train_time:693161ms step_avg:88.63ms +[2025-08-22 09:56:09] [Rank 0] step:7821/10000 train_time:693161ms step_avg:88.63ms +[2025-08-22 09:56:11] [Rank 0] step:7841/10000 train_time:695017ms step_avg:88.64ms +[2025-08-22 
09:56:11] [Rank 0] step:7841/10000 train_time:695017ms step_avg:88.64ms +[2025-08-22 09:56:13] [Rank 0] step:7861/10000 train_time:696884ms step_avg:88.65ms +[2025-08-22 09:56:13] [Rank 0] step:7861/10000 train_time:696884ms step_avg:88.65ms +[2025-08-22 09:56:14] [Rank 0] step:7881/10000 train_time:698751ms step_avg:88.66ms +[2025-08-22 09:56:14] [Rank 0] step:7881/10000 train_time:698751ms step_avg:88.66ms +[2025-08-22 09:56:16] [Rank 0] step:7901/10000 train_time:700614ms step_avg:88.67ms +[2025-08-22 09:56:16] [Rank 0] step:7901/10000 train_time:700614ms step_avg:88.67ms +[2025-08-22 09:56:18] [Rank 0] step:7921/10000 train_time:702481ms step_avg:88.69ms +[2025-08-22 09:56:18] [Rank 0] step:7921/10000 train_time:702481ms step_avg:88.69ms +[2025-08-22 09:56:20] [Rank 0] step:7941/10000 train_time:704350ms step_avg:88.70ms +[2025-08-22 09:56:20] [Rank 0] step:7941/10000 train_time:704350ms step_avg:88.70ms +[2025-08-22 09:56:22] [Rank 0] step:7961/10000 train_time:706219ms step_avg:88.71ms +[2025-08-22 09:56:22] [Rank 0] step:7961/10000 train_time:706219ms step_avg:88.71ms +[2025-08-22 09:56:24] [Rank 0] step:7981/10000 train_time:708079ms step_avg:88.72ms +[2025-08-22 09:56:24] [Rank 0] step:7981/10000 train_time:708079ms step_avg:88.72ms +[2025-08-22 09:56:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:56:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:56:39] [Rank 0] PRINT: step:8000/10000 val_loss:3.6867 svd_entropy: attn_qk:H=0.7794,top10E=0.23,eRank=203.5,q75/q25=80.67 attn_vo:H=0.8073,top10E=0.07,eRank=337.3,q75/q25=inf mlp_w1:H=0.8724,top10E=0.15,eRank=341.7,q75/q25=7.88 mlp_w2:H=0.9319,top10E=0.10,eRank=490.2,q75/q25=4.58 vo_prod:H=0.6592,top10E=0.14,eRank=177.6,q75/q25=inf train_time:710041ms step_avg:88.76ms +[2025-08-22 09:56:39] [Rank 0] PRINT: step:8000/10000 val_loss:3.6867 svd_entropy: attn_qk:H=0.7794,top10E=0.23,eRank=203.5,q75/q25=80.67 attn_vo:H=0.8073,top10E=0.07,eRank=337.3,q75/q25=inf mlp_w1:H=0.8724,top10E=0.15,eRank=341.7,q75/q25=7.88 mlp_w2:H=0.9319,top10E=0.10,eRank=490.2,q75/q25=4.58 vo_prod:H=0.6592,top10E=0.14,eRank=177.6,q75/q25=inf train_time:710041ms step_avg:88.76ms +[2025-08-22 09:56:39] [Rank 0] step:8001/10000 train_time:710056ms step_avg:88.75ms +[2025-08-22 09:56:39] [Rank 0] step:8001/10000 train_time:710056ms step_avg:88.75ms +[2025-08-22 09:56:41] [Rank 0] step:8021/10000 train_time:711840ms step_avg:88.75ms +[2025-08-22 09:56:41] [Rank 0] step:8021/10000 train_time:711840ms step_avg:88.75ms +[2025-08-22 09:56:43] [Rank 0] step:8041/10000 train_time:713707ms step_avg:88.76ms +[2025-08-22 09:56:43] [Rank 0] step:8041/10000 train_time:713707ms step_avg:88.76ms +[2025-08-22 09:56:45] [Rank 0] step:8061/10000 train_time:715572ms step_avg:88.77ms +[2025-08-22 09:56:45] [Rank 0] step:8061/10000 train_time:715572ms step_avg:88.77ms +[2025-08-22 09:56:47] [Rank 0] step:8081/10000 train_time:717426ms step_avg:88.78ms +[2025-08-22 09:56:47] [Rank 0] step:8081/10000 train_time:717426ms step_avg:88.78ms +[2025-08-22 09:56:49] [Rank 0] step:8101/10000 train_time:719296ms step_avg:88.79ms +[2025-08-22 09:56:49] [Rank 0] step:8101/10000 train_time:719296ms step_avg:88.79ms +[2025-08-22 09:56:51] [Rank 0] step:8121/10000 train_time:721157ms step_avg:88.80ms +[2025-08-22 09:56:51] [Rank 0] step:8121/10000 train_time:721157ms step_avg:88.80ms +[2025-08-22 09:56:53] [Rank 0] 
step:8141/10000 train_time:723588ms step_avg:88.88ms +[2025-08-22 09:56:53] [Rank 0] step:8141/10000 train_time:723588ms step_avg:88.88ms +[2025-08-22 09:56:55] [Rank 0] step:8161/10000 train_time:725463ms step_avg:88.89ms +[2025-08-22 09:56:55] [Rank 0] step:8161/10000 train_time:725463ms step_avg:88.89ms +[2025-08-22 09:56:57] [Rank 0] step:8181/10000 train_time:727357ms step_avg:88.91ms +[2025-08-22 09:56:57] [Rank 0] step:8181/10000 train_time:727357ms step_avg:88.91ms +[2025-08-22 09:56:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:56:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:57:12] [Rank 0] PRINT: step:8200/10000 val_loss:3.6753 svd_entropy: attn_qk:H=0.7799,top10E=0.23,eRank=204.0,q75/q25=80.53 attn_vo:H=0.8076,top10E=0.07,eRank=338.0,q75/q25=inf mlp_w1:H=0.8730,top10E=0.15,eRank=343.0,q75/q25=7.84 mlp_w2:H=0.9321,top10E=0.10,eRank=491.1,q75/q25=4.56 vo_prod:H=0.6597,top10E=0.14,eRank=178.3,q75/q25=inf train_time:729366ms step_avg:88.95ms +[2025-08-22 09:57:12] [Rank 0] PRINT: step:8200/10000 val_loss:3.6753 svd_entropy: attn_qk:H=0.7799,top10E=0.23,eRank=204.0,q75/q25=80.53 attn_vo:H=0.8076,top10E=0.07,eRank=338.0,q75/q25=inf mlp_w1:H=0.8730,top10E=0.15,eRank=343.0,q75/q25=7.84 mlp_w2:H=0.9321,top10E=0.10,eRank=491.1,q75/q25=4.56 vo_prod:H=0.6597,top10E=0.14,eRank=178.3,q75/q25=inf train_time:729366ms step_avg:88.95ms +[2025-08-22 09:57:12] [Rank 0] step:8201/10000 train_time:729382ms step_avg:88.94ms +[2025-08-22 09:57:12] [Rank 0] step:8201/10000 train_time:729382ms step_avg:88.94ms +[2025-08-22 09:57:14] [Rank 0] step:8221/10000 train_time:731185ms step_avg:88.94ms +[2025-08-22 09:57:14] [Rank 0] step:8221/10000 train_time:731185ms step_avg:88.94ms +[2025-08-22 09:57:16] [Rank 0] step:8241/10000 train_time:733083ms step_avg:88.96ms +[2025-08-22 
09:57:16] [Rank 0] step:8241/10000 train_time:733083ms step_avg:88.96ms +[2025-08-22 09:57:18] [Rank 0] step:8261/10000 train_time:734978ms step_avg:88.97ms +[2025-08-22 09:57:18] [Rank 0] step:8261/10000 train_time:734978ms step_avg:88.97ms +[2025-08-22 09:57:20] [Rank 0] step:8281/10000 train_time:736870ms step_avg:88.98ms +[2025-08-22 09:57:20] [Rank 0] step:8281/10000 train_time:736870ms step_avg:88.98ms +[2025-08-22 09:57:22] [Rank 0] step:8301/10000 train_time:738761ms step_avg:89.00ms +[2025-08-22 09:57:22] [Rank 0] step:8301/10000 train_time:738761ms step_avg:89.00ms +[2025-08-22 09:57:24] [Rank 0] step:8321/10000 train_time:740643ms step_avg:89.01ms +[2025-08-22 09:57:24] [Rank 0] step:8321/10000 train_time:740643ms step_avg:89.01ms +[2025-08-22 09:57:26] [Rank 0] step:8341/10000 train_time:742540ms step_avg:89.02ms +[2025-08-22 09:57:26] [Rank 0] step:8341/10000 train_time:742540ms step_avg:89.02ms +[2025-08-22 09:57:28] [Rank 0] step:8361/10000 train_time:744431ms step_avg:89.04ms +[2025-08-22 09:57:28] [Rank 0] step:8361/10000 train_time:744431ms step_avg:89.04ms +[2025-08-22 09:57:29] [Rank 0] step:8381/10000 train_time:746320ms step_avg:89.05ms +[2025-08-22 09:57:29] [Rank 0] step:8381/10000 train_time:746320ms step_avg:89.05ms +[2025-08-22 09:57:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:57:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:57:45] [Rank 0] PRINT: step:8400/10000 val_loss:3.6636 svd_entropy: attn_qk:H=0.7804,top10E=0.23,eRank=204.5,q75/q25=80.47 attn_vo:H=0.8078,top10E=0.07,eRank=338.6,q75/q25=inf mlp_w1:H=0.8735,top10E=0.15,eRank=344.2,q75/q25=7.80 mlp_w2:H=0.9324,top10E=0.10,eRank=492.0,q75/q25=4.54 vo_prod:H=0.6602,top10E=0.14,eRank=178.9,q75/q25=inf train_time:748305ms step_avg:89.08ms +[2025-08-22 09:57:45] [Rank 0] PRINT: step:8400/10000 val_loss:3.6636 svd_entropy: attn_qk:H=0.7804,top10E=0.23,eRank=204.5,q75/q25=80.47 attn_vo:H=0.8078,top10E=0.07,eRank=338.6,q75/q25=inf mlp_w1:H=0.8735,top10E=0.15,eRank=344.2,q75/q25=7.80 mlp_w2:H=0.9324,top10E=0.10,eRank=492.0,q75/q25=4.54 vo_prod:H=0.6602,top10E=0.14,eRank=178.9,q75/q25=inf train_time:748305ms step_avg:89.08ms +[2025-08-22 09:57:45] [Rank 0] step:8401/10000 train_time:748320ms step_avg:89.08ms +[2025-08-22 09:57:45] [Rank 0] step:8401/10000 train_time:748320ms step_avg:89.08ms +[2025-08-22 09:57:47] [Rank 0] step:8421/10000 train_time:750111ms step_avg:89.08ms +[2025-08-22 09:57:47] [Rank 0] step:8421/10000 train_time:750111ms step_avg:89.08ms +[2025-08-22 09:57:49] [Rank 0] step:8441/10000 train_time:751998ms step_avg:89.09ms +[2025-08-22 09:57:49] [Rank 0] step:8441/10000 train_time:751998ms step_avg:89.09ms +[2025-08-22 09:57:51] [Rank 0] step:8461/10000 train_time:753888ms step_avg:89.10ms +[2025-08-22 09:57:51] [Rank 0] step:8461/10000 train_time:753888ms step_avg:89.10ms +[2025-08-22 09:57:52] [Rank 0] step:8481/10000 train_time:755787ms step_avg:89.12ms +[2025-08-22 09:57:52] [Rank 0] step:8481/10000 train_time:755787ms step_avg:89.12ms +[2025-08-22 09:57:54] [Rank 0] step:8501/10000 train_time:757704ms step_avg:89.13ms +[2025-08-22 09:57:54] [Rank 0] step:8501/10000 train_time:757704ms step_avg:89.13ms +[2025-08-22 09:57:56] [Rank 0] step:8521/10000 train_time:759605ms step_avg:89.15ms +[2025-08-22 09:57:56] [Rank 0] step:8521/10000 train_time:759605ms step_avg:89.15ms +[2025-08-22 09:57:58] [Rank 0] 
step:8541/10000 train_time:761514ms step_avg:89.16ms +[2025-08-22 09:57:58] [Rank 0] step:8541/10000 train_time:761514ms step_avg:89.16ms +[2025-08-22 09:58:00] [Rank 0] step:8561/10000 train_time:763412ms step_avg:89.17ms +[2025-08-22 09:58:00] [Rank 0] step:8561/10000 train_time:763412ms step_avg:89.17ms +[2025-08-22 09:58:02] [Rank 0] step:8581/10000 train_time:765312ms step_avg:89.19ms +[2025-08-22 09:58:02] [Rank 0] step:8581/10000 train_time:765312ms step_avg:89.19ms +[2025-08-22 09:58:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:58:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:58:17] [Rank 0] PRINT: step:8600/10000 val_loss:3.6504 svd_entropy: attn_qk:H=0.7808,top10E=0.23,eRank=204.9,q75/q25=80.41 attn_vo:H=0.8081,top10E=0.07,eRank=339.1,q75/q25=inf mlp_w1:H=0.8739,top10E=0.15,eRank=345.2,q75/q25=7.76 mlp_w2:H=0.9326,top10E=0.10,eRank=492.7,q75/q25=4.53 vo_prod:H=0.6606,top10E=0.14,eRank=179.5,q75/q25=inf train_time:767296ms step_avg:89.22ms +[2025-08-22 09:58:17] [Rank 0] PRINT: step:8600/10000 val_loss:3.6504 svd_entropy: attn_qk:H=0.7808,top10E=0.23,eRank=204.9,q75/q25=80.41 attn_vo:H=0.8081,top10E=0.07,eRank=339.1,q75/q25=inf mlp_w1:H=0.8739,top10E=0.15,eRank=345.2,q75/q25=7.76 mlp_w2:H=0.9326,top10E=0.10,eRank=492.7,q75/q25=4.53 vo_prod:H=0.6606,top10E=0.14,eRank=179.5,q75/q25=inf train_time:767296ms step_avg:89.22ms +[2025-08-22 09:58:18] [Rank 0] step:8601/10000 train_time:767310ms step_avg:89.21ms +[2025-08-22 09:58:18] [Rank 0] step:8601/10000 train_time:767310ms step_avg:89.21ms +[2025-08-22 09:58:19] [Rank 0] step:8621/10000 train_time:769135ms step_avg:89.22ms +[2025-08-22 09:58:19] [Rank 0] step:8621/10000 train_time:769135ms step_avg:89.22ms +[2025-08-22 09:58:21] [Rank 0] step:8641/10000 train_time:771025ms step_avg:89.23ms +[2025-08-22 
09:58:21] [Rank 0] step:8641/10000 train_time:771025ms step_avg:89.23ms +[2025-08-22 09:58:23] [Rank 0] step:8661/10000 train_time:772918ms step_avg:89.24ms +[2025-08-22 09:58:23] [Rank 0] step:8661/10000 train_time:772918ms step_avg:89.24ms +[2025-08-22 09:58:25] [Rank 0] step:8681/10000 train_time:774810ms step_avg:89.25ms +[2025-08-22 09:58:25] [Rank 0] step:8681/10000 train_time:774810ms step_avg:89.25ms +[2025-08-22 09:58:27] [Rank 0] step:8701/10000 train_time:776697ms step_avg:89.27ms +[2025-08-22 09:58:27] [Rank 0] step:8701/10000 train_time:776697ms step_avg:89.27ms +[2025-08-22 09:58:29] [Rank 0] step:8721/10000 train_time:778591ms step_avg:89.28ms +[2025-08-22 09:58:29] [Rank 0] step:8721/10000 train_time:778591ms step_avg:89.28ms +[2025-08-22 09:58:31] [Rank 0] step:8741/10000 train_time:780474ms step_avg:89.29ms +[2025-08-22 09:58:31] [Rank 0] step:8741/10000 train_time:780474ms step_avg:89.29ms +[2025-08-22 09:58:33] [Rank 0] step:8761/10000 train_time:782368ms step_avg:89.30ms +[2025-08-22 09:58:33] [Rank 0] step:8761/10000 train_time:782368ms step_avg:89.30ms +[2025-08-22 09:58:35] [Rank 0] step:8781/10000 train_time:784263ms step_avg:89.31ms +[2025-08-22 09:58:35] [Rank 0] step:8781/10000 train_time:784263ms step_avg:89.31ms +[2025-08-22 09:58:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:58:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:58:50] [Rank 0] PRINT: step:8800/10000 val_loss:3.6387 svd_entropy: attn_qk:H=0.7811,top10E=0.23,eRank=205.2,q75/q25=80.20 attn_vo:H=0.8083,top10E=0.07,eRank=339.6,q75/q25=inf mlp_w1:H=0.8743,top10E=0.15,eRank=346.1,q75/q25=7.73 mlp_w2:H=0.9328,top10E=0.10,eRank=493.3,q75/q25=4.51 vo_prod:H=0.6610,top10E=0.14,eRank=180.1,q75/q25=inf train_time:786291ms step_avg:89.35ms +[2025-08-22 09:58:50] [Rank 0] PRINT: step:8800/10000 val_loss:3.6387 svd_entropy: attn_qk:H=0.7811,top10E=0.23,eRank=205.2,q75/q25=80.20 attn_vo:H=0.8083,top10E=0.07,eRank=339.6,q75/q25=inf mlp_w1:H=0.8743,top10E=0.15,eRank=346.1,q75/q25=7.73 mlp_w2:H=0.9328,top10E=0.10,eRank=493.3,q75/q25=4.51 vo_prod:H=0.6610,top10E=0.14,eRank=180.1,q75/q25=inf train_time:786291ms step_avg:89.35ms +[2025-08-22 09:58:50] [Rank 0] step:8801/10000 train_time:786307ms step_avg:89.34ms +[2025-08-22 09:58:50] [Rank 0] step:8801/10000 train_time:786307ms step_avg:89.34ms +[2025-08-22 09:58:52] [Rank 0] step:8821/10000 train_time:788110ms step_avg:89.34ms +[2025-08-22 09:58:52] [Rank 0] step:8821/10000 train_time:788110ms step_avg:89.34ms +[2025-08-22 09:58:54] [Rank 0] step:8841/10000 train_time:790021ms step_avg:89.36ms +[2025-08-22 09:58:54] [Rank 0] step:8841/10000 train_time:790021ms step_avg:89.36ms +[2025-08-22 09:58:56] [Rank 0] step:8861/10000 train_time:791906ms step_avg:89.37ms +[2025-08-22 09:58:56] [Rank 0] step:8861/10000 train_time:791906ms step_avg:89.37ms +[2025-08-22 09:58:58] [Rank 0] step:8881/10000 train_time:793795ms step_avg:89.38ms +[2025-08-22 09:58:58] [Rank 0] step:8881/10000 train_time:793795ms step_avg:89.38ms +[2025-08-22 09:59:00] [Rank 0] step:8901/10000 train_time:795688ms step_avg:89.39ms +[2025-08-22 09:59:00] [Rank 0] step:8901/10000 train_time:795688ms step_avg:89.39ms +[2025-08-22 09:59:02] [Rank 0] step:8921/10000 train_time:797590ms step_avg:89.41ms +[2025-08-22 09:59:02] [Rank 0] step:8921/10000 train_time:797590ms step_avg:89.41ms +[2025-08-22 09:59:04] [Rank 0] 
step:8941/10000 train_time:799492ms step_avg:89.42ms +[2025-08-22 09:59:04] [Rank 0] step:8941/10000 train_time:799492ms step_avg:89.42ms +[2025-08-22 09:59:05] [Rank 0] step:8961/10000 train_time:801385ms step_avg:89.43ms +[2025-08-22 09:59:05] [Rank 0] step:8961/10000 train_time:801385ms step_avg:89.43ms +[2025-08-22 09:59:07] [Rank 0] step:8981/10000 train_time:803280ms step_avg:89.44ms +[2025-08-22 09:59:07] [Rank 0] step:8981/10000 train_time:803280ms step_avg:89.44ms +[2025-08-22 09:59:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:59:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:59:23] [Rank 0] PRINT: step:9000/10000 val_loss:3.6268 svd_entropy: attn_qk:H=0.7815,top10E=0.23,eRank=205.5,q75/q25=80.24 attn_vo:H=0.8085,top10E=0.07,eRank=340.0,q75/q25=inf mlp_w1:H=0.8747,top10E=0.15,eRank=346.9,q75/q25=7.70 mlp_w2:H=0.9329,top10E=0.10,eRank=493.9,q75/q25=4.49 vo_prod:H=0.6614,top10E=0.13,eRank=180.6,q75/q25=inf train_time:805268ms step_avg:89.47ms +[2025-08-22 09:59:23] [Rank 0] PRINT: step:9000/10000 val_loss:3.6268 svd_entropy: attn_qk:H=0.7815,top10E=0.23,eRank=205.5,q75/q25=80.24 attn_vo:H=0.8085,top10E=0.07,eRank=340.0,q75/q25=inf mlp_w1:H=0.8747,top10E=0.15,eRank=346.9,q75/q25=7.70 mlp_w2:H=0.9329,top10E=0.10,eRank=493.9,q75/q25=4.49 vo_prod:H=0.6614,top10E=0.13,eRank=180.6,q75/q25=inf train_time:805268ms step_avg:89.47ms +[2025-08-22 09:59:23] [Rank 0] step:9001/10000 train_time:805284ms step_avg:89.47ms +[2025-08-22 09:59:23] [Rank 0] step:9001/10000 train_time:805284ms step_avg:89.47ms +[2025-08-22 09:59:25] [Rank 0] step:9021/10000 train_time:807094ms step_avg:89.47ms +[2025-08-22 09:59:25] [Rank 0] step:9021/10000 train_time:807094ms step_avg:89.47ms +[2025-08-22 09:59:27] [Rank 0] step:9041/10000 train_time:808992ms step_avg:89.48ms +[2025-08-22 
09:59:27] [Rank 0] step:9041/10000 train_time:808992ms step_avg:89.48ms +[2025-08-22 09:59:29] [Rank 0] step:9061/10000 train_time:810897ms step_avg:89.49ms +[2025-08-22 09:59:29] [Rank 0] step:9061/10000 train_time:810897ms step_avg:89.49ms +[2025-08-22 09:59:31] [Rank 0] step:9081/10000 train_time:812799ms step_avg:89.51ms +[2025-08-22 09:59:31] [Rank 0] step:9081/10000 train_time:812799ms step_avg:89.51ms +[2025-08-22 09:59:33] [Rank 0] step:9101/10000 train_time:814711ms step_avg:89.52ms +[2025-08-22 09:59:33] [Rank 0] step:9101/10000 train_time:814711ms step_avg:89.52ms +[2025-08-22 09:59:34] [Rank 0] step:9121/10000 train_time:816613ms step_avg:89.53ms +[2025-08-22 09:59:34] [Rank 0] step:9121/10000 train_time:816613ms step_avg:89.53ms +[2025-08-22 09:59:36] [Rank 0] step:9141/10000 train_time:818502ms step_avg:89.54ms +[2025-08-22 09:59:36] [Rank 0] step:9141/10000 train_time:818502ms step_avg:89.54ms +[2025-08-22 09:59:38] [Rank 0] step:9161/10000 train_time:820399ms step_avg:89.55ms +[2025-08-22 09:59:38] [Rank 0] step:9161/10000 train_time:820399ms step_avg:89.55ms +[2025-08-22 09:59:40] [Rank 0] step:9181/10000 train_time:822388ms step_avg:89.57ms +[2025-08-22 09:59:40] [Rank 0] step:9181/10000 train_time:822388ms step_avg:89.57ms +[2025-08-22 09:59:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 09:59:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 09:59:56] [Rank 0] PRINT: step:9200/10000 val_loss:3.6172 svd_entropy: attn_qk:H=0.7817,top10E=0.23,eRank=205.8,q75/q25=79.93 attn_vo:H=0.8086,top10E=0.07,eRank=340.3,q75/q25=inf mlp_w1:H=0.8750,top10E=0.15,eRank=347.6,q75/q25=7.67 mlp_w2:H=0.9331,top10E=0.10,eRank=494.4,q75/q25=4.49 vo_prod:H=0.6617,top10E=0.13,eRank=181.0,q75/q25=inf train_time:824385ms step_avg:89.61ms +[2025-08-22 09:59:56] [Rank 0] PRINT: step:9200/10000 val_loss:3.6172 svd_entropy: attn_qk:H=0.7817,top10E=0.23,eRank=205.8,q75/q25=79.93 attn_vo:H=0.8086,top10E=0.07,eRank=340.3,q75/q25=inf mlp_w1:H=0.8750,top10E=0.15,eRank=347.6,q75/q25=7.67 mlp_w2:H=0.9331,top10E=0.10,eRank=494.4,q75/q25=4.49 vo_prod:H=0.6617,top10E=0.13,eRank=181.0,q75/q25=inf train_time:824385ms step_avg:89.61ms +[2025-08-22 09:59:56] [Rank 0] step:9201/10000 train_time:824401ms step_avg:89.60ms +[2025-08-22 09:59:56] [Rank 0] step:9201/10000 train_time:824401ms step_avg:89.60ms +[2025-08-22 09:59:58] [Rank 0] step:9221/10000 train_time:826230ms step_avg:89.60ms +[2025-08-22 09:59:58] [Rank 0] step:9221/10000 train_time:826230ms step_avg:89.60ms +[2025-08-22 09:59:59] [Rank 0] step:9241/10000 train_time:828130ms step_avg:89.61ms +[2025-08-22 09:59:59] [Rank 0] step:9241/10000 train_time:828130ms step_avg:89.61ms +[2025-08-22 10:00:01] [Rank 0] step:9261/10000 train_time:830066ms step_avg:89.63ms +[2025-08-22 10:00:01] [Rank 0] step:9261/10000 train_time:830066ms step_avg:89.63ms +[2025-08-22 10:00:03] [Rank 0] step:9281/10000 train_time:831917ms step_avg:89.64ms +[2025-08-22 10:00:03] [Rank 0] step:9281/10000 train_time:831917ms step_avg:89.64ms +[2025-08-22 10:00:05] [Rank 0] step:9301/10000 train_time:833806ms step_avg:89.65ms +[2025-08-22 10:00:05] [Rank 0] step:9301/10000 train_time:833806ms step_avg:89.65ms +[2025-08-22 10:00:07] [Rank 0] step:9321/10000 train_time:835705ms step_avg:89.66ms +[2025-08-22 10:00:07] [Rank 0] step:9321/10000 train_time:835705ms step_avg:89.66ms +[2025-08-22 10:00:09] [Rank 0] 
step:9341/10000 train_time:837597ms step_avg:89.67ms +[2025-08-22 10:00:09] [Rank 0] step:9341/10000 train_time:837597ms step_avg:89.67ms +[2025-08-22 10:00:11] [Rank 0] step:9361/10000 train_time:839499ms step_avg:89.68ms +[2025-08-22 10:00:11] [Rank 0] step:9361/10000 train_time:839499ms step_avg:89.68ms +[2025-08-22 10:00:13] [Rank 0] step:9381/10000 train_time:841408ms step_avg:89.69ms +[2025-08-22 10:00:13] [Rank 0] step:9381/10000 train_time:841408ms step_avg:89.69ms +[2025-08-22 10:00:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:00:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:00:28] [Rank 0] PRINT: step:9400/10000 val_loss:3.6080 svd_entropy: attn_qk:H=0.7819,top10E=0.23,eRank=206.0,q75/q25=79.80 attn_vo:H=0.8088,top10E=0.07,eRank=340.6,q75/q25=inf mlp_w1:H=0.8753,top10E=0.15,eRank=348.2,q75/q25=7.65 mlp_w2:H=0.9332,top10E=0.10,eRank=494.7,q75/q25=4.47 vo_prod:H=0.6619,top10E=0.13,eRank=181.3,q75/q25=inf train_time:843403ms step_avg:89.72ms +[2025-08-22 10:00:28] [Rank 0] PRINT: step:9400/10000 val_loss:3.6080 svd_entropy: attn_qk:H=0.7819,top10E=0.23,eRank=206.0,q75/q25=79.80 attn_vo:H=0.8088,top10E=0.07,eRank=340.6,q75/q25=inf mlp_w1:H=0.8753,top10E=0.15,eRank=348.2,q75/q25=7.65 mlp_w2:H=0.9332,top10E=0.10,eRank=494.7,q75/q25=4.47 vo_prod:H=0.6619,top10E=0.13,eRank=181.3,q75/q25=inf train_time:843403ms step_avg:89.72ms +[2025-08-22 10:00:28] [Rank 0] step:9401/10000 train_time:843418ms step_avg:89.72ms +[2025-08-22 10:00:28] [Rank 0] step:9401/10000 train_time:843418ms step_avg:89.72ms +[2025-08-22 10:00:30] [Rank 0] step:9421/10000 train_time:845226ms step_avg:89.72ms +[2025-08-22 10:00:30] [Rank 0] step:9421/10000 train_time:845226ms step_avg:89.72ms +[2025-08-22 10:00:32] [Rank 0] step:9441/10000 train_time:847121ms step_avg:89.73ms +[2025-08-22 
10:00:32] [Rank 0] step:9441/10000 train_time:847121ms step_avg:89.73ms +[2025-08-22 10:00:34] [Rank 0] step:9461/10000 train_time:849018ms step_avg:89.74ms +[2025-08-22 10:00:34] [Rank 0] step:9461/10000 train_time:849018ms step_avg:89.74ms +[2025-08-22 10:00:36] [Rank 0] step:9481/10000 train_time:850913ms step_avg:89.75ms +[2025-08-22 10:00:36] [Rank 0] step:9481/10000 train_time:850913ms step_avg:89.75ms +[2025-08-22 10:00:38] [Rank 0] step:9501/10000 train_time:852822ms step_avg:89.76ms +[2025-08-22 10:00:38] [Rank 0] step:9501/10000 train_time:852822ms step_avg:89.76ms +[2025-08-22 10:00:40] [Rank 0] step:9521/10000 train_time:854707ms step_avg:89.77ms +[2025-08-22 10:00:40] [Rank 0] step:9521/10000 train_time:854707ms step_avg:89.77ms +[2025-08-22 10:00:42] [Rank 0] step:9541/10000 train_time:856602ms step_avg:89.78ms +[2025-08-22 10:00:42] [Rank 0] step:9541/10000 train_time:856602ms step_avg:89.78ms +[2025-08-22 10:00:44] [Rank 0] step:9561/10000 train_time:858488ms step_avg:89.79ms +[2025-08-22 10:00:44] [Rank 0] step:9561/10000 train_time:858488ms step_avg:89.79ms +[2025-08-22 10:00:45] [Rank 0] step:9581/10000 train_time:860384ms step_avg:89.80ms +[2025-08-22 10:00:45] [Rank 0] step:9581/10000 train_time:860384ms step_avg:89.80ms +[2025-08-22 10:00:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:00:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:01:01] [Rank 0] PRINT: step:9600/10000 val_loss:3.5991 svd_entropy: attn_qk:H=0.7821,top10E=0.23,eRank=206.2,q75/q25=79.88 attn_vo:H=0.8089,top10E=0.07,eRank=340.9,q75/q25=inf mlp_w1:H=0.8755,top10E=0.15,eRank=348.6,q75/q25=7.64 mlp_w2:H=0.9333,top10E=0.10,eRank=495.1,q75/q25=4.47 vo_prod:H=0.6621,top10E=0.13,eRank=181.6,q75/q25=inf train_time:862385ms step_avg:89.83ms +[2025-08-22 10:01:01] [Rank 0] PRINT: step:9600/10000 val_loss:3.5991 svd_entropy: attn_qk:H=0.7821,top10E=0.23,eRank=206.2,q75/q25=79.88 attn_vo:H=0.8089,top10E=0.07,eRank=340.9,q75/q25=inf mlp_w1:H=0.8755,top10E=0.15,eRank=348.6,q75/q25=7.64 mlp_w2:H=0.9333,top10E=0.10,eRank=495.1,q75/q25=4.47 vo_prod:H=0.6621,top10E=0.13,eRank=181.6,q75/q25=inf train_time:862385ms step_avg:89.83ms +[2025-08-22 10:01:01] [Rank 0] step:9601/10000 train_time:862401ms step_avg:89.82ms +[2025-08-22 10:01:01] [Rank 0] step:9601/10000 train_time:862401ms step_avg:89.82ms +[2025-08-22 10:01:03] [Rank 0] step:9621/10000 train_time:864197ms step_avg:89.82ms +[2025-08-22 10:01:03] [Rank 0] step:9621/10000 train_time:864197ms step_avg:89.82ms +[2025-08-22 10:01:05] [Rank 0] step:9641/10000 train_time:866093ms step_avg:89.83ms +[2025-08-22 10:01:05] [Rank 0] step:9641/10000 train_time:866093ms step_avg:89.83ms +[2025-08-22 10:01:07] [Rank 0] step:9661/10000 train_time:868017ms step_avg:89.85ms +[2025-08-22 10:01:07] [Rank 0] step:9661/10000 train_time:868017ms step_avg:89.85ms +[2025-08-22 10:01:09] [Rank 0] step:9681/10000 train_time:869930ms step_avg:89.86ms +[2025-08-22 10:01:09] [Rank 0] step:9681/10000 train_time:869930ms step_avg:89.86ms +[2025-08-22 10:01:11] [Rank 0] step:9701/10000 train_time:871861ms step_avg:89.87ms +[2025-08-22 10:01:11] [Rank 0] step:9701/10000 train_time:871861ms step_avg:89.87ms +[2025-08-22 10:01:13] [Rank 0] step:9721/10000 train_time:873773ms step_avg:89.89ms +[2025-08-22 10:01:13] [Rank 0] step:9721/10000 train_time:873773ms step_avg:89.89ms +[2025-08-22 10:01:15] [Rank 0] 
step:9741/10000 train_time:875713ms step_avg:89.90ms +[2025-08-22 10:01:15] [Rank 0] step:9741/10000 train_time:875713ms step_avg:89.90ms +[2025-08-22 10:01:17] [Rank 0] step:9761/10000 train_time:877639ms step_avg:89.91ms +[2025-08-22 10:01:17] [Rank 0] step:9761/10000 train_time:877639ms step_avg:89.91ms +[2025-08-22 10:01:19] [Rank 0] step:9781/10000 train_time:879569ms step_avg:89.93ms +[2025-08-22 10:01:19] [Rank 0] step:9781/10000 train_time:879569ms step_avg:89.93ms +[2025-08-22 10:01:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:01:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:01:34] [Rank 0] PRINT: step:9800/10000 val_loss:3.5913 svd_entropy: attn_qk:H=0.7823,top10E=0.23,eRank=206.3,q75/q25=79.82 attn_vo:H=0.8089,top10E=0.07,eRank=341.0,q75/q25=inf mlp_w1:H=0.8757,top10E=0.15,eRank=349.0,q75/q25=7.63 mlp_w2:H=0.9334,top10E=0.10,eRank=495.4,q75/q25=4.46 vo_prod:H=0.6623,top10E=0.13,eRank=181.9,q75/q25=inf train_time:881603ms step_avg:89.96ms +[2025-08-22 10:01:34] [Rank 0] PRINT: step:9800/10000 val_loss:3.5913 svd_entropy: attn_qk:H=0.7823,top10E=0.23,eRank=206.3,q75/q25=79.82 attn_vo:H=0.8089,top10E=0.07,eRank=341.0,q75/q25=inf mlp_w1:H=0.8757,top10E=0.15,eRank=349.0,q75/q25=7.63 mlp_w2:H=0.9334,top10E=0.10,eRank=495.4,q75/q25=4.46 vo_prod:H=0.6623,top10E=0.13,eRank=181.9,q75/q25=inf train_time:881603ms step_avg:89.96ms +[2025-08-22 10:01:35] [Rank 0] step:9801/10000 train_time:881619ms step_avg:89.95ms +[2025-08-22 10:01:35] [Rank 0] step:9801/10000 train_time:881619ms step_avg:89.95ms +[2025-08-22 10:01:36] [Rank 0] step:9821/10000 train_time:883447ms step_avg:89.95ms +[2025-08-22 10:01:36] [Rank 0] step:9821/10000 train_time:883447ms step_avg:89.95ms +[2025-08-22 10:01:38] [Rank 0] step:9841/10000 train_time:885379ms step_avg:89.97ms +[2025-08-22 
10:01:38] [Rank 0] step:9841/10000 train_time:885379ms step_avg:89.97ms +[2025-08-22 10:01:40] [Rank 0] step:9861/10000 train_time:887289ms step_avg:89.98ms +[2025-08-22 10:01:40] [Rank 0] step:9861/10000 train_time:887289ms step_avg:89.98ms +[2025-08-22 10:01:42] [Rank 0] step:9881/10000 train_time:889203ms step_avg:89.99ms +[2025-08-22 10:01:42] [Rank 0] step:9881/10000 train_time:889203ms step_avg:89.99ms +[2025-08-22 10:01:44] [Rank 0] step:9901/10000 train_time:891130ms step_avg:90.00ms +[2025-08-22 10:01:44] [Rank 0] step:9901/10000 train_time:891130ms step_avg:90.00ms +[2025-08-22 10:01:46] [Rank 0] step:9921/10000 train_time:893053ms step_avg:90.02ms +[2025-08-22 10:01:46] [Rank 0] step:9921/10000 train_time:893053ms step_avg:90.02ms +[2025-08-22 10:01:48] [Rank 0] step:9941/10000 train_time:894981ms step_avg:90.03ms +[2025-08-22 10:01:48] [Rank 0] step:9941/10000 train_time:894981ms step_avg:90.03ms +[2025-08-22 10:01:50] [Rank 0] step:9961/10000 train_time:896903ms step_avg:90.04ms +[2025-08-22 10:01:50] [Rank 0] step:9961/10000 train_time:896903ms step_avg:90.04ms +[2025-08-22 10:01:52] [Rank 0] step:9981/10000 train_time:898829ms step_avg:90.05ms +[2025-08-22 10:01:52] [Rank 0] step:9981/10000 train_time:898829ms step_avg:90.05ms +[2025-08-22 10:01:54] [Rank 0] step:10000/10000 train_time:900660ms step_avg:90.07ms +[2025-08-22 10:01:54] [Rank 0] step:10000/10000 train_time:900660ms step_avg:90.07ms +[2025-08-22 10:01:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:01:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:02:08] [Rank 0] PRINT: step:10000/10000 val_loss:3.5839 svd_entropy: attn_qk:H=0.7823,top10E=0.23,eRank=206.4,q75/q25=79.86 attn_vo:H=0.8090,top10E=0.07,eRank=341.2,q75/q25=inf mlp_w1:H=0.8758,top10E=0.15,eRank=349.3,q75/q25=7.62 mlp_w2:H=0.9334,top10E=0.10,eRank=495.6,q75/q25=4.46 vo_prod:H=0.6624,top10E=0.13,eRank=182.0,q75/q25=inf train_time:900862ms step_avg:90.09ms +[2025-08-22 10:02:08] [Rank 0] PRINT: step:10000/10000 val_loss:3.5839 svd_entropy: attn_qk:H=0.7823,top10E=0.23,eRank=206.4,q75/q25=79.86 attn_vo:H=0.8090,top10E=0.07,eRank=341.2,q75/q25=inf mlp_w1:H=0.8758,top10E=0.15,eRank=349.3,q75/q25=7.62 mlp_w2:H=0.9334,top10E=0.10,eRank=495.6,q75/q25=4.46 vo_prod:H=0.6624,top10E=0.13,eRank=182.0,q75/q25=inf train_time:900862ms step_avg:90.09ms +[2025-08-22 10:02:08] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 10:02:08 2025 --- +[2025-08-22 10:02:08] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 10:02:08 2025 --- +[2025-08-22 10:02:08] [Rank 0] PRINT: Peak memory allocated: 11530 MiB reserved: 15556 MiB +[2025-08-22 10:02:08] [Rank 0] PRINT: Peak memory allocated: 11530 MiB reserved: 15556 MiB diff --git a/logs_svd_gated/mode_2_param_gated_seed_42/config.json b/logs_svd_gated/mode_2_param_gated_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..f544c7e7b56b7ac78d8e6e71fd3f62f7b9db6c1c --- /dev/null +++ b/logs_svd_gated/mode_2_param_gated_seed_42/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 2, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + "cooldown_frac": 
0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "041f64a6-830c-405d-8372-1d8a5d7eb2b0", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_2_param_gated_seed_42/training_log_041f64a6-830c-405d-8372-1d8a5d7eb2b0.txt b/logs_svd_gated/mode_2_param_gated_seed_42/training_log_041f64a6-830c-405d-8372-1d8a5d7eb2b0.txt new file mode 100644 index 0000000000000000000000000000000000000000..cb60e9df8af799193e3f13b5dfb5b4431822fdd6 --- /dev/null +++ b/logs_svd_gated/mode_2_param_gated_seed_42/training_log_041f64a6-830c-405d-8372-1d8a5d7eb2b0.txt @@ -0,0 +1,2926 @@ +[2025-08-22 14:49:02] [Rank 0] PRINT: --- Script Start: Fri Aug 22 14:49:02 2025 --- +[2025-08-22 14:49:02] [Rank 0] PRINT: --- Script Start: Fri Aug 22 14:49:02 2025 --- +[2025-08-22 14:49:02] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=2, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 14:49:02] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=2, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 14:49:02] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 14:49:02] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 14:49:02] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 14:49:02] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 14:49:02] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_2_param_gated_seed_42 +[2025-08-22 14:49:02] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_2_param_gated_seed_42 +[2025-08-22 14:49:02] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import 
argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k 
= min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 14:49:02] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 14:49:02] [Rank 0] PRINT: Constructing model... +[2025-08-22 14:49:02] [Rank 0] PRINT: Constructing model... +[2025-08-22 14:49:04] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 14:49:04] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 14:49:04] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 14:49:04] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 14:49:04] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 14:49:04] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 14:49:04] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 2 +[2025-08-22 14:49:04] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 2 +[2025-08-22 14:49:04] [Rank 0] PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: 0.05). +[2025-08-22 14:49:04] [Rank 0] PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: 0.05). +[2025-08-22 14:49:04] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 14:49:04] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 14:49:04] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-08-22 14:49:04] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-08-22 14:49:04] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 14:49:04] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 14:49:04] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 14:49:04] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 14:49:04] [Rank 0] PRINT: Starting warmup... +[2025-08-22 14:49:04] [Rank 0] PRINT: Starting warmup... +[2025-08-22 14:49:47] [Rank 0] PRINT: Warmup complete. +[2025-08-22 14:49:47] [Rank 0] PRINT: Warmup complete. +[2025-08-22 14:49:47] [Rank 0] PRINT: Starting training... +[2025-08-22 14:49:47] [Rank 0] PRINT: Starting training... 
+[2025-08-22 14:49:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:49:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:50:05] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 14:50:05] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 14:50:07] [Rank 0] step:21/10000 train_time:1665ms step_avg:79.28ms +[2025-08-22 14:50:07] [Rank 0] step:21/10000 train_time:1665ms step_avg:79.28ms +[2025-08-22 14:50:08] [Rank 0] step:41/10000 train_time:3342ms step_avg:81.52ms +[2025-08-22 14:50:08] [Rank 0] step:41/10000 train_time:3342ms step_avg:81.52ms +[2025-08-22 14:50:10] [Rank 0] step:61/10000 train_time:5022ms step_avg:82.32ms +[2025-08-22 14:50:10] [Rank 0] step:61/10000 train_time:5022ms step_avg:82.32ms +[2025-08-22 14:50:12] [Rank 0] step:81/10000 train_time:6703ms step_avg:82.75ms +[2025-08-22 14:50:12] [Rank 0] step:81/10000 train_time:6703ms step_avg:82.75ms +[2025-08-22 14:50:13] [Rank 0] step:101/10000 train_time:8385ms step_avg:83.02ms +[2025-08-22 14:50:13] [Rank 0] step:101/10000 train_time:8385ms step_avg:83.02ms +[2025-08-22 14:50:15] [Rank 0] step:121/10000 train_time:10068ms step_avg:83.21ms +[2025-08-22 14:50:15] [Rank 0] step:121/10000 
train_time:10068ms step_avg:83.21ms +[2025-08-22 14:50:17] [Rank 0] step:141/10000 train_time:11752ms step_avg:83.35ms +[2025-08-22 14:50:17] [Rank 0] step:141/10000 train_time:11752ms step_avg:83.35ms +[2025-08-22 14:50:18] [Rank 0] step:161/10000 train_time:13437ms step_avg:83.46ms +[2025-08-22 14:50:18] [Rank 0] step:161/10000 train_time:13437ms step_avg:83.46ms +[2025-08-22 14:50:20] [Rank 0] step:181/10000 train_time:15125ms step_avg:83.56ms +[2025-08-22 14:50:20] [Rank 0] step:181/10000 train_time:15125ms step_avg:83.56ms +[2025-08-22 14:50:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:50:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:50:35] [Rank 0] PRINT: step:200/10000 val_loss:6.6293 svd_entropy: attn_qk:H=0.5243,top10E=0.63,eRank=105.5,q75/q25=33.03 attn_vo:H=0.6956,top10E=0.24,eRank=173.8,q75/q25=inf mlp_w1:H=0.4108,top10E=0.82,eRank=18.0,q75/q25=6.96 mlp_w2:H=0.4299,top10E=0.80,eRank=18.2,q75/q25=7.13 vo_prod:H=0.4195,top10E=0.53,eRank=33.7,q75/q25=inf train_time:16896ms step_avg:84.48ms +[2025-08-22 14:50:35] [Rank 0] PRINT: step:200/10000 val_loss:6.6293 svd_entropy: attn_qk:H=0.5243,top10E=0.63,eRank=105.5,q75/q25=33.03 attn_vo:H=0.6956,top10E=0.24,eRank=173.8,q75/q25=inf mlp_w1:H=0.4108,top10E=0.82,eRank=18.0,q75/q25=6.96 mlp_w2:H=0.4299,top10E=0.80,eRank=18.2,q75/q25=7.13 vo_prod:H=0.4195,top10E=0.53,eRank=33.7,q75/q25=inf train_time:16896ms step_avg:84.48ms +[2025-08-22 14:50:35] [Rank 0] step:201/10000 train_time:16914ms step_avg:84.15ms +[2025-08-22 14:50:35] [Rank 0] step:201/10000 train_time:16914ms step_avg:84.15ms +[2025-08-22 14:50:37] [Rank 0] step:221/10000 train_time:18577ms step_avg:84.06ms +[2025-08-22 14:50:37] [Rank 0] step:221/10000 train_time:18577ms step_avg:84.06ms +[2025-08-22 14:50:39] [Rank 0] step:241/10000 train_time:20258ms 
step_avg:84.06ms +[2025-08-22 14:50:39] [Rank 0] step:241/10000 train_time:20258ms step_avg:84.06ms +[2025-08-22 14:50:41] [Rank 0] step:261/10000 train_time:21942ms step_avg:84.07ms +[2025-08-22 14:50:41] [Rank 0] step:261/10000 train_time:21942ms step_avg:84.07ms +[2025-08-22 14:50:42] [Rank 0] step:281/10000 train_time:23625ms step_avg:84.08ms +[2025-08-22 14:50:42] [Rank 0] step:281/10000 train_time:23625ms step_avg:84.08ms +[2025-08-22 14:50:44] [Rank 0] step:301/10000 train_time:25309ms step_avg:84.08ms +[2025-08-22 14:50:44] [Rank 0] step:301/10000 train_time:25309ms step_avg:84.08ms +[2025-08-22 14:50:46] [Rank 0] step:321/10000 train_time:26993ms step_avg:84.09ms +[2025-08-22 14:50:46] [Rank 0] step:321/10000 train_time:26993ms step_avg:84.09ms +[2025-08-22 14:50:47] [Rank 0] step:341/10000 train_time:28676ms step_avg:84.09ms +[2025-08-22 14:50:47] [Rank 0] step:341/10000 train_time:28676ms step_avg:84.09ms +[2025-08-22 14:50:49] [Rank 0] step:361/10000 train_time:30361ms step_avg:84.10ms +[2025-08-22 14:50:49] [Rank 0] step:361/10000 train_time:30361ms step_avg:84.10ms +[2025-08-22 14:50:51] [Rank 0] step:381/10000 train_time:32045ms step_avg:84.11ms +[2025-08-22 14:50:51] [Rank 0] step:381/10000 train_time:32045ms step_avg:84.11ms +[2025-08-22 14:50:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:50:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:51:06] [Rank 0] PRINT: step:400/10000 val_loss:5.9844 svd_entropy: attn_qk:H=0.6300,top10E=0.48,eRank=122.0,q75/q25=51.45 attn_vo:H=0.6572,top10E=0.25,eRank=146.2,q75/q25=inf mlp_w1:H=0.5824,top10E=0.57,eRank=51.7,q75/q25=7.72 mlp_w2:H=0.5998,top10E=0.54,eRank=56.0,q75/q25=8.04 vo_prod:H=0.4338,top10E=0.48,eRank=40.7,q75/q25=inf train_time:33815ms step_avg:84.54ms +[2025-08-22 14:51:06] [Rank 0] PRINT: step:400/10000 val_loss:5.9844 svd_entropy: attn_qk:H=0.6300,top10E=0.48,eRank=122.0,q75/q25=51.45 attn_vo:H=0.6572,top10E=0.25,eRank=146.2,q75/q25=inf mlp_w1:H=0.5824,top10E=0.57,eRank=51.7,q75/q25=7.72 mlp_w2:H=0.5998,top10E=0.54,eRank=56.0,q75/q25=8.04 vo_prod:H=0.4338,top10E=0.48,eRank=40.7,q75/q25=inf train_time:33815ms step_avg:84.54ms +[2025-08-22 14:51:06] [Rank 0] step:401/10000 train_time:33831ms step_avg:84.37ms +[2025-08-22 14:51:06] [Rank 0] step:401/10000 train_time:33831ms step_avg:84.37ms +[2025-08-22 14:51:07] [Rank 0] step:421/10000 train_time:35423ms step_avg:84.14ms +[2025-08-22 14:51:07] [Rank 0] step:421/10000 train_time:35423ms step_avg:84.14ms +[2025-08-22 14:51:09] [Rank 0] step:441/10000 train_time:37102ms step_avg:84.13ms +[2025-08-22 14:51:09] [Rank 0] step:441/10000 train_time:37102ms step_avg:84.13ms +[2025-08-22 14:51:11] [Rank 0] step:461/10000 train_time:38783ms step_avg:84.13ms +[2025-08-22 14:51:11] [Rank 0] step:461/10000 train_time:38783ms step_avg:84.13ms +[2025-08-22 14:51:13] [Rank 0] step:481/10000 train_time:40465ms step_avg:84.13ms +[2025-08-22 14:51:13] [Rank 0] step:481/10000 train_time:40465ms step_avg:84.13ms +[2025-08-22 14:51:14] [Rank 0] step:501/10000 train_time:42146ms step_avg:84.12ms +[2025-08-22 14:51:14] [Rank 0] step:501/10000 train_time:42146ms step_avg:84.12ms +[2025-08-22 14:51:16] [Rank 0] step:521/10000 train_time:43828ms step_avg:84.12ms +[2025-08-22 14:51:16] [Rank 0] step:521/10000 train_time:43828ms step_avg:84.12ms +[2025-08-22 14:51:18] [Rank 0] step:541/10000 train_time:45510ms 
step_avg:84.12ms +[2025-08-22 14:51:18] [Rank 0] step:541/10000 train_time:45510ms step_avg:84.12ms +[2025-08-22 14:51:19] [Rank 0] step:561/10000 train_time:47193ms step_avg:84.12ms +[2025-08-22 14:51:19] [Rank 0] step:561/10000 train_time:47193ms step_avg:84.12ms +[2025-08-22 14:51:21] [Rank 0] step:581/10000 train_time:48877ms step_avg:84.13ms +[2025-08-22 14:51:21] [Rank 0] step:581/10000 train_time:48877ms step_avg:84.13ms +[2025-08-22 14:51:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:51:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:51:36] [Rank 0] PRINT: step:600/10000 val_loss:5.5970 svd_entropy: attn_qk:H=0.6506,top10E=0.44,eRank=127.3,q75/q25=45.03 attn_vo:H=0.6588,top10E=0.22,eRank=146.4,q75/q25=inf mlp_w1:H=0.6605,top10E=0.42,eRank=88.5,q75/q25=6.48 mlp_w2:H=0.7121,top10E=0.36,eRank=117.4,q75/q25=8.58 vo_prod:H=0.4652,top10E=0.41,eRank=47.1,q75/q25=inf train_time:50644ms step_avg:84.41ms +[2025-08-22 14:51:36] [Rank 0] PRINT: step:600/10000 val_loss:5.5970 svd_entropy: attn_qk:H=0.6506,top10E=0.44,eRank=127.3,q75/q25=45.03 attn_vo:H=0.6588,top10E=0.22,eRank=146.4,q75/q25=inf mlp_w1:H=0.6605,top10E=0.42,eRank=88.5,q75/q25=6.48 mlp_w2:H=0.7121,top10E=0.36,eRank=117.4,q75/q25=8.58 vo_prod:H=0.4652,top10E=0.41,eRank=47.1,q75/q25=inf train_time:50644ms step_avg:84.41ms +[2025-08-22 14:51:36] [Rank 0] step:601/10000 train_time:50660ms step_avg:84.29ms +[2025-08-22 14:51:36] [Rank 0] step:601/10000 train_time:50660ms step_avg:84.29ms +[2025-08-22 14:51:38] [Rank 0] step:621/10000 train_time:52294ms step_avg:84.21ms +[2025-08-22 14:51:38] [Rank 0] step:621/10000 train_time:52294ms step_avg:84.21ms +[2025-08-22 14:51:40] [Rank 0] step:641/10000 train_time:53971ms step_avg:84.20ms +[2025-08-22 14:51:40] [Rank 0] step:641/10000 train_time:53971ms step_avg:84.20ms 
+[2025-08-22 14:51:41] [Rank 0] step:661/10000 train_time:55703ms step_avg:84.27ms +[2025-08-22 14:51:41] [Rank 0] step:661/10000 train_time:55703ms step_avg:84.27ms +[2025-08-22 14:51:43] [Rank 0] step:681/10000 train_time:57382ms step_avg:84.26ms +[2025-08-22 14:51:43] [Rank 0] step:681/10000 train_time:57382ms step_avg:84.26ms +[2025-08-22 14:51:45] [Rank 0] step:701/10000 train_time:59062ms step_avg:84.25ms +[2025-08-22 14:51:45] [Rank 0] step:701/10000 train_time:59062ms step_avg:84.25ms +[2025-08-22 14:51:46] [Rank 0] step:721/10000 train_time:60743ms step_avg:84.25ms +[2025-08-22 14:51:46] [Rank 0] step:721/10000 train_time:60743ms step_avg:84.25ms +[2025-08-22 14:51:48] [Rank 0] step:741/10000 train_time:62424ms step_avg:84.24ms +[2025-08-22 14:51:48] [Rank 0] step:741/10000 train_time:62424ms step_avg:84.24ms +[2025-08-22 14:51:50] [Rank 0] step:761/10000 train_time:64117ms step_avg:84.25ms +[2025-08-22 14:51:50] [Rank 0] step:761/10000 train_time:64117ms step_avg:84.25ms +[2025-08-22 14:51:51] [Rank 0] step:781/10000 train_time:65811ms step_avg:84.27ms +[2025-08-22 14:51:51] [Rank 0] step:781/10000 train_time:65811ms step_avg:84.27ms +[2025-08-22 14:51:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:51:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:52:06] [Rank 0] PRINT: step:800/10000 val_loss:5.3113 svd_entropy: attn_qk:H=0.6652,top10E=0.41,eRank=132.0,q75/q25=43.99 attn_vo:H=0.6748,top10E=0.19,eRank=156.9,q75/q25=inf mlp_w1:H=0.6976,top10E=0.35,eRank=112.8,q75/q25=6.77 mlp_w2:H=0.7679,top10E=0.27,eRank=167.7,q75/q25=9.57 vo_prod:H=0.4925,top10E=0.35,eRank=54.6,q75/q25=inf train_time:67589ms step_avg:84.49ms +[2025-08-22 14:52:06] [Rank 0] PRINT: step:800/10000 val_loss:5.3113 svd_entropy: attn_qk:H=0.6652,top10E=0.41,eRank=132.0,q75/q25=43.99 attn_vo:H=0.6748,top10E=0.19,eRank=156.9,q75/q25=inf mlp_w1:H=0.6976,top10E=0.35,eRank=112.8,q75/q25=6.77 mlp_w2:H=0.7679,top10E=0.27,eRank=167.7,q75/q25=9.57 vo_prod:H=0.4925,top10E=0.35,eRank=54.6,q75/q25=inf train_time:67589ms step_avg:84.49ms +[2025-08-22 14:52:07] [Rank 0] step:801/10000 train_time:67606ms step_avg:84.40ms +[2025-08-22 14:52:07] [Rank 0] step:801/10000 train_time:67606ms step_avg:84.40ms +[2025-08-22 14:52:08] [Rank 0] step:821/10000 train_time:69231ms step_avg:84.32ms +[2025-08-22 14:52:08] [Rank 0] step:821/10000 train_time:69231ms step_avg:84.32ms +[2025-08-22 14:52:10] [Rank 0] step:841/10000 train_time:70921ms step_avg:84.33ms +[2025-08-22 14:52:10] [Rank 0] step:841/10000 train_time:70921ms step_avg:84.33ms +[2025-08-22 14:52:12] [Rank 0] step:861/10000 train_time:72610ms step_avg:84.33ms +[2025-08-22 14:52:12] [Rank 0] step:861/10000 train_time:72610ms step_avg:84.33ms +[2025-08-22 14:52:13] [Rank 0] step:881/10000 train_time:74300ms step_avg:84.34ms +[2025-08-22 14:52:13] [Rank 0] step:881/10000 train_time:74300ms step_avg:84.34ms +[2025-08-22 14:52:15] [Rank 0] step:901/10000 train_time:75993ms step_avg:84.34ms +[2025-08-22 14:52:15] [Rank 0] step:901/10000 train_time:75993ms step_avg:84.34ms +[2025-08-22 14:52:17] [Rank 0] step:921/10000 train_time:77684ms step_avg:84.35ms +[2025-08-22 14:52:17] [Rank 0] step:921/10000 train_time:77684ms step_avg:84.35ms +[2025-08-22 14:52:18] [Rank 0] step:941/10000 train_time:79377ms 
step_avg:84.35ms +[2025-08-22 14:52:18] [Rank 0] step:941/10000 train_time:79377ms step_avg:84.35ms +[2025-08-22 14:52:20] [Rank 0] step:961/10000 train_time:81069ms step_avg:84.36ms +[2025-08-22 14:52:20] [Rank 0] step:961/10000 train_time:81069ms step_avg:84.36ms +[2025-08-22 14:52:22] [Rank 0] step:981/10000 train_time:82765ms step_avg:84.37ms +[2025-08-22 14:52:22] [Rank 0] step:981/10000 train_time:82765ms step_avg:84.37ms +[2025-08-22 14:52:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:52:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:52:37] [Rank 0] PRINT: step:1000/10000 val_loss:5.1388 svd_entropy: attn_qk:H=0.6768,top10E=0.39,eRank=136.5,q75/q25=45.46 attn_vo:H=0.6910,top10E=0.17,eRank=169.3,q75/q25=inf mlp_w1:H=0.7225,top10E=0.31,eRank=131.9,q75/q25=7.34 mlp_w2:H=0.8016,top10E=0.23,eRank=208.3,q75/q25=10.32 vo_prod:H=0.5115,top10E=0.31,eRank=61.1,q75/q25=inf train_time:84544ms step_avg:84.54ms +[2025-08-22 14:52:37] [Rank 0] PRINT: step:1000/10000 val_loss:5.1388 svd_entropy: attn_qk:H=0.6768,top10E=0.39,eRank=136.5,q75/q25=45.46 attn_vo:H=0.6910,top10E=0.17,eRank=169.3,q75/q25=inf mlp_w1:H=0.7225,top10E=0.31,eRank=131.9,q75/q25=7.34 mlp_w2:H=0.8016,top10E=0.23,eRank=208.3,q75/q25=10.32 vo_prod:H=0.5115,top10E=0.31,eRank=61.1,q75/q25=inf train_time:84544ms step_avg:84.54ms +[2025-08-22 14:52:37] [Rank 0] step:1001/10000 train_time:84560ms step_avg:84.48ms +[2025-08-22 14:52:37] [Rank 0] step:1001/10000 train_time:84560ms step_avg:84.48ms +[2025-08-22 14:52:39] [Rank 0] step:1021/10000 train_time:86186ms step_avg:84.41ms +[2025-08-22 14:52:39] [Rank 0] step:1021/10000 train_time:86186ms step_avg:84.41ms +[2025-08-22 14:52:40] [Rank 0] step:1041/10000 train_time:88025ms step_avg:84.56ms +[2025-08-22 14:52:40] [Rank 0] step:1041/10000 train_time:88025ms 
step_avg:84.56ms +[2025-08-22 14:52:42] [Rank 0] step:1061/10000 train_time:89627ms step_avg:84.47ms +[2025-08-22 14:52:42] [Rank 0] step:1061/10000 train_time:89627ms step_avg:84.47ms +[2025-08-22 14:52:44] [Rank 0] step:1081/10000 train_time:91408ms step_avg:84.56ms +[2025-08-22 14:52:44] [Rank 0] step:1081/10000 train_time:91408ms step_avg:84.56ms +[2025-08-22 14:52:46] [Rank 0] step:1101/10000 train_time:93102ms step_avg:84.56ms +[2025-08-22 14:52:46] [Rank 0] step:1101/10000 train_time:93102ms step_avg:84.56ms +[2025-08-22 14:52:47] [Rank 0] step:1121/10000 train_time:94794ms step_avg:84.56ms +[2025-08-22 14:52:47] [Rank 0] step:1121/10000 train_time:94794ms step_avg:84.56ms +[2025-08-22 14:52:49] [Rank 0] step:1141/10000 train_time:96485ms step_avg:84.56ms +[2025-08-22 14:52:49] [Rank 0] step:1141/10000 train_time:96485ms step_avg:84.56ms +[2025-08-22 14:52:51] [Rank 0] step:1161/10000 train_time:98181ms step_avg:84.57ms +[2025-08-22 14:52:51] [Rank 0] step:1161/10000 train_time:98181ms step_avg:84.57ms +[2025-08-22 14:52:52] [Rank 0] step:1181/10000 train_time:99873ms step_avg:84.57ms +[2025-08-22 14:52:52] [Rank 0] step:1181/10000 train_time:99873ms step_avg:84.57ms +[2025-08-22 14:52:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:52:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:53:08] [Rank 0] PRINT: step:1200/10000 val_loss:4.9998 svd_entropy: attn_qk:H=0.6869,top10E=0.37,eRank=140.8,q75/q25=49.28 attn_vo:H=0.7064,top10E=0.15,eRank=183.1,q75/q25=inf mlp_w1:H=0.7410,top10E=0.29,eRank=147.6,q75/q25=7.94 mlp_w2:H=0.8241,top10E=0.20,eRank=240.9,q75/q25=10.75 vo_prod:H=0.5296,top10E=0.28,eRank=68.7,q75/q25=inf train_time:101653ms step_avg:84.71ms +[2025-08-22 14:53:08] [Rank 0] PRINT: step:1200/10000 val_loss:4.9998 svd_entropy: attn_qk:H=0.6869,top10E=0.37,eRank=140.8,q75/q25=49.28 attn_vo:H=0.7064,top10E=0.15,eRank=183.1,q75/q25=inf mlp_w1:H=0.7410,top10E=0.29,eRank=147.6,q75/q25=7.94 mlp_w2:H=0.8241,top10E=0.20,eRank=240.9,q75/q25=10.75 vo_prod:H=0.5296,top10E=0.28,eRank=68.7,q75/q25=inf train_time:101653ms step_avg:84.71ms +[2025-08-22 14:53:08] [Rank 0] step:1201/10000 train_time:101669ms step_avg:84.65ms +[2025-08-22 14:53:08] [Rank 0] step:1201/10000 train_time:101669ms step_avg:84.65ms +[2025-08-22 14:53:09] [Rank 0] step:1221/10000 train_time:103298ms step_avg:84.60ms +[2025-08-22 14:53:09] [Rank 0] step:1221/10000 train_time:103298ms step_avg:84.60ms +[2025-08-22 14:53:11] [Rank 0] step:1241/10000 train_time:104990ms step_avg:84.60ms +[2025-08-22 14:53:11] [Rank 0] step:1241/10000 train_time:104990ms step_avg:84.60ms +[2025-08-22 14:53:13] [Rank 0] step:1261/10000 train_time:106683ms step_avg:84.60ms +[2025-08-22 14:53:13] [Rank 0] step:1261/10000 train_time:106683ms step_avg:84.60ms +[2025-08-22 14:53:14] [Rank 0] step:1281/10000 train_time:108377ms step_avg:84.60ms +[2025-08-22 14:53:14] [Rank 0] step:1281/10000 train_time:108377ms step_avg:84.60ms +[2025-08-22 14:53:16] [Rank 0] step:1301/10000 train_time:110073ms step_avg:84.61ms +[2025-08-22 14:53:16] [Rank 0] step:1301/10000 train_time:110073ms step_avg:84.61ms +[2025-08-22 14:53:18] [Rank 0] step:1321/10000 train_time:111768ms step_avg:84.61ms +[2025-08-22 14:53:18] [Rank 0] step:1321/10000 train_time:111768ms step_avg:84.61ms +[2025-08-22 14:53:20] [Rank 0] 
step:1341/10000 train_time:113464ms step_avg:84.61ms +[2025-08-22 14:53:20] [Rank 0] step:1341/10000 train_time:113464ms step_avg:84.61ms +[2025-08-22 14:53:21] [Rank 0] step:1361/10000 train_time:115162ms step_avg:84.62ms +[2025-08-22 14:53:21] [Rank 0] step:1361/10000 train_time:115162ms step_avg:84.62ms +[2025-08-22 14:53:23] [Rank 0] step:1381/10000 train_time:116860ms step_avg:84.62ms +[2025-08-22 14:53:23] [Rank 0] step:1381/10000 train_time:116860ms step_avg:84.62ms +[2025-08-22 14:53:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:53:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:53:38] [Rank 0] PRINT: step:1400/10000 val_loss:4.9086 svd_entropy: attn_qk:H=0.6960,top10E=0.36,eRank=145.1,q75/q25=53.40 attn_vo:H=0.7198,top10E=0.14,eRank=196.7,q75/q25=inf mlp_w1:H=0.7557,top10E=0.27,eRank=161.5,q75/q25=8.59 mlp_w2:H=0.8409,top10E=0.18,eRank=268.7,q75/q25=10.85 vo_prod:H=0.5448,top10E=0.26,eRank=76.4,q75/q25=inf train_time:118643ms step_avg:84.75ms +[2025-08-22 14:53:38] [Rank 0] PRINT: step:1400/10000 val_loss:4.9086 svd_entropy: attn_qk:H=0.6960,top10E=0.36,eRank=145.1,q75/q25=53.40 attn_vo:H=0.7198,top10E=0.14,eRank=196.7,q75/q25=inf mlp_w1:H=0.7557,top10E=0.27,eRank=161.5,q75/q25=8.59 mlp_w2:H=0.8409,top10E=0.18,eRank=268.7,q75/q25=10.85 vo_prod:H=0.5448,top10E=0.26,eRank=76.4,q75/q25=inf train_time:118643ms step_avg:84.75ms +[2025-08-22 14:53:38] [Rank 0] step:1401/10000 train_time:118659ms step_avg:84.70ms +[2025-08-22 14:53:38] [Rank 0] step:1401/10000 train_time:118659ms step_avg:84.70ms +[2025-08-22 14:53:40] [Rank 0] step:1421/10000 train_time:120283ms step_avg:84.65ms +[2025-08-22 14:53:40] [Rank 0] step:1421/10000 train_time:120283ms step_avg:84.65ms +[2025-08-22 14:53:42] [Rank 0] step:1441/10000 train_time:121974ms step_avg:84.65ms +[2025-08-22 
14:53:42] [Rank 0] step:1441/10000 train_time:121974ms step_avg:84.65ms +[2025-08-22 14:53:43] [Rank 0] step:1461/10000 train_time:123668ms step_avg:84.65ms +[2025-08-22 14:53:43] [Rank 0] step:1461/10000 train_time:123668ms step_avg:84.65ms +[2025-08-22 14:53:45] [Rank 0] step:1481/10000 train_time:125403ms step_avg:84.67ms +[2025-08-22 14:53:45] [Rank 0] step:1481/10000 train_time:125403ms step_avg:84.67ms +[2025-08-22 14:53:47] [Rank 0] step:1501/10000 train_time:127107ms step_avg:84.68ms +[2025-08-22 14:53:47] [Rank 0] step:1501/10000 train_time:127107ms step_avg:84.68ms +[2025-08-22 14:53:48] [Rank 0] step:1521/10000 train_time:128811ms step_avg:84.69ms +[2025-08-22 14:53:48] [Rank 0] step:1521/10000 train_time:128811ms step_avg:84.69ms +[2025-08-22 14:53:50] [Rank 0] step:1541/10000 train_time:130516ms step_avg:84.70ms +[2025-08-22 14:53:50] [Rank 0] step:1541/10000 train_time:130516ms step_avg:84.70ms +[2025-08-22 14:53:52] [Rank 0] step:1561/10000 train_time:132221ms step_avg:84.70ms +[2025-08-22 14:53:52] [Rank 0] step:1561/10000 train_time:132221ms step_avg:84.70ms +[2025-08-22 14:53:54] [Rank 0] step:1581/10000 train_time:133927ms step_avg:84.71ms +[2025-08-22 14:53:54] [Rank 0] step:1581/10000 train_time:133927ms step_avg:84.71ms +[2025-08-22 14:53:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:53:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:54:09] [Rank 0] PRINT: step:1600/10000 val_loss:4.7951 svd_entropy: attn_qk:H=0.7036,top10E=0.34,eRank=148.9,q75/q25=57.17 attn_vo:H=0.7311,top10E=0.13,eRank=209.7,q75/q25=inf mlp_w1:H=0.7682,top10E=0.25,eRank=174.2,q75/q25=9.11 mlp_w2:H=0.8538,top10E=0.16,eRank=292.3,q75/q25=10.65 vo_prod:H=0.5576,top10E=0.25,eRank=83.7,q75/q25=inf train_time:135719ms step_avg:84.82ms +[2025-08-22 14:54:09] [Rank 0] PRINT: step:1600/10000 val_loss:4.7951 svd_entropy: attn_qk:H=0.7036,top10E=0.34,eRank=148.9,q75/q25=57.17 attn_vo:H=0.7311,top10E=0.13,eRank=209.7,q75/q25=inf mlp_w1:H=0.7682,top10E=0.25,eRank=174.2,q75/q25=9.11 mlp_w2:H=0.8538,top10E=0.16,eRank=292.3,q75/q25=10.65 vo_prod:H=0.5576,top10E=0.25,eRank=83.7,q75/q25=inf train_time:135719ms step_avg:84.82ms +[2025-08-22 14:54:09] [Rank 0] step:1601/10000 train_time:135735ms step_avg:84.78ms +[2025-08-22 14:54:09] [Rank 0] step:1601/10000 train_time:135735ms step_avg:84.78ms +[2025-08-22 14:54:10] [Rank 0] step:1621/10000 train_time:137356ms step_avg:84.74ms +[2025-08-22 14:54:10] [Rank 0] step:1621/10000 train_time:137356ms step_avg:84.74ms +[2025-08-22 14:54:12] [Rank 0] step:1641/10000 train_time:139058ms step_avg:84.74ms +[2025-08-22 14:54:12] [Rank 0] step:1641/10000 train_time:139058ms step_avg:84.74ms +[2025-08-22 14:54:14] [Rank 0] step:1661/10000 train_time:140762ms step_avg:84.75ms +[2025-08-22 14:54:14] [Rank 0] step:1661/10000 train_time:140762ms step_avg:84.75ms +[2025-08-22 14:54:16] [Rank 0] step:1681/10000 train_time:142464ms step_avg:84.75ms +[2025-08-22 14:54:16] [Rank 0] step:1681/10000 train_time:142464ms step_avg:84.75ms +[2025-08-22 14:54:17] [Rank 0] step:1701/10000 train_time:144169ms step_avg:84.76ms +[2025-08-22 14:54:17] [Rank 0] step:1701/10000 train_time:144169ms step_avg:84.76ms +[2025-08-22 14:54:19] [Rank 0] step:1721/10000 train_time:145874ms step_avg:84.76ms +[2025-08-22 14:54:19] [Rank 0] step:1721/10000 train_time:145874ms step_avg:84.76ms +[2025-08-22 14:54:21] [Rank 0] 
step:1741/10000 train_time:147581ms step_avg:84.77ms +[2025-08-22 14:54:21] [Rank 0] step:1741/10000 train_time:147581ms step_avg:84.77ms +[2025-08-22 14:54:22] [Rank 0] step:1761/10000 train_time:149287ms step_avg:84.77ms +[2025-08-22 14:54:22] [Rank 0] step:1761/10000 train_time:149287ms step_avg:84.77ms +[2025-08-22 14:54:24] [Rank 0] step:1781/10000 train_time:150996ms step_avg:84.78ms +[2025-08-22 14:54:24] [Rank 0] step:1781/10000 train_time:150996ms step_avg:84.78ms +[2025-08-22 14:54:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:54:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:54:39] [Rank 0] PRINT: step:1800/10000 val_loss:4.6757 svd_entropy: attn_qk:H=0.7103,top10E=0.33,eRank=152.5,q75/q25=60.33 attn_vo:H=0.7407,top10E=0.12,eRank=221.8,q75/q25=inf mlp_w1:H=0.7789,top10E=0.24,eRank=185.9,q75/q25=9.53 mlp_w2:H=0.8644,top10E=0.15,eRank=313.1,q75/q25=10.29 vo_prod:H=0.5680,top10E=0.23,eRank=90.5,q75/q25=inf train_time:152789ms step_avg:84.88ms +[2025-08-22 14:54:39] [Rank 0] PRINT: step:1800/10000 val_loss:4.6757 svd_entropy: attn_qk:H=0.7103,top10E=0.33,eRank=152.5,q75/q25=60.33 attn_vo:H=0.7407,top10E=0.12,eRank=221.8,q75/q25=inf mlp_w1:H=0.7789,top10E=0.24,eRank=185.9,q75/q25=9.53 mlp_w2:H=0.8644,top10E=0.15,eRank=313.1,q75/q25=10.29 vo_prod:H=0.5680,top10E=0.23,eRank=90.5,q75/q25=inf train_time:152789ms step_avg:84.88ms +[2025-08-22 14:54:39] [Rank 0] step:1801/10000 train_time:152803ms step_avg:84.84ms +[2025-08-22 14:54:39] [Rank 0] step:1801/10000 train_time:152803ms step_avg:84.84ms +[2025-08-22 14:54:41] [Rank 0] step:1821/10000 train_time:154427ms step_avg:84.80ms +[2025-08-22 14:54:41] [Rank 0] step:1821/10000 train_time:154427ms step_avg:84.80ms +[2025-08-22 14:54:43] [Rank 0] step:1841/10000 train_time:156132ms step_avg:84.81ms +[2025-08-22 
14:54:43] [Rank 0] step:1841/10000 train_time:156132ms step_avg:84.81ms +[2025-08-22 14:54:45] [Rank 0] step:1861/10000 train_time:157839ms step_avg:84.81ms +[2025-08-22 14:54:45] [Rank 0] step:1861/10000 train_time:157839ms step_avg:84.81ms +[2025-08-22 14:54:46] [Rank 0] step:1881/10000 train_time:159547ms step_avg:84.82ms +[2025-08-22 14:54:46] [Rank 0] step:1881/10000 train_time:159547ms step_avg:84.82ms +[2025-08-22 14:54:48] [Rank 0] step:1901/10000 train_time:161309ms step_avg:84.85ms +[2025-08-22 14:54:48] [Rank 0] step:1901/10000 train_time:161309ms step_avg:84.85ms +[2025-08-22 14:54:50] [Rank 0] step:1921/10000 train_time:163018ms step_avg:84.86ms +[2025-08-22 14:54:50] [Rank 0] step:1921/10000 train_time:163018ms step_avg:84.86ms +[2025-08-22 14:54:51] [Rank 0] step:1941/10000 train_time:164727ms step_avg:84.87ms +[2025-08-22 14:54:51] [Rank 0] step:1941/10000 train_time:164727ms step_avg:84.87ms +[2025-08-22 14:54:53] [Rank 0] step:1961/10000 train_time:166437ms step_avg:84.87ms +[2025-08-22 14:54:53] [Rank 0] step:1961/10000 train_time:166437ms step_avg:84.87ms +[2025-08-22 14:54:55] [Rank 0] step:1981/10000 train_time:168148ms step_avg:84.88ms +[2025-08-22 14:54:55] [Rank 0] step:1981/10000 train_time:168148ms step_avg:84.88ms +[2025-08-22 14:54:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:54:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:55:10] [Rank 0] PRINT: step:2000/10000 val_loss:4.5614 svd_entropy: attn_qk:H=0.7161,top10E=0.32,eRank=155.8,q75/q25=64.10 attn_vo:H=0.7485,top10E=0.12,eRank=232.6,q75/q25=inf mlp_w1:H=0.7883,top10E=0.23,eRank=197.0,q75/q25=9.85 mlp_w2:H=0.8727,top10E=0.14,eRank=330.7,q75/q25=9.86 vo_prod:H=0.5764,top10E=0.22,eRank=96.4,q75/q25=inf train_time:169945ms step_avg:84.97ms +[2025-08-22 14:55:10] [Rank 0] PRINT: step:2000/10000 val_loss:4.5614 svd_entropy: attn_qk:H=0.7161,top10E=0.32,eRank=155.8,q75/q25=64.10 attn_vo:H=0.7485,top10E=0.12,eRank=232.6,q75/q25=inf mlp_w1:H=0.7883,top10E=0.23,eRank=197.0,q75/q25=9.85 mlp_w2:H=0.8727,top10E=0.14,eRank=330.7,q75/q25=9.86 vo_prod:H=0.5764,top10E=0.22,eRank=96.4,q75/q25=inf train_time:169945ms step_avg:84.97ms +[2025-08-22 14:55:10] [Rank 0] step:2001/10000 train_time:169961ms step_avg:84.94ms +[2025-08-22 14:55:10] [Rank 0] step:2001/10000 train_time:169961ms step_avg:84.94ms +[2025-08-22 14:55:12] [Rank 0] step:2021/10000 train_time:171585ms step_avg:84.90ms +[2025-08-22 14:55:12] [Rank 0] step:2021/10000 train_time:171585ms step_avg:84.90ms +[2025-08-22 14:55:13] [Rank 0] step:2041/10000 train_time:173290ms step_avg:84.90ms +[2025-08-22 14:55:13] [Rank 0] step:2041/10000 train_time:173290ms step_avg:84.90ms +[2025-08-22 14:55:15] [Rank 0] step:2061/10000 train_time:174993ms step_avg:84.91ms +[2025-08-22 14:55:15] [Rank 0] step:2061/10000 train_time:174993ms step_avg:84.91ms +[2025-08-22 14:55:17] [Rank 0] step:2081/10000 train_time:176699ms step_avg:84.91ms +[2025-08-22 14:55:17] [Rank 0] step:2081/10000 train_time:176699ms step_avg:84.91ms +[2025-08-22 14:55:19] [Rank 0] step:2101/10000 train_time:178404ms step_avg:84.91ms +[2025-08-22 14:55:19] [Rank 0] step:2101/10000 train_time:178404ms step_avg:84.91ms +[2025-08-22 14:55:20] [Rank 0] step:2121/10000 train_time:180111ms step_avg:84.92ms +[2025-08-22 14:55:20] [Rank 0] step:2121/10000 train_time:180111ms step_avg:84.92ms +[2025-08-22 14:55:22] [Rank 0] 
step:2141/10000 train_time:181817ms step_avg:84.92ms +[2025-08-22 14:55:22] [Rank 0] step:2141/10000 train_time:181817ms step_avg:84.92ms +[2025-08-22 14:55:24] [Rank 0] step:2161/10000 train_time:183525ms step_avg:84.93ms +[2025-08-22 14:55:24] [Rank 0] step:2161/10000 train_time:183525ms step_avg:84.93ms +[2025-08-22 14:55:25] [Rank 0] step:2181/10000 train_time:185234ms step_avg:84.93ms +[2025-08-22 14:55:25] [Rank 0] step:2181/10000 train_time:185234ms step_avg:84.93ms +[2025-08-22 14:55:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:55:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:55:41] [Rank 0] PRINT: step:2200/10000 val_loss:4.4496 svd_entropy: attn_qk:H=0.7207,top10E=0.32,eRank=158.5,q75/q25=65.75 attn_vo:H=0.7547,top10E=0.11,eRank=241.8,q75/q25=inf mlp_w1:H=0.7964,top10E=0.22,eRank=207.0,q75/q25=10.10 mlp_w2:H=0.8795,top10E=0.14,eRank=345.8,q75/q25=9.39 vo_prod:H=0.5835,top10E=0.21,eRank=101.7,q75/q25=inf train_time:187027ms step_avg:85.01ms +[2025-08-22 14:55:41] [Rank 0] PRINT: step:2200/10000 val_loss:4.4496 svd_entropy: attn_qk:H=0.7207,top10E=0.32,eRank=158.5,q75/q25=65.75 attn_vo:H=0.7547,top10E=0.11,eRank=241.8,q75/q25=inf mlp_w1:H=0.7964,top10E=0.22,eRank=207.0,q75/q25=10.10 mlp_w2:H=0.8795,top10E=0.14,eRank=345.8,q75/q25=9.39 vo_prod:H=0.5835,top10E=0.21,eRank=101.7,q75/q25=inf train_time:187027ms step_avg:85.01ms +[2025-08-22 14:55:41] [Rank 0] step:2201/10000 train_time:187042ms step_avg:84.98ms +[2025-08-22 14:55:41] [Rank 0] step:2201/10000 train_time:187042ms step_avg:84.98ms +[2025-08-22 14:55:42] [Rank 0] step:2221/10000 train_time:188671ms step_avg:84.95ms +[2025-08-22 14:55:42] [Rank 0] step:2221/10000 train_time:188671ms step_avg:84.95ms +[2025-08-22 14:55:44] [Rank 0] step:2241/10000 train_time:190410ms step_avg:84.97ms +[2025-08-22 
14:55:44] [Rank 0] step:2241/10000 train_time:190410ms step_avg:84.97ms +[2025-08-22 14:55:46] [Rank 0] step:2261/10000 train_time:192157ms step_avg:84.99ms +[2025-08-22 14:55:46] [Rank 0] step:2261/10000 train_time:192157ms step_avg:84.99ms +[2025-08-22 14:55:48] [Rank 0] step:2281/10000 train_time:193906ms step_avg:85.01ms +[2025-08-22 14:55:48] [Rank 0] step:2281/10000 train_time:193906ms step_avg:85.01ms +[2025-08-22 14:55:49] [Rank 0] step:2301/10000 train_time:195654ms step_avg:85.03ms +[2025-08-22 14:55:49] [Rank 0] step:2301/10000 train_time:195654ms step_avg:85.03ms +[2025-08-22 14:55:51] [Rank 0] step:2321/10000 train_time:197447ms step_avg:85.07ms +[2025-08-22 14:55:51] [Rank 0] step:2321/10000 train_time:197447ms step_avg:85.07ms +[2025-08-22 14:55:53] [Rank 0] step:2341/10000 train_time:199199ms step_avg:85.09ms +[2025-08-22 14:55:53] [Rank 0] step:2341/10000 train_time:199199ms step_avg:85.09ms +[2025-08-22 14:55:55] [Rank 0] step:2361/10000 train_time:200951ms step_avg:85.11ms +[2025-08-22 14:55:55] [Rank 0] step:2361/10000 train_time:200951ms step_avg:85.11ms +[2025-08-22 14:55:56] [Rank 0] step:2381/10000 train_time:202705ms step_avg:85.13ms +[2025-08-22 14:55:56] [Rank 0] step:2381/10000 train_time:202705ms step_avg:85.13ms +[2025-08-22 14:55:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:55:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:56:12] [Rank 0] PRINT: step:2400/10000 val_loss:4.3675 svd_entropy: attn_qk:H=0.7245,top10E=0.31,eRank=160.9,q75/q25=67.42 attn_vo:H=0.7599,top10E=0.11,eRank=249.8,q75/q25=inf mlp_w1:H=0.8036,top10E=0.21,eRank=216.3,q75/q25=10.23 mlp_w2:H=0.8852,top10E=0.13,eRank=359.0,q75/q25=8.89 vo_prod:H=0.5897,top10E=0.21,eRank=106.5,q75/q25=inf train_time:204545ms step_avg:85.23ms +[2025-08-22 14:56:12] [Rank 0] PRINT: step:2400/10000 val_loss:4.3675 svd_entropy: attn_qk:H=0.7245,top10E=0.31,eRank=160.9,q75/q25=67.42 attn_vo:H=0.7599,top10E=0.11,eRank=249.8,q75/q25=inf mlp_w1:H=0.8036,top10E=0.21,eRank=216.3,q75/q25=10.23 mlp_w2:H=0.8852,top10E=0.13,eRank=359.0,q75/q25=8.89 vo_prod:H=0.5897,top10E=0.21,eRank=106.5,q75/q25=inf train_time:204545ms step_avg:85.23ms +[2025-08-22 14:56:12] [Rank 0] step:2401/10000 train_time:204560ms step_avg:85.20ms +[2025-08-22 14:56:12] [Rank 0] step:2401/10000 train_time:204560ms step_avg:85.20ms +[2025-08-22 14:56:13] [Rank 0] step:2421/10000 train_time:206238ms step_avg:85.19ms +[2025-08-22 14:56:13] [Rank 0] step:2421/10000 train_time:206238ms step_avg:85.19ms +[2025-08-22 14:56:15] [Rank 0] step:2441/10000 train_time:207988ms step_avg:85.21ms +[2025-08-22 14:56:15] [Rank 0] step:2441/10000 train_time:207988ms step_avg:85.21ms +[2025-08-22 14:56:17] [Rank 0] step:2461/10000 train_time:209741ms step_avg:85.23ms +[2025-08-22 14:56:17] [Rank 0] step:2461/10000 train_time:209741ms step_avg:85.23ms +[2025-08-22 14:56:19] [Rank 0] step:2481/10000 train_time:211493ms step_avg:85.25ms +[2025-08-22 14:56:19] [Rank 0] step:2481/10000 train_time:211493ms step_avg:85.25ms +[2025-08-22 14:56:20] [Rank 0] step:2501/10000 train_time:213245ms step_avg:85.26ms +[2025-08-22 14:56:20] [Rank 0] step:2501/10000 train_time:213245ms step_avg:85.26ms +[2025-08-22 14:56:22] [Rank 0] step:2521/10000 train_time:215000ms step_avg:85.28ms +[2025-08-22 14:56:22] [Rank 0] step:2521/10000 train_time:215000ms step_avg:85.28ms +[2025-08-22 14:56:24] [Rank 0] 
step:2541/10000 train_time:216754ms step_avg:85.30ms +[2025-08-22 14:56:24] [Rank 0] step:2541/10000 train_time:216754ms step_avg:85.30ms +[2025-08-22 14:56:26] [Rank 0] step:2561/10000 train_time:218510ms step_avg:85.32ms +[2025-08-22 14:56:26] [Rank 0] step:2561/10000 train_time:218510ms step_avg:85.32ms +[2025-08-22 14:56:28] [Rank 0] step:2581/10000 train_time:220264ms step_avg:85.34ms +[2025-08-22 14:56:28] [Rank 0] step:2581/10000 train_time:220264ms step_avg:85.34ms +[2025-08-22 14:56:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:56:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:56:43] [Rank 0] PRINT: step:2600/10000 val_loss:4.2889 svd_entropy: attn_qk:H=0.7282,top10E=0.30,eRank=163.3,q75/q25=69.21 attn_vo:H=0.7643,top10E=0.10,eRank=256.8,q75/q25=inf mlp_w1:H=0.8099,top10E=0.21,eRank=225.0,q75/q25=10.31 mlp_w2:H=0.8900,top10E=0.13,eRank=370.7,q75/q25=8.44 vo_prod:H=0.5954,top10E=0.20,eRank=111.0,q75/q25=inf train_time:222106ms step_avg:85.43ms +[2025-08-22 14:56:43] [Rank 0] PRINT: step:2600/10000 val_loss:4.2889 svd_entropy: attn_qk:H=0.7282,top10E=0.30,eRank=163.3,q75/q25=69.21 attn_vo:H=0.7643,top10E=0.10,eRank=256.8,q75/q25=inf mlp_w1:H=0.8099,top10E=0.21,eRank=225.0,q75/q25=10.31 mlp_w2:H=0.8900,top10E=0.13,eRank=370.7,q75/q25=8.44 vo_prod:H=0.5954,top10E=0.20,eRank=111.0,q75/q25=inf train_time:222106ms step_avg:85.43ms +[2025-08-22 14:56:43] [Rank 0] step:2601/10000 train_time:222122ms step_avg:85.40ms +[2025-08-22 14:56:43] [Rank 0] step:2601/10000 train_time:222122ms step_avg:85.40ms +[2025-08-22 14:56:45] [Rank 0] step:2621/10000 train_time:223795ms step_avg:85.39ms +[2025-08-22 14:56:45] [Rank 0] step:2621/10000 train_time:223795ms step_avg:85.39ms +[2025-08-22 14:56:46] [Rank 0] step:2641/10000 train_time:225542ms step_avg:85.40ms +[2025-08-22 
14:56:46] [Rank 0] step:2641/10000 train_time:225542ms step_avg:85.40ms +[2025-08-22 14:56:48] [Rank 0] step:2661/10000 train_time:227290ms step_avg:85.42ms +[2025-08-22 14:56:48] [Rank 0] step:2661/10000 train_time:227290ms step_avg:85.42ms +[2025-08-22 14:56:50] [Rank 0] step:2681/10000 train_time:229039ms step_avg:85.43ms +[2025-08-22 14:56:50] [Rank 0] step:2681/10000 train_time:229039ms step_avg:85.43ms +[2025-08-22 14:56:52] [Rank 0] step:2701/10000 train_time:230788ms step_avg:85.45ms +[2025-08-22 14:56:52] [Rank 0] step:2701/10000 train_time:230788ms step_avg:85.45ms +[2025-08-22 14:56:53] [Rank 0] step:2721/10000 train_time:232536ms step_avg:85.46ms +[2025-08-22 14:56:53] [Rank 0] step:2721/10000 train_time:232536ms step_avg:85.46ms +[2025-08-22 14:56:55] [Rank 0] step:2741/10000 train_time:234293ms step_avg:85.48ms +[2025-08-22 14:56:55] [Rank 0] step:2741/10000 train_time:234293ms step_avg:85.48ms +[2025-08-22 14:56:57] [Rank 0] step:2761/10000 train_time:236056ms step_avg:85.50ms +[2025-08-22 14:56:57] [Rank 0] step:2761/10000 train_time:236056ms step_avg:85.50ms +[2025-08-22 14:56:59] [Rank 0] step:2781/10000 train_time:237805ms step_avg:85.51ms +[2025-08-22 14:56:59] [Rank 0] step:2781/10000 train_time:237805ms step_avg:85.51ms +[2025-08-22 14:57:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:57:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:57:14] [Rank 0] PRINT: step:2800/10000 val_loss:4.2464 svd_entropy: attn_qk:H=0.7318,top10E=0.30,eRank=165.6,q75/q25=70.63 attn_vo:H=0.7682,top10E=0.10,eRank=263.1,q75/q25=inf mlp_w1:H=0.8156,top10E=0.20,eRank=233.0,q75/q25=10.33 mlp_w2:H=0.8942,top10E=0.13,eRank=381.0,q75/q25=8.01 vo_prod:H=0.6008,top10E=0.19,eRank=115.5,q75/q25=inf train_time:239643ms step_avg:85.59ms +[2025-08-22 14:57:14] [Rank 0] PRINT: step:2800/10000 val_loss:4.2464 svd_entropy: attn_qk:H=0.7318,top10E=0.30,eRank=165.6,q75/q25=70.63 attn_vo:H=0.7682,top10E=0.10,eRank=263.1,q75/q25=inf mlp_w1:H=0.8156,top10E=0.20,eRank=233.0,q75/q25=10.33 mlp_w2:H=0.8942,top10E=0.13,eRank=381.0,q75/q25=8.01 vo_prod:H=0.6008,top10E=0.19,eRank=115.5,q75/q25=inf train_time:239643ms step_avg:85.59ms +[2025-08-22 14:57:14] [Rank 0] step:2801/10000 train_time:239659ms step_avg:85.56ms +[2025-08-22 14:57:14] [Rank 0] step:2801/10000 train_time:239659ms step_avg:85.56ms +[2025-08-22 14:57:16] [Rank 0] step:2821/10000 train_time:241328ms step_avg:85.55ms +[2025-08-22 14:57:16] [Rank 0] step:2821/10000 train_time:241328ms step_avg:85.55ms +[2025-08-22 14:57:17] [Rank 0] step:2841/10000 train_time:243074ms step_avg:85.56ms +[2025-08-22 14:57:17] [Rank 0] step:2841/10000 train_time:243074ms step_avg:85.56ms +[2025-08-22 14:57:19] [Rank 0] step:2861/10000 train_time:244821ms step_avg:85.57ms +[2025-08-22 14:57:19] [Rank 0] step:2861/10000 train_time:244821ms step_avg:85.57ms +[2025-08-22 14:57:21] [Rank 0] step:2881/10000 train_time:246572ms step_avg:85.59ms +[2025-08-22 14:57:21] [Rank 0] step:2881/10000 train_time:246572ms step_avg:85.59ms +[2025-08-22 14:57:23] [Rank 0] step:2901/10000 train_time:248322ms step_avg:85.60ms +[2025-08-22 14:57:23] [Rank 0] step:2901/10000 train_time:248322ms step_avg:85.60ms +[2025-08-22 14:57:24] [Rank 0] step:2921/10000 train_time:250071ms step_avg:85.61ms +[2025-08-22 14:57:24] [Rank 0] step:2921/10000 train_time:250071ms step_avg:85.61ms +[2025-08-22 14:57:26] [Rank 0] 
step:2941/10000 train_time:251822ms step_avg:85.62ms +[2025-08-22 14:57:26] [Rank 0] step:2941/10000 train_time:251822ms step_avg:85.62ms +[2025-08-22 14:57:28] [Rank 0] step:2961/10000 train_time:253572ms step_avg:85.64ms +[2025-08-22 14:57:28] [Rank 0] step:2961/10000 train_time:253572ms step_avg:85.64ms +[2025-08-22 14:57:30] [Rank 0] step:2981/10000 train_time:255329ms step_avg:85.65ms +[2025-08-22 14:57:30] [Rank 0] step:2981/10000 train_time:255329ms step_avg:85.65ms +[2025-08-22 14:57:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:57:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:57:45] [Rank 0] PRINT: step:3000/10000 val_loss:4.1962 svd_entropy: attn_qk:H=0.7349,top10E=0.29,eRank=167.7,q75/q25=72.40 attn_vo:H=0.7716,top10E=0.10,eRank=268.8,q75/q25=inf mlp_w1:H=0.8206,top10E=0.19,eRank=240.4,q75/q25=10.34 mlp_w2:H=0.8977,top10E=0.12,eRank=390.0,q75/q25=7.64 vo_prod:H=0.6058,top10E=0.19,eRank=119.8,q75/q25=inf train_time:257175ms step_avg:85.73ms +[2025-08-22 14:57:45] [Rank 0] PRINT: step:3000/10000 val_loss:4.1962 svd_entropy: attn_qk:H=0.7349,top10E=0.29,eRank=167.7,q75/q25=72.40 attn_vo:H=0.7716,top10E=0.10,eRank=268.8,q75/q25=inf mlp_w1:H=0.8206,top10E=0.19,eRank=240.4,q75/q25=10.34 mlp_w2:H=0.8977,top10E=0.12,eRank=390.0,q75/q25=7.64 vo_prod:H=0.6058,top10E=0.19,eRank=119.8,q75/q25=inf train_time:257175ms step_avg:85.73ms +[2025-08-22 14:57:45] [Rank 0] step:3001/10000 train_time:257191ms step_avg:85.70ms +[2025-08-22 14:57:45] [Rank 0] step:3001/10000 train_time:257191ms step_avg:85.70ms +[2025-08-22 14:57:47] [Rank 0] step:3021/10000 train_time:258864ms step_avg:85.69ms +[2025-08-22 14:57:47] [Rank 0] step:3021/10000 train_time:258864ms step_avg:85.69ms +[2025-08-22 14:57:48] [Rank 0] step:3041/10000 train_time:260619ms step_avg:85.70ms +[2025-08-22 
14:57:48] [Rank 0] step:3041/10000 train_time:260619ms step_avg:85.70ms +[2025-08-22 14:57:50] [Rank 0] step:3061/10000 train_time:262376ms step_avg:85.72ms +[2025-08-22 14:57:50] [Rank 0] step:3061/10000 train_time:262376ms step_avg:85.72ms +[2025-08-22 14:57:52] [Rank 0] step:3081/10000 train_time:264134ms step_avg:85.73ms +[2025-08-22 14:57:52] [Rank 0] step:3081/10000 train_time:264134ms step_avg:85.73ms +[2025-08-22 14:57:54] [Rank 0] step:3101/10000 train_time:265892ms step_avg:85.74ms +[2025-08-22 14:57:54] [Rank 0] step:3101/10000 train_time:265892ms step_avg:85.74ms +[2025-08-22 14:57:55] [Rank 0] step:3121/10000 train_time:267649ms step_avg:85.76ms +[2025-08-22 14:57:55] [Rank 0] step:3121/10000 train_time:267649ms step_avg:85.76ms +[2025-08-22 14:57:57] [Rank 0] step:3141/10000 train_time:269406ms step_avg:85.77ms +[2025-08-22 14:57:57] [Rank 0] step:3141/10000 train_time:269406ms step_avg:85.77ms +[2025-08-22 14:57:59] [Rank 0] step:3161/10000 train_time:271192ms step_avg:85.79ms +[2025-08-22 14:57:59] [Rank 0] step:3161/10000 train_time:271192ms step_avg:85.79ms +[2025-08-22 14:58:01] [Rank 0] step:3181/10000 train_time:272980ms step_avg:85.82ms +[2025-08-22 14:58:01] [Rank 0] step:3181/10000 train_time:272980ms step_avg:85.82ms +[2025-08-22 14:58:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:58:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:58:16] [Rank 0] PRINT: step:3200/10000 val_loss:4.1807 svd_entropy: attn_qk:H=0.7378,top10E=0.29,eRank=169.7,q75/q25=73.82 attn_vo:H=0.7746,top10E=0.10,eRank=273.9,q75/q25=inf mlp_w1:H=0.8252,top10E=0.19,eRank=247.5,q75/q25=10.28 mlp_w2:H=0.9009,top10E=0.12,eRank=398.5,q75/q25=7.30 vo_prod:H=0.6104,top10E=0.18,eRank=123.8,q75/q25=inf train_time:274829ms step_avg:85.88ms +[2025-08-22 14:58:16] [Rank 0] PRINT: step:3200/10000 val_loss:4.1807 svd_entropy: attn_qk:H=0.7378,top10E=0.29,eRank=169.7,q75/q25=73.82 attn_vo:H=0.7746,top10E=0.10,eRank=273.9,q75/q25=inf mlp_w1:H=0.8252,top10E=0.19,eRank=247.5,q75/q25=10.28 mlp_w2:H=0.9009,top10E=0.12,eRank=398.5,q75/q25=7.30 vo_prod:H=0.6104,top10E=0.18,eRank=123.8,q75/q25=inf train_time:274829ms step_avg:85.88ms +[2025-08-22 14:58:16] [Rank 0] step:3201/10000 train_time:274844ms step_avg:85.86ms +[2025-08-22 14:58:16] [Rank 0] step:3201/10000 train_time:274844ms step_avg:85.86ms +[2025-08-22 14:58:18] [Rank 0] step:3221/10000 train_time:276526ms step_avg:85.85ms +[2025-08-22 14:58:18] [Rank 0] step:3221/10000 train_time:276526ms step_avg:85.85ms +[2025-08-22 14:58:20] [Rank 0] step:3241/10000 train_time:278284ms step_avg:85.86ms +[2025-08-22 14:58:20] [Rank 0] step:3241/10000 train_time:278284ms step_avg:85.86ms +[2025-08-22 14:58:21] [Rank 0] step:3261/10000 train_time:280041ms step_avg:85.88ms +[2025-08-22 14:58:21] [Rank 0] step:3261/10000 train_time:280041ms step_avg:85.88ms +[2025-08-22 14:58:23] [Rank 0] step:3281/10000 train_time:281801ms step_avg:85.89ms +[2025-08-22 14:58:23] [Rank 0] step:3281/10000 train_time:281801ms step_avg:85.89ms +[2025-08-22 14:58:25] [Rank 0] step:3301/10000 train_time:283560ms step_avg:85.90ms +[2025-08-22 14:58:25] [Rank 0] step:3301/10000 train_time:283560ms step_avg:85.90ms +[2025-08-22 14:58:27] [Rank 0] step:3321/10000 train_time:285322ms step_avg:85.91ms +[2025-08-22 14:58:27] [Rank 0] step:3321/10000 train_time:285322ms step_avg:85.91ms +[2025-08-22 14:58:28] [Rank 0] 
step:3341/10000 train_time:287083ms step_avg:85.93ms +[2025-08-22 14:58:28] [Rank 0] step:3341/10000 train_time:287083ms step_avg:85.93ms +[2025-08-22 14:58:30] [Rank 0] step:3361/10000 train_time:288844ms step_avg:85.94ms +[2025-08-22 14:58:30] [Rank 0] step:3361/10000 train_time:288844ms step_avg:85.94ms +[2025-08-22 14:58:32] [Rank 0] step:3381/10000 train_time:290606ms step_avg:85.95ms +[2025-08-22 14:58:32] [Rank 0] step:3381/10000 train_time:290606ms step_avg:85.95ms +[2025-08-22 14:58:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:58:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:58:47] [Rank 0] PRINT: step:3400/10000 val_loss:4.1241 svd_entropy: attn_qk:H=0.7406,top10E=0.28,eRank=171.7,q75/q25=74.99 attn_vo:H=0.7774,top10E=0.09,eRank=278.7,q75/q25=inf mlp_w1:H=0.8293,top10E=0.19,eRank=254.1,q75/q25=10.22 mlp_w2:H=0.9038,top10E=0.12,eRank=406.1,q75/q25=7.01 vo_prod:H=0.6147,top10E=0.17,eRank=127.7,q75/q25=inf train_time:292458ms step_avg:86.02ms +[2025-08-22 14:58:47] [Rank 0] PRINT: step:3400/10000 val_loss:4.1241 svd_entropy: attn_qk:H=0.7406,top10E=0.28,eRank=171.7,q75/q25=74.99 attn_vo:H=0.7774,top10E=0.09,eRank=278.7,q75/q25=inf mlp_w1:H=0.8293,top10E=0.19,eRank=254.1,q75/q25=10.22 mlp_w2:H=0.9038,top10E=0.12,eRank=406.1,q75/q25=7.01 vo_prod:H=0.6147,top10E=0.17,eRank=127.7,q75/q25=inf train_time:292458ms step_avg:86.02ms +[2025-08-22 14:58:47] [Rank 0] step:3401/10000 train_time:292473ms step_avg:86.00ms +[2025-08-22 14:58:47] [Rank 0] step:3401/10000 train_time:292473ms step_avg:86.00ms +[2025-08-22 14:58:49] [Rank 0] step:3421/10000 train_time:294163ms step_avg:85.99ms +[2025-08-22 14:58:49] [Rank 0] step:3421/10000 train_time:294163ms step_avg:85.99ms +[2025-08-22 14:58:51] [Rank 0] step:3441/10000 train_time:295920ms step_avg:86.00ms +[2025-08-22 
14:58:51] [Rank 0] step:3441/10000 train_time:295920ms step_avg:86.00ms +[2025-08-22 14:58:53] [Rank 0] step:3461/10000 train_time:297678ms step_avg:86.01ms +[2025-08-22 14:58:53] [Rank 0] step:3461/10000 train_time:297678ms step_avg:86.01ms +[2025-08-22 14:58:54] [Rank 0] step:3481/10000 train_time:299437ms step_avg:86.02ms +[2025-08-22 14:58:54] [Rank 0] step:3481/10000 train_time:299437ms step_avg:86.02ms +[2025-08-22 14:58:56] [Rank 0] step:3501/10000 train_time:301198ms step_avg:86.03ms +[2025-08-22 14:58:56] [Rank 0] step:3501/10000 train_time:301198ms step_avg:86.03ms +[2025-08-22 14:58:58] [Rank 0] step:3521/10000 train_time:302958ms step_avg:86.04ms +[2025-08-22 14:58:58] [Rank 0] step:3521/10000 train_time:302958ms step_avg:86.04ms +[2025-08-22 14:59:00] [Rank 0] step:3541/10000 train_time:304717ms step_avg:86.05ms +[2025-08-22 14:59:00] [Rank 0] step:3541/10000 train_time:304717ms step_avg:86.05ms +[2025-08-22 14:59:01] [Rank 0] step:3561/10000 train_time:306526ms step_avg:86.08ms +[2025-08-22 14:59:01] [Rank 0] step:3561/10000 train_time:306526ms step_avg:86.08ms +[2025-08-22 14:59:03] [Rank 0] step:3581/10000 train_time:308286ms step_avg:86.09ms +[2025-08-22 14:59:03] [Rank 0] step:3581/10000 train_time:308286ms step_avg:86.09ms +[2025-08-22 14:59:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:59:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 14:59:18] [Rank 0] PRINT: step:3600/10000 val_loss:4.1112 svd_entropy: attn_qk:H=0.7432,top10E=0.28,eRank=173.6,q75/q25=76.08 attn_vo:H=0.7798,top10E=0.09,eRank=282.9,q75/q25=inf mlp_w1:H=0.8331,top10E=0.18,eRank=260.3,q75/q25=10.12 mlp_w2:H=0.9064,top10E=0.12,eRank=413.1,q75/q25=6.75 vo_prod:H=0.6186,top10E=0.17,eRank=131.4,q75/q25=inf train_time:310134ms step_avg:86.15ms +[2025-08-22 14:59:18] [Rank 0] PRINT: step:3600/10000 val_loss:4.1112 svd_entropy: attn_qk:H=0.7432,top10E=0.28,eRank=173.6,q75/q25=76.08 attn_vo:H=0.7798,top10E=0.09,eRank=282.9,q75/q25=inf mlp_w1:H=0.8331,top10E=0.18,eRank=260.3,q75/q25=10.12 mlp_w2:H=0.9064,top10E=0.12,eRank=413.1,q75/q25=6.75 vo_prod:H=0.6186,top10E=0.17,eRank=131.4,q75/q25=inf train_time:310134ms step_avg:86.15ms +[2025-08-22 14:59:19] [Rank 0] step:3601/10000 train_time:310150ms step_avg:86.13ms +[2025-08-22 14:59:19] [Rank 0] step:3601/10000 train_time:310150ms step_avg:86.13ms +[2025-08-22 14:59:20] [Rank 0] step:3621/10000 train_time:311838ms step_avg:86.12ms +[2025-08-22 14:59:20] [Rank 0] step:3621/10000 train_time:311838ms step_avg:86.12ms +[2025-08-22 14:59:22] [Rank 0] step:3641/10000 train_time:313593ms step_avg:86.13ms +[2025-08-22 14:59:22] [Rank 0] step:3641/10000 train_time:313593ms step_avg:86.13ms +[2025-08-22 14:59:24] [Rank 0] step:3661/10000 train_time:315348ms step_avg:86.14ms +[2025-08-22 14:59:24] [Rank 0] step:3661/10000 train_time:315348ms step_avg:86.14ms +[2025-08-22 14:59:26] [Rank 0] step:3681/10000 train_time:317104ms step_avg:86.15ms +[2025-08-22 14:59:26] [Rank 0] step:3681/10000 train_time:317104ms step_avg:86.15ms +[2025-08-22 14:59:27] [Rank 0] step:3701/10000 train_time:318861ms step_avg:86.16ms +[2025-08-22 14:59:27] [Rank 0] step:3701/10000 train_time:318861ms step_avg:86.16ms +[2025-08-22 14:59:29] [Rank 0] step:3721/10000 train_time:320645ms step_avg:86.17ms +[2025-08-22 14:59:29] [Rank 0] step:3721/10000 train_time:320645ms step_avg:86.17ms +[2025-08-22 14:59:31] [Rank 0] 
step:3741/10000 train_time:322440ms step_avg:86.19ms +[2025-08-22 14:59:31] [Rank 0] step:3741/10000 train_time:322440ms step_avg:86.19ms +[2025-08-22 14:59:33] [Rank 0] step:3761/10000 train_time:324235ms step_avg:86.21ms +[2025-08-22 14:59:33] [Rank 0] step:3761/10000 train_time:324235ms step_avg:86.21ms +[2025-08-22 14:59:34] [Rank 0] step:3781/10000 train_time:326033ms step_avg:86.23ms +[2025-08-22 14:59:34] [Rank 0] step:3781/10000 train_time:326033ms step_avg:86.23ms +[2025-08-22 14:59:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:59:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 14:59:50] [Rank 0] PRINT: step:3800/10000 val_loss:4.0595 svd_entropy: attn_qk:H=0.7457,top10E=0.28,eRank=175.4,q75/q25=77.08 attn_vo:H=0.7820,top10E=0.09,eRank=286.9,q75/q25=inf mlp_w1:H=0.8367,top10E=0.18,eRank=266.3,q75/q25=10.00 mlp_w2:H=0.9086,top10E=0.11,eRank=419.5,q75/q25=6.52 vo_prod:H=0.6221,top10E=0.17,eRank=134.7,q75/q25=inf train_time:327921ms step_avg:86.29ms +[2025-08-22 14:59:50] [Rank 0] PRINT: step:3800/10000 val_loss:4.0595 svd_entropy: attn_qk:H=0.7457,top10E=0.28,eRank=175.4,q75/q25=77.08 attn_vo:H=0.7820,top10E=0.09,eRank=286.9,q75/q25=inf mlp_w1:H=0.8367,top10E=0.18,eRank=266.3,q75/q25=10.00 mlp_w2:H=0.9086,top10E=0.11,eRank=419.5,q75/q25=6.52 vo_prod:H=0.6221,top10E=0.17,eRank=134.7,q75/q25=inf train_time:327921ms step_avg:86.29ms +[2025-08-22 14:59:50] [Rank 0] step:3801/10000 train_time:327935ms step_avg:86.28ms +[2025-08-22 14:59:50] [Rank 0] step:3801/10000 train_time:327935ms step_avg:86.28ms +[2025-08-22 14:59:52] [Rank 0] step:3821/10000 train_time:329647ms step_avg:86.27ms +[2025-08-22 14:59:52] [Rank 0] step:3821/10000 train_time:329647ms step_avg:86.27ms +[2025-08-22 14:59:53] [Rank 0] step:3841/10000 train_time:331446ms step_avg:86.29ms +[2025-08-22 
14:59:53] [Rank 0] step:3841/10000 train_time:331446ms step_avg:86.29ms +[2025-08-22 14:59:55] [Rank 0] step:3861/10000 train_time:333243ms step_avg:86.31ms +[2025-08-22 14:59:55] [Rank 0] step:3861/10000 train_time:333243ms step_avg:86.31ms +[2025-08-22 14:59:57] [Rank 0] step:3881/10000 train_time:335038ms step_avg:86.33ms +[2025-08-22 14:59:57] [Rank 0] step:3881/10000 train_time:335038ms step_avg:86.33ms +[2025-08-22 14:59:59] [Rank 0] step:3901/10000 train_time:336833ms step_avg:86.35ms +[2025-08-22 14:59:59] [Rank 0] step:3901/10000 train_time:336833ms step_avg:86.35ms +[2025-08-22 15:00:01] [Rank 0] step:3921/10000 train_time:338631ms step_avg:86.36ms +[2025-08-22 15:00:01] [Rank 0] step:3921/10000 train_time:338631ms step_avg:86.36ms +[2025-08-22 15:00:02] [Rank 0] step:3941/10000 train_time:340431ms step_avg:86.38ms +[2025-08-22 15:00:02] [Rank 0] step:3941/10000 train_time:340431ms step_avg:86.38ms +[2025-08-22 15:00:04] [Rank 0] step:3961/10000 train_time:342267ms step_avg:86.41ms +[2025-08-22 15:00:04] [Rank 0] step:3961/10000 train_time:342267ms step_avg:86.41ms +[2025-08-22 15:00:06] [Rank 0] step:3981/10000 train_time:344065ms step_avg:86.43ms +[2025-08-22 15:00:06] [Rank 0] step:3981/10000 train_time:344065ms step_avg:86.43ms +[2025-08-22 15:00:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:00:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:00:21] [Rank 0] PRINT: step:4000/10000 val_loss:4.0268 svd_entropy: attn_qk:H=0.7479,top10E=0.27,eRank=177.1,q75/q25=77.32 attn_vo:H=0.7840,top10E=0.09,eRank=290.5,q75/q25=inf mlp_w1:H=0.8399,top10E=0.18,eRank=272.0,q75/q25=9.89 mlp_w2:H=0.9108,top10E=0.11,eRank=425.5,q75/q25=6.30 vo_prod:H=0.6255,top10E=0.16,eRank=137.9,q75/q25=inf train_time:345952ms step_avg:86.49ms +[2025-08-22 15:00:21] [Rank 0] PRINT: step:4000/10000 val_loss:4.0268 svd_entropy: attn_qk:H=0.7479,top10E=0.27,eRank=177.1,q75/q25=77.32 attn_vo:H=0.7840,top10E=0.09,eRank=290.5,q75/q25=inf mlp_w1:H=0.8399,top10E=0.18,eRank=272.0,q75/q25=9.89 mlp_w2:H=0.9108,top10E=0.11,eRank=425.5,q75/q25=6.30 vo_prod:H=0.6255,top10E=0.16,eRank=137.9,q75/q25=inf train_time:345952ms step_avg:86.49ms +[2025-08-22 15:00:22] [Rank 0] step:4001/10000 train_time:345968ms step_avg:86.47ms +[2025-08-22 15:00:22] [Rank 0] step:4001/10000 train_time:345968ms step_avg:86.47ms +[2025-08-22 15:00:23] [Rank 0] step:4021/10000 train_time:347678ms step_avg:86.47ms +[2025-08-22 15:00:23] [Rank 0] step:4021/10000 train_time:347678ms step_avg:86.47ms +[2025-08-22 15:00:25] [Rank 0] step:4041/10000 train_time:349468ms step_avg:86.48ms +[2025-08-22 15:00:25] [Rank 0] step:4041/10000 train_time:349468ms step_avg:86.48ms +[2025-08-22 15:00:27] [Rank 0] step:4061/10000 train_time:351261ms step_avg:86.50ms +[2025-08-22 15:00:27] [Rank 0] step:4061/10000 train_time:351261ms step_avg:86.50ms +[2025-08-22 15:00:29] [Rank 0] step:4081/10000 train_time:353222ms step_avg:86.55ms +[2025-08-22 15:00:29] [Rank 0] step:4081/10000 train_time:353222ms step_avg:86.55ms +[2025-08-22 15:00:31] [Rank 0] step:4101/10000 train_time:355015ms step_avg:86.57ms +[2025-08-22 15:00:31] [Rank 0] step:4101/10000 train_time:355015ms step_avg:86.57ms +[2025-08-22 15:00:33] [Rank 0] step:4121/10000 train_time:356808ms step_avg:86.58ms +[2025-08-22 15:00:33] [Rank 0] step:4121/10000 train_time:356808ms step_avg:86.58ms +[2025-08-22 15:00:34] [Rank 0] 
step:4141/10000 train_time:358600ms step_avg:86.60ms +[2025-08-22 15:00:34] [Rank 0] step:4141/10000 train_time:358600ms step_avg:86.60ms +[2025-08-22 15:00:36] [Rank 0] step:4161/10000 train_time:360392ms step_avg:86.61ms +[2025-08-22 15:00:36] [Rank 0] step:4161/10000 train_time:360392ms step_avg:86.61ms +[2025-08-22 15:00:38] [Rank 0] step:4181/10000 train_time:362188ms step_avg:86.63ms +[2025-08-22 15:00:38] [Rank 0] step:4181/10000 train_time:362188ms step_avg:86.63ms +[2025-08-22 15:00:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:00:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:00:53] [Rank 0] PRINT: step:4200/10000 val_loss:4.0129 svd_entropy: attn_qk:H=0.7502,top10E=0.27,eRank=178.8,q75/q25=78.05 attn_vo:H=0.7859,top10E=0.09,eRank=293.9,q75/q25=inf mlp_w1:H=0.8430,top10E=0.17,eRank=277.4,q75/q25=9.77 mlp_w2:H=0.9127,top10E=0.11,eRank=430.9,q75/q25=6.12 vo_prod:H=0.6285,top10E=0.16,eRank=141.0,q75/q25=inf train_time:364070ms step_avg:86.68ms +[2025-08-22 15:00:53] [Rank 0] PRINT: step:4200/10000 val_loss:4.0129 svd_entropy: attn_qk:H=0.7502,top10E=0.27,eRank=178.8,q75/q25=78.05 attn_vo:H=0.7859,top10E=0.09,eRank=293.9,q75/q25=inf mlp_w1:H=0.8430,top10E=0.17,eRank=277.4,q75/q25=9.77 mlp_w2:H=0.9127,top10E=0.11,eRank=430.9,q75/q25=6.12 vo_prod:H=0.6285,top10E=0.16,eRank=141.0,q75/q25=inf train_time:364070ms step_avg:86.68ms +[2025-08-22 15:00:53] [Rank 0] step:4201/10000 train_time:364086ms step_avg:86.67ms +[2025-08-22 15:00:53] [Rank 0] step:4201/10000 train_time:364086ms step_avg:86.67ms +[2025-08-22 15:00:55] [Rank 0] step:4221/10000 train_time:365783ms step_avg:86.66ms +[2025-08-22 15:00:55] [Rank 0] step:4221/10000 train_time:365783ms step_avg:86.66ms +[2025-08-22 15:00:57] [Rank 0] step:4241/10000 train_time:367575ms step_avg:86.67ms +[2025-08-22 
15:00:57] [Rank 0] step:4241/10000 train_time:367575ms step_avg:86.67ms +[2025-08-22 15:00:59] [Rank 0] step:4261/10000 train_time:369367ms step_avg:86.69ms +[2025-08-22 15:00:59] [Rank 0] step:4261/10000 train_time:369367ms step_avg:86.69ms +[2025-08-22 15:01:00] [Rank 0] step:4281/10000 train_time:371158ms step_avg:86.70ms +[2025-08-22 15:01:00] [Rank 0] step:4281/10000 train_time:371158ms step_avg:86.70ms +[2025-08-22 15:01:02] [Rank 0] step:4301/10000 train_time:372951ms step_avg:86.71ms +[2025-08-22 15:01:02] [Rank 0] step:4301/10000 train_time:372951ms step_avg:86.71ms +[2025-08-22 15:01:04] [Rank 0] step:4321/10000 train_time:374745ms step_avg:86.73ms +[2025-08-22 15:01:04] [Rank 0] step:4321/10000 train_time:374745ms step_avg:86.73ms +[2025-08-22 15:01:06] [Rank 0] step:4341/10000 train_time:376535ms step_avg:86.74ms +[2025-08-22 15:01:06] [Rank 0] step:4341/10000 train_time:376535ms step_avg:86.74ms +[2025-08-22 15:01:08] [Rank 0] step:4361/10000 train_time:378328ms step_avg:86.75ms +[2025-08-22 15:01:08] [Rank 0] step:4361/10000 train_time:378328ms step_avg:86.75ms +[2025-08-22 15:01:09] [Rank 0] step:4381/10000 train_time:380176ms step_avg:86.78ms +[2025-08-22 15:01:09] [Rank 0] step:4381/10000 train_time:380176ms step_avg:86.78ms +[2025-08-22 15:01:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:01:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:01:25] [Rank 0] PRINT: step:4400/10000 val_loss:4.0063 svd_entropy: attn_qk:H=0.7522,top10E=0.27,eRank=180.4,q75/q25=78.91 attn_vo:H=0.7877,top10E=0.08,eRank=297.2,q75/q25=inf mlp_w1:H=0.8458,top10E=0.17,eRank=282.5,q75/q25=9.65 mlp_w2:H=0.9144,top10E=0.11,eRank=435.9,q75/q25=5.96 vo_prod:H=0.6314,top10E=0.16,eRank=144.0,q75/q25=inf train_time:382061ms step_avg:86.83ms +[2025-08-22 15:01:25] [Rank 0] PRINT: step:4400/10000 val_loss:4.0063 svd_entropy: attn_qk:H=0.7522,top10E=0.27,eRank=180.4,q75/q25=78.91 attn_vo:H=0.7877,top10E=0.08,eRank=297.2,q75/q25=inf mlp_w1:H=0.8458,top10E=0.17,eRank=282.5,q75/q25=9.65 mlp_w2:H=0.9144,top10E=0.11,eRank=435.9,q75/q25=5.96 vo_prod:H=0.6314,top10E=0.16,eRank=144.0,q75/q25=inf train_time:382061ms step_avg:86.83ms +[2025-08-22 15:01:25] [Rank 0] step:4401/10000 train_time:382076ms step_avg:86.82ms +[2025-08-22 15:01:25] [Rank 0] step:4401/10000 train_time:382076ms step_avg:86.82ms +[2025-08-22 15:01:26] [Rank 0] step:4421/10000 train_time:383796ms step_avg:86.81ms +[2025-08-22 15:01:26] [Rank 0] step:4421/10000 train_time:383796ms step_avg:86.81ms +[2025-08-22 15:01:28] [Rank 0] step:4441/10000 train_time:385586ms step_avg:86.82ms +[2025-08-22 15:01:28] [Rank 0] step:4441/10000 train_time:385586ms step_avg:86.82ms +[2025-08-22 15:01:30] [Rank 0] step:4461/10000 train_time:387382ms step_avg:86.84ms +[2025-08-22 15:01:30] [Rank 0] step:4461/10000 train_time:387382ms step_avg:86.84ms +[2025-08-22 15:01:32] [Rank 0] step:4481/10000 train_time:389181ms step_avg:86.85ms +[2025-08-22 15:01:32] [Rank 0] step:4481/10000 train_time:389181ms step_avg:86.85ms +[2025-08-22 15:01:34] [Rank 0] step:4501/10000 train_time:390979ms step_avg:86.86ms +[2025-08-22 15:01:34] [Rank 0] step:4501/10000 train_time:390979ms step_avg:86.86ms +[2025-08-22 15:01:35] [Rank 0] step:4521/10000 train_time:392778ms step_avg:86.88ms +[2025-08-22 15:01:35] [Rank 0] step:4521/10000 train_time:392778ms step_avg:86.88ms +[2025-08-22 15:01:37] [Rank 0] 
step:4541/10000 train_time:394578ms step_avg:86.89ms +[2025-08-22 15:01:37] [Rank 0] step:4541/10000 train_time:394578ms step_avg:86.89ms +[2025-08-22 15:01:39] [Rank 0] step:4561/10000 train_time:396377ms step_avg:86.91ms +[2025-08-22 15:01:39] [Rank 0] step:4561/10000 train_time:396377ms step_avg:86.91ms +[2025-08-22 15:01:41] [Rank 0] step:4581/10000 train_time:398179ms step_avg:86.92ms +[2025-08-22 15:01:41] [Rank 0] step:4581/10000 train_time:398179ms step_avg:86.92ms +[2025-08-22 15:01:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:01:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:01:56] [Rank 0] PRINT: step:4600/10000 val_loss:3.9595 svd_entropy: attn_qk:H=0.7543,top10E=0.27,eRank=182.0,q75/q25=79.87 attn_vo:H=0.7893,top10E=0.08,eRank=300.2,q75/q25=inf mlp_w1:H=0.8485,top10E=0.17,eRank=287.4,q75/q25=9.53 mlp_w2:H=0.9160,top10E=0.11,eRank=440.7,q75/q25=5.82 vo_prod:H=0.6340,top10E=0.15,eRank=146.7,q75/q25=inf train_time:400071ms step_avg:86.97ms +[2025-08-22 15:01:56] [Rank 0] PRINT: step:4600/10000 val_loss:3.9595 svd_entropy: attn_qk:H=0.7543,top10E=0.27,eRank=182.0,q75/q25=79.87 attn_vo:H=0.7893,top10E=0.08,eRank=300.2,q75/q25=inf mlp_w1:H=0.8485,top10E=0.17,eRank=287.4,q75/q25=9.53 mlp_w2:H=0.9160,top10E=0.11,eRank=440.7,q75/q25=5.82 vo_prod:H=0.6340,top10E=0.15,eRank=146.7,q75/q25=inf train_time:400071ms step_avg:86.97ms +[2025-08-22 15:01:56] [Rank 0] step:4601/10000 train_time:400086ms step_avg:86.96ms +[2025-08-22 15:01:56] [Rank 0] step:4601/10000 train_time:400086ms step_avg:86.96ms +[2025-08-22 15:01:58] [Rank 0] step:4621/10000 train_time:401818ms step_avg:86.95ms +[2025-08-22 15:01:58] [Rank 0] step:4621/10000 train_time:401818ms step_avg:86.95ms +[2025-08-22 15:02:00] [Rank 0] step:4641/10000 train_time:403624ms step_avg:86.97ms +[2025-08-22 
15:02:00] [Rank 0] step:4641/10000 train_time:403624ms step_avg:86.97ms +[2025-08-22 15:02:01] [Rank 0] step:4661/10000 train_time:405423ms step_avg:86.98ms +[2025-08-22 15:02:01] [Rank 0] step:4661/10000 train_time:405423ms step_avg:86.98ms +[2025-08-22 15:02:03] [Rank 0] step:4681/10000 train_time:407225ms step_avg:87.00ms +[2025-08-22 15:02:03] [Rank 0] step:4681/10000 train_time:407225ms step_avg:87.00ms +[2025-08-22 15:02:05] [Rank 0] step:4701/10000 train_time:409027ms step_avg:87.01ms +[2025-08-22 15:02:05] [Rank 0] step:4701/10000 train_time:409027ms step_avg:87.01ms +[2025-08-22 15:02:07] [Rank 0] step:4721/10000 train_time:410828ms step_avg:87.02ms +[2025-08-22 15:02:07] [Rank 0] step:4721/10000 train_time:410828ms step_avg:87.02ms +[2025-08-22 15:02:09] [Rank 0] step:4741/10000 train_time:412633ms step_avg:87.03ms +[2025-08-22 15:02:09] [Rank 0] step:4741/10000 train_time:412633ms step_avg:87.03ms +[2025-08-22 15:02:11] [Rank 0] step:4761/10000 train_time:414484ms step_avg:87.06ms +[2025-08-22 15:02:11] [Rank 0] step:4761/10000 train_time:414484ms step_avg:87.06ms +[2025-08-22 15:02:12] [Rank 0] step:4781/10000 train_time:416288ms step_avg:87.07ms +[2025-08-22 15:02:12] [Rank 0] step:4781/10000 train_time:416288ms step_avg:87.07ms +[2025-08-22 15:02:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:02:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:02:27] [Rank 0] PRINT: step:4800/10000 val_loss:3.9568 svd_entropy: attn_qk:H=0.7561,top10E=0.26,eRank=183.5,q75/q25=80.19 attn_vo:H=0.7908,top10E=0.08,eRank=303.1,q75/q25=inf mlp_w1:H=0.8510,top10E=0.17,eRank=292.1,q75/q25=9.39 mlp_w2:H=0.9175,top10E=0.11,eRank=445.1,q75/q25=5.69 vo_prod:H=0.6364,top10E=0.15,eRank=149.2,q75/q25=inf train_time:418183ms step_avg:87.12ms +[2025-08-22 15:02:27] [Rank 0] PRINT: step:4800/10000 val_loss:3.9568 svd_entropy: attn_qk:H=0.7561,top10E=0.26,eRank=183.5,q75/q25=80.19 attn_vo:H=0.7908,top10E=0.08,eRank=303.1,q75/q25=inf mlp_w1:H=0.8510,top10E=0.17,eRank=292.1,q75/q25=9.39 mlp_w2:H=0.9175,top10E=0.11,eRank=445.1,q75/q25=5.69 vo_prod:H=0.6364,top10E=0.15,eRank=149.2,q75/q25=inf train_time:418183ms step_avg:87.12ms +[2025-08-22 15:02:28] [Rank 0] step:4801/10000 train_time:418198ms step_avg:87.11ms +[2025-08-22 15:02:28] [Rank 0] step:4801/10000 train_time:418198ms step_avg:87.11ms +[2025-08-22 15:02:29] [Rank 0] step:4821/10000 train_time:419931ms step_avg:87.10ms +[2025-08-22 15:02:29] [Rank 0] step:4821/10000 train_time:419931ms step_avg:87.10ms +[2025-08-22 15:02:31] [Rank 0] step:4841/10000 train_time:421730ms step_avg:87.12ms +[2025-08-22 15:02:31] [Rank 0] step:4841/10000 train_time:421730ms step_avg:87.12ms +[2025-08-22 15:02:33] [Rank 0] step:4861/10000 train_time:423530ms step_avg:87.13ms +[2025-08-22 15:02:33] [Rank 0] step:4861/10000 train_time:423530ms step_avg:87.13ms +[2025-08-22 15:02:35] [Rank 0] step:4881/10000 train_time:425328ms step_avg:87.14ms +[2025-08-22 15:02:35] [Rank 0] step:4881/10000 train_time:425328ms step_avg:87.14ms +[2025-08-22 15:02:37] [Rank 0] step:4901/10000 train_time:427128ms step_avg:87.15ms +[2025-08-22 15:02:37] [Rank 0] step:4901/10000 train_time:427128ms step_avg:87.15ms +[2025-08-22 15:02:38] [Rank 0] step:4921/10000 train_time:428929ms step_avg:87.16ms +[2025-08-22 15:02:38] [Rank 0] step:4921/10000 train_time:428929ms step_avg:87.16ms +[2025-08-22 15:02:40] [Rank 0] 
step:4941/10000 train_time:430733ms step_avg:87.18ms +[2025-08-22 15:02:40] [Rank 0] step:4941/10000 train_time:430733ms step_avg:87.18ms +[2025-08-22 15:02:42] [Rank 0] step:4961/10000 train_time:432534ms step_avg:87.19ms +[2025-08-22 15:02:42] [Rank 0] step:4961/10000 train_time:432534ms step_avg:87.19ms +[2025-08-22 15:02:44] [Rank 0] step:4981/10000 train_time:434337ms step_avg:87.20ms +[2025-08-22 15:02:44] [Rank 0] step:4981/10000 train_time:434337ms step_avg:87.20ms +[2025-08-22 15:02:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:02:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:02:59] [Rank 0] PRINT: step:5000/10000 val_loss:3.9295 svd_entropy: attn_qk:H=0.7578,top10E=0.26,eRank=184.9,q75/q25=80.62 attn_vo:H=0.7922,top10E=0.08,eRank=305.7,q75/q25=inf mlp_w1:H=0.8534,top10E=0.16,eRank=296.7,q75/q25=9.27 mlp_w2:H=0.9188,top10E=0.11,eRank=449.1,q75/q25=5.56 vo_prod:H=0.6386,top10E=0.15,eRank=151.6,q75/q25=inf train_time:436231ms step_avg:87.25ms +[2025-08-22 15:02:59] [Rank 0] PRINT: step:5000/10000 val_loss:3.9295 svd_entropy: attn_qk:H=0.7578,top10E=0.26,eRank=184.9,q75/q25=80.62 attn_vo:H=0.7922,top10E=0.08,eRank=305.7,q75/q25=inf mlp_w1:H=0.8534,top10E=0.16,eRank=296.7,q75/q25=9.27 mlp_w2:H=0.9188,top10E=0.11,eRank=449.1,q75/q25=5.56 vo_prod:H=0.6386,top10E=0.15,eRank=151.6,q75/q25=inf train_time:436231ms step_avg:87.25ms +[2025-08-22 15:02:59] [Rank 0] step:5001/10000 train_time:436247ms step_avg:87.23ms +[2025-08-22 15:02:59] [Rank 0] step:5001/10000 train_time:436247ms step_avg:87.23ms +[2025-08-22 15:03:01] [Rank 0] step:5021/10000 train_time:437954ms step_avg:87.22ms +[2025-08-22 15:03:01] [Rank 0] step:5021/10000 train_time:437954ms step_avg:87.22ms +[2025-08-22 15:03:03] [Rank 0] step:5041/10000 train_time:439753ms step_avg:87.24ms +[2025-08-22 
15:03:03] [Rank 0] step:5041/10000 train_time:439753ms step_avg:87.24ms +[2025-08-22 15:03:05] [Rank 0] step:5061/10000 train_time:441550ms step_avg:87.25ms +[2025-08-22 15:03:05] [Rank 0] step:5061/10000 train_time:441550ms step_avg:87.25ms +[2025-08-22 15:03:06] [Rank 0] step:5081/10000 train_time:443350ms step_avg:87.26ms +[2025-08-22 15:03:06] [Rank 0] step:5081/10000 train_time:443350ms step_avg:87.26ms +[2025-08-22 15:03:08] [Rank 0] step:5101/10000 train_time:445149ms step_avg:87.27ms +[2025-08-22 15:03:08] [Rank 0] step:5101/10000 train_time:445149ms step_avg:87.27ms +[2025-08-22 15:03:10] [Rank 0] step:5121/10000 train_time:446950ms step_avg:87.28ms +[2025-08-22 15:03:10] [Rank 0] step:5121/10000 train_time:446950ms step_avg:87.28ms +[2025-08-22 15:03:12] [Rank 0] step:5141/10000 train_time:448757ms step_avg:87.29ms +[2025-08-22 15:03:12] [Rank 0] step:5141/10000 train_time:448757ms step_avg:87.29ms +[2025-08-22 15:03:14] [Rank 0] step:5161/10000 train_time:450557ms step_avg:87.30ms +[2025-08-22 15:03:14] [Rank 0] step:5161/10000 train_time:450557ms step_avg:87.30ms +[2025-08-22 15:03:15] [Rank 0] step:5181/10000 train_time:452361ms step_avg:87.31ms +[2025-08-22 15:03:15] [Rank 0] step:5181/10000 train_time:452361ms step_avg:87.31ms +[2025-08-22 15:03:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:03:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:03:31] [Rank 0] PRINT: step:5200/10000 val_loss:3.9109 svd_entropy: attn_qk:H=0.7595,top10E=0.26,eRank=186.3,q75/q25=80.90 attn_vo:H=0.7935,top10E=0.08,eRank=308.3,q75/q25=inf mlp_w1:H=0.8555,top10E=0.16,eRank=300.9,q75/q25=9.17 mlp_w2:H=0.9201,top10E=0.11,eRank=452.8,q75/q25=5.44 vo_prod:H=0.6407,top10E=0.15,eRank=153.7,q75/q25=inf train_time:454279ms step_avg:87.36ms +[2025-08-22 15:03:31] [Rank 0] PRINT: step:5200/10000 val_loss:3.9109 svd_entropy: attn_qk:H=0.7595,top10E=0.26,eRank=186.3,q75/q25=80.90 attn_vo:H=0.7935,top10E=0.08,eRank=308.3,q75/q25=inf mlp_w1:H=0.8555,top10E=0.16,eRank=300.9,q75/q25=9.17 mlp_w2:H=0.9201,top10E=0.11,eRank=452.8,q75/q25=5.44 vo_prod:H=0.6407,top10E=0.15,eRank=153.7,q75/q25=inf train_time:454279ms step_avg:87.36ms +[2025-08-22 15:03:31] [Rank 0] step:5201/10000 train_time:454294ms step_avg:87.35ms +[2025-08-22 15:03:31] [Rank 0] step:5201/10000 train_time:454294ms step_avg:87.35ms +[2025-08-22 15:03:33] [Rank 0] step:5221/10000 train_time:456030ms step_avg:87.35ms +[2025-08-22 15:03:33] [Rank 0] step:5221/10000 train_time:456030ms step_avg:87.35ms +[2025-08-22 15:03:35] [Rank 0] step:5241/10000 train_time:457865ms step_avg:87.36ms +[2025-08-22 15:03:35] [Rank 0] step:5241/10000 train_time:457865ms step_avg:87.36ms +[2025-08-22 15:03:36] [Rank 0] step:5261/10000 train_time:459696ms step_avg:87.38ms +[2025-08-22 15:03:36] [Rank 0] step:5261/10000 train_time:459696ms step_avg:87.38ms +[2025-08-22 15:03:38] [Rank 0] step:5281/10000 train_time:461528ms step_avg:87.39ms +[2025-08-22 15:03:38] [Rank 0] step:5281/10000 train_time:461528ms step_avg:87.39ms +[2025-08-22 15:03:40] [Rank 0] step:5301/10000 train_time:463371ms step_avg:87.41ms +[2025-08-22 15:03:40] [Rank 0] step:5301/10000 train_time:463371ms step_avg:87.41ms +[2025-08-22 15:03:42] [Rank 0] step:5321/10000 train_time:465206ms step_avg:87.43ms +[2025-08-22 15:03:42] [Rank 0] step:5321/10000 train_time:465206ms step_avg:87.43ms +[2025-08-22 15:03:44] [Rank 0] 
step:5341/10000 train_time:467041ms step_avg:87.44ms +[2025-08-22 15:03:44] [Rank 0] step:5341/10000 train_time:467041ms step_avg:87.44ms +[2025-08-22 15:03:46] [Rank 0] step:5361/10000 train_time:468878ms step_avg:87.46ms +[2025-08-22 15:03:46] [Rank 0] step:5361/10000 train_time:468878ms step_avg:87.46ms +[2025-08-22 15:03:47] [Rank 0] step:5381/10000 train_time:470714ms step_avg:87.48ms +[2025-08-22 15:03:47] [Rank 0] step:5381/10000 train_time:470714ms step_avg:87.48ms +[2025-08-22 15:03:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:03:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:04:03] [Rank 0] PRINT: step:5400/10000 val_loss:3.8965 svd_entropy: attn_qk:H=0.7611,top10E=0.26,eRank=187.6,q75/q25=80.54 attn_vo:H=0.7946,top10E=0.08,eRank=310.6,q75/q25=inf mlp_w1:H=0.8575,top10E=0.16,eRank=304.9,q75/q25=9.05 mlp_w2:H=0.9212,top10E=0.10,eRank=456.2,q75/q25=5.34 vo_prod:H=0.6425,top10E=0.14,eRank=155.8,q75/q25=inf train_time:472638ms step_avg:87.53ms +[2025-08-22 15:04:03] [Rank 0] PRINT: step:5400/10000 val_loss:3.8965 svd_entropy: attn_qk:H=0.7611,top10E=0.26,eRank=187.6,q75/q25=80.54 attn_vo:H=0.7946,top10E=0.08,eRank=310.6,q75/q25=inf mlp_w1:H=0.8575,top10E=0.16,eRank=304.9,q75/q25=9.05 mlp_w2:H=0.9212,top10E=0.10,eRank=456.2,q75/q25=5.34 vo_prod:H=0.6425,top10E=0.14,eRank=155.8,q75/q25=inf train_time:472638ms step_avg:87.53ms +[2025-08-22 15:04:03] [Rank 0] step:5401/10000 train_time:472653ms step_avg:87.51ms +[2025-08-22 15:04:03] [Rank 0] step:5401/10000 train_time:472653ms step_avg:87.51ms +[2025-08-22 15:04:05] [Rank 0] step:5421/10000 train_time:474407ms step_avg:87.51ms +[2025-08-22 15:04:05] [Rank 0] step:5421/10000 train_time:474407ms step_avg:87.51ms +[2025-08-22 15:04:06] [Rank 0] step:5441/10000 train_time:476237ms step_avg:87.53ms +[2025-08-22 
15:04:06] [Rank 0] step:5441/10000 train_time:476237ms step_avg:87.53ms +[2025-08-22 15:04:08] [Rank 0] step:5461/10000 train_time:478072ms step_avg:87.54ms +[2025-08-22 15:04:08] [Rank 0] step:5461/10000 train_time:478072ms step_avg:87.54ms +[2025-08-22 15:04:10] [Rank 0] step:5481/10000 train_time:479903ms step_avg:87.56ms +[2025-08-22 15:04:10] [Rank 0] step:5481/10000 train_time:479903ms step_avg:87.56ms +[2025-08-22 15:04:12] [Rank 0] step:5501/10000 train_time:481741ms step_avg:87.57ms +[2025-08-22 15:04:12] [Rank 0] step:5501/10000 train_time:481741ms step_avg:87.57ms +[2025-08-22 15:04:14] [Rank 0] step:5521/10000 train_time:483581ms step_avg:87.59ms +[2025-08-22 15:04:14] [Rank 0] step:5521/10000 train_time:483581ms step_avg:87.59ms +[2025-08-22 15:04:16] [Rank 0] step:5541/10000 train_time:485415ms step_avg:87.60ms +[2025-08-22 15:04:16] [Rank 0] step:5541/10000 train_time:485415ms step_avg:87.60ms +[2025-08-22 15:04:17] [Rank 0] step:5561/10000 train_time:487250ms step_avg:87.62ms +[2025-08-22 15:04:17] [Rank 0] step:5561/10000 train_time:487250ms step_avg:87.62ms +[2025-08-22 15:04:19] [Rank 0] step:5581/10000 train_time:489085ms step_avg:87.63ms +[2025-08-22 15:04:19] [Rank 0] step:5581/10000 train_time:489085ms step_avg:87.63ms +[2025-08-22 15:04:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:04:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:04:35] [Rank 0] PRINT: step:5600/10000 val_loss:3.9009 svd_entropy: attn_qk:H=0.7626,top10E=0.25,eRank=188.9,q75/q25=81.35 attn_vo:H=0.7958,top10E=0.08,eRank=312.8,q75/q25=inf mlp_w1:H=0.8594,top10E=0.16,eRank=308.9,q75/q25=8.94 mlp_w2:H=0.9222,top10E=0.10,eRank=459.3,q75/q25=5.24 vo_prod:H=0.6443,top10E=0.14,eRank=157.7,q75/q25=inf train_time:491015ms step_avg:87.68ms +[2025-08-22 15:04:35] [Rank 0] PRINT: step:5600/10000 val_loss:3.9009 svd_entropy: attn_qk:H=0.7626,top10E=0.25,eRank=188.9,q75/q25=81.35 attn_vo:H=0.7958,top10E=0.08,eRank=312.8,q75/q25=inf mlp_w1:H=0.8594,top10E=0.16,eRank=308.9,q75/q25=8.94 mlp_w2:H=0.9222,top10E=0.10,eRank=459.3,q75/q25=5.24 vo_prod:H=0.6443,top10E=0.14,eRank=157.7,q75/q25=inf train_time:491015ms step_avg:87.68ms +[2025-08-22 15:04:35] [Rank 0] step:5601/10000 train_time:491030ms step_avg:87.67ms +[2025-08-22 15:04:35] [Rank 0] step:5601/10000 train_time:491030ms step_avg:87.67ms +[2025-08-22 15:04:37] [Rank 0] step:5621/10000 train_time:492773ms step_avg:87.67ms +[2025-08-22 15:04:37] [Rank 0] step:5621/10000 train_time:492773ms step_avg:87.67ms +[2025-08-22 15:04:38] [Rank 0] step:5641/10000 train_time:494603ms step_avg:87.68ms +[2025-08-22 15:04:38] [Rank 0] step:5641/10000 train_time:494603ms step_avg:87.68ms +[2025-08-22 15:04:40] [Rank 0] step:5661/10000 train_time:496431ms step_avg:87.69ms +[2025-08-22 15:04:40] [Rank 0] step:5661/10000 train_time:496431ms step_avg:87.69ms +[2025-08-22 15:04:42] [Rank 0] step:5681/10000 train_time:498264ms step_avg:87.71ms +[2025-08-22 15:04:42] [Rank 0] step:5681/10000 train_time:498264ms step_avg:87.71ms +[2025-08-22 15:04:44] [Rank 0] step:5701/10000 train_time:500097ms step_avg:87.72ms +[2025-08-22 15:04:44] [Rank 0] step:5701/10000 train_time:500097ms step_avg:87.72ms +[2025-08-22 15:04:46] [Rank 0] step:5721/10000 train_time:501932ms step_avg:87.74ms +[2025-08-22 15:04:46] [Rank 0] step:5721/10000 train_time:501932ms step_avg:87.74ms +[2025-08-22 15:04:48] [Rank 0] 
step:5741/10000 train_time:503764ms step_avg:87.75ms +[2025-08-22 15:04:48] [Rank 0] step:5741/10000 train_time:503764ms step_avg:87.75ms +[2025-08-22 15:04:49] [Rank 0] step:5761/10000 train_time:505598ms step_avg:87.76ms +[2025-08-22 15:04:49] [Rank 0] step:5761/10000 train_time:505598ms step_avg:87.76ms +[2025-08-22 15:04:51] [Rank 0] step:5781/10000 train_time:507430ms step_avg:87.78ms +[2025-08-22 15:04:51] [Rank 0] step:5781/10000 train_time:507430ms step_avg:87.78ms +[2025-08-22 15:04:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:04:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:05:07] [Rank 0] PRINT: step:5800/10000 val_loss:3.8785 svd_entropy: attn_qk:H=0.7641,top10E=0.25,eRank=190.2,q75/q25=80.90 attn_vo:H=0.7968,top10E=0.08,eRank=314.9,q75/q25=inf mlp_w1:H=0.8613,top10E=0.16,eRank=312.7,q75/q25=8.83 mlp_w2:H=0.9232,top10E=0.10,eRank=462.4,q75/q25=5.15 vo_prod:H=0.6460,top10E=0.14,eRank=159.5,q75/q25=inf train_time:509354ms step_avg:87.82ms +[2025-08-22 15:05:07] [Rank 0] PRINT: step:5800/10000 val_loss:3.8785 svd_entropy: attn_qk:H=0.7641,top10E=0.25,eRank=190.2,q75/q25=80.90 attn_vo:H=0.7968,top10E=0.08,eRank=314.9,q75/q25=inf mlp_w1:H=0.8613,top10E=0.16,eRank=312.7,q75/q25=8.83 mlp_w2:H=0.9232,top10E=0.10,eRank=462.4,q75/q25=5.15 vo_prod:H=0.6460,top10E=0.14,eRank=159.5,q75/q25=inf train_time:509354ms step_avg:87.82ms +[2025-08-22 15:05:07] [Rank 0] step:5801/10000 train_time:509369ms step_avg:87.81ms +[2025-08-22 15:05:07] [Rank 0] step:5801/10000 train_time:509369ms step_avg:87.81ms +[2025-08-22 15:05:09] [Rank 0] step:5821/10000 train_time:511114ms step_avg:87.81ms +[2025-08-22 15:05:09] [Rank 0] step:5821/10000 train_time:511114ms step_avg:87.81ms +[2025-08-22 15:05:10] [Rank 0] step:5841/10000 train_time:512943ms step_avg:87.82ms +[2025-08-22 
15:05:10] [Rank 0] step:5841/10000 train_time:512943ms step_avg:87.82ms +[2025-08-22 15:05:12] [Rank 0] step:5861/10000 train_time:514778ms step_avg:87.83ms +[2025-08-22 15:05:12] [Rank 0] step:5861/10000 train_time:514778ms step_avg:87.83ms +[2025-08-22 15:05:14] [Rank 0] step:5881/10000 train_time:516610ms step_avg:87.84ms +[2025-08-22 15:05:14] [Rank 0] step:5881/10000 train_time:516610ms step_avg:87.84ms +[2025-08-22 15:05:16] [Rank 0] step:5901/10000 train_time:518441ms step_avg:87.86ms +[2025-08-22 15:05:16] [Rank 0] step:5901/10000 train_time:518441ms step_avg:87.86ms +[2025-08-22 15:05:18] [Rank 0] step:5921/10000 train_time:520274ms step_avg:87.87ms +[2025-08-22 15:05:18] [Rank 0] step:5921/10000 train_time:520274ms step_avg:87.87ms +[2025-08-22 15:05:20] [Rank 0] step:5941/10000 train_time:522196ms step_avg:87.90ms +[2025-08-22 15:05:20] [Rank 0] step:5941/10000 train_time:522196ms step_avg:87.90ms +[2025-08-22 15:05:21] [Rank 0] step:5961/10000 train_time:523967ms step_avg:87.90ms +[2025-08-22 15:05:21] [Rank 0] step:5961/10000 train_time:523967ms step_avg:87.90ms +[2025-08-22 15:05:23] [Rank 0] step:5981/10000 train_time:525846ms step_avg:87.92ms +[2025-08-22 15:05:23] [Rank 0] step:5981/10000 train_time:525846ms step_avg:87.92ms +[2025-08-22 15:05:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:05:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:05:39] [Rank 0] PRINT: step:6000/10000 val_loss:3.8531 svd_entropy: attn_qk:H=0.7656,top10E=0.25,eRank=191.4,q75/q25=81.10 attn_vo:H=0.7978,top10E=0.08,eRank=316.9,q75/q25=inf mlp_w1:H=0.8630,top10E=0.16,eRank=316.3,q75/q25=8.73 mlp_w2:H=0.9241,top10E=0.10,eRank=465.3,q75/q25=5.09 vo_prod:H=0.6475,top10E=0.14,eRank=161.3,q75/q25=inf train_time:527772ms step_avg:87.96ms +[2025-08-22 15:05:39] [Rank 0] PRINT: step:6000/10000 val_loss:3.8531 svd_entropy: attn_qk:H=0.7656,top10E=0.25,eRank=191.4,q75/q25=81.10 attn_vo:H=0.7978,top10E=0.08,eRank=316.9,q75/q25=inf mlp_w1:H=0.8630,top10E=0.16,eRank=316.3,q75/q25=8.73 mlp_w2:H=0.9241,top10E=0.10,eRank=465.3,q75/q25=5.09 vo_prod:H=0.6475,top10E=0.14,eRank=161.3,q75/q25=inf train_time:527772ms step_avg:87.96ms +[2025-08-22 15:05:39] [Rank 0] step:6001/10000 train_time:527787ms step_avg:87.95ms +[2025-08-22 15:05:39] [Rank 0] step:6001/10000 train_time:527787ms step_avg:87.95ms +[2025-08-22 15:05:41] [Rank 0] step:6021/10000 train_time:529542ms step_avg:87.95ms +[2025-08-22 15:05:41] [Rank 0] step:6021/10000 train_time:529542ms step_avg:87.95ms +[2025-08-22 15:05:43] [Rank 0] step:6041/10000 train_time:531379ms step_avg:87.96ms +[2025-08-22 15:05:43] [Rank 0] step:6041/10000 train_time:531379ms step_avg:87.96ms +[2025-08-22 15:05:44] [Rank 0] step:6061/10000 train_time:533224ms step_avg:87.98ms +[2025-08-22 15:05:44] [Rank 0] step:6061/10000 train_time:533224ms step_avg:87.98ms +[2025-08-22 15:05:46] [Rank 0] step:6081/10000 train_time:535060ms step_avg:87.99ms +[2025-08-22 15:05:46] [Rank 0] step:6081/10000 train_time:535060ms step_avg:87.99ms +[2025-08-22 15:05:48] [Rank 0] step:6101/10000 train_time:536905ms step_avg:88.00ms +[2025-08-22 15:05:48] [Rank 0] step:6101/10000 train_time:536905ms step_avg:88.00ms +[2025-08-22 15:05:50] [Rank 0] step:6121/10000 train_time:539016ms step_avg:88.06ms +[2025-08-22 15:05:50] [Rank 0] step:6121/10000 train_time:539016ms step_avg:88.06ms +[2025-08-22 15:05:52] [Rank 0] 
step:6141/10000 train_time:540867ms step_avg:88.07ms +[2025-08-22 15:05:52] [Rank 0] step:6141/10000 train_time:540867ms step_avg:88.07ms +[2025-08-22 15:05:54] [Rank 0] step:6161/10000 train_time:542709ms step_avg:88.09ms +[2025-08-22 15:05:54] [Rank 0] step:6161/10000 train_time:542709ms step_avg:88.09ms +[2025-08-22 15:05:56] [Rank 0] step:6181/10000 train_time:544548ms step_avg:88.10ms +[2025-08-22 15:05:56] [Rank 0] step:6181/10000 train_time:544548ms step_avg:88.10ms +[2025-08-22 15:05:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:05:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:06:11] [Rank 0] PRINT: step:6200/10000 val_loss:3.8503 svd_entropy: attn_qk:H=0.7668,top10E=0.25,eRank=192.5,q75/q25=80.97 attn_vo:H=0.7988,top10E=0.08,eRank=318.8,q75/q25=inf mlp_w1:H=0.8646,top10E=0.15,eRank=319.7,q75/q25=8.62 mlp_w2:H=0.9251,top10E=0.10,eRank=468.4,q75/q25=5.01 vo_prod:H=0.6490,top10E=0.14,eRank=163.1,q75/q25=inf train_time:546484ms step_avg:88.14ms +[2025-08-22 15:06:11] [Rank 0] PRINT: step:6200/10000 val_loss:3.8503 svd_entropy: attn_qk:H=0.7668,top10E=0.25,eRank=192.5,q75/q25=80.97 attn_vo:H=0.7988,top10E=0.08,eRank=318.8,q75/q25=inf mlp_w1:H=0.8646,top10E=0.15,eRank=319.7,q75/q25=8.62 mlp_w2:H=0.9251,top10E=0.10,eRank=468.4,q75/q25=5.01 vo_prod:H=0.6490,top10E=0.14,eRank=163.1,q75/q25=inf train_time:546484ms step_avg:88.14ms +[2025-08-22 15:06:11] [Rank 0] step:6201/10000 train_time:546500ms step_avg:88.13ms +[2025-08-22 15:06:11] [Rank 0] step:6201/10000 train_time:546500ms step_avg:88.13ms +[2025-08-22 15:06:13] [Rank 0] step:6221/10000 train_time:548252ms step_avg:88.13ms +[2025-08-22 15:06:13] [Rank 0] step:6221/10000 train_time:548252ms step_avg:88.13ms +[2025-08-22 15:06:15] [Rank 0] step:6241/10000 train_time:550083ms step_avg:88.14ms +[2025-08-22 
15:06:15] [Rank 0] step:6241/10000 train_time:550083ms step_avg:88.14ms +[2025-08-22 15:06:17] [Rank 0] step:6261/10000 train_time:551921ms step_avg:88.15ms +[2025-08-22 15:06:17] [Rank 0] step:6261/10000 train_time:551921ms step_avg:88.15ms +[2025-08-22 15:06:19] [Rank 0] step:6281/10000 train_time:553762ms step_avg:88.16ms +[2025-08-22 15:06:19] [Rank 0] step:6281/10000 train_time:553762ms step_avg:88.16ms +[2025-08-22 15:06:20] [Rank 0] step:6301/10000 train_time:555600ms step_avg:88.18ms +[2025-08-22 15:06:20] [Rank 0] step:6301/10000 train_time:555600ms step_avg:88.18ms +[2025-08-22 15:06:22] [Rank 0] step:6321/10000 train_time:557435ms step_avg:88.19ms +[2025-08-22 15:06:22] [Rank 0] step:6321/10000 train_time:557435ms step_avg:88.19ms +[2025-08-22 15:06:24] [Rank 0] step:6341/10000 train_time:559277ms step_avg:88.20ms +[2025-08-22 15:06:24] [Rank 0] step:6341/10000 train_time:559277ms step_avg:88.20ms +[2025-08-22 15:06:26] [Rank 0] step:6361/10000 train_time:561120ms step_avg:88.21ms +[2025-08-22 15:06:26] [Rank 0] step:6361/10000 train_time:561120ms step_avg:88.21ms +[2025-08-22 15:06:28] [Rank 0] step:6381/10000 train_time:562961ms step_avg:88.22ms +[2025-08-22 15:06:28] [Rank 0] step:6381/10000 train_time:562961ms step_avg:88.22ms +[2025-08-22 15:06:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:06:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:06:43] [Rank 0] PRINT: step:6400/10000 val_loss:3.8229 svd_entropy: attn_qk:H=0.7680,top10E=0.25,eRank=193.6,q75/q25=81.05 attn_vo:H=0.7996,top10E=0.08,eRank=320.5,q75/q25=inf mlp_w1:H=0.8661,top10E=0.15,eRank=322.8,q75/q25=8.53 mlp_w2:H=0.9258,top10E=0.10,eRank=470.6,q75/q25=4.95 vo_prod:H=0.6503,top10E=0.14,eRank=164.5,q75/q25=inf train_time:564888ms step_avg:88.26ms +[2025-08-22 15:06:43] [Rank 0] PRINT: step:6400/10000 val_loss:3.8229 svd_entropy: attn_qk:H=0.7680,top10E=0.25,eRank=193.6,q75/q25=81.05 attn_vo:H=0.7996,top10E=0.08,eRank=320.5,q75/q25=inf mlp_w1:H=0.8661,top10E=0.15,eRank=322.8,q75/q25=8.53 mlp_w2:H=0.9258,top10E=0.10,eRank=470.6,q75/q25=4.95 vo_prod:H=0.6503,top10E=0.14,eRank=164.5,q75/q25=inf train_time:564888ms step_avg:88.26ms +[2025-08-22 15:06:43] [Rank 0] step:6401/10000 train_time:564903ms step_avg:88.25ms +[2025-08-22 15:06:43] [Rank 0] step:6401/10000 train_time:564903ms step_avg:88.25ms +[2025-08-22 15:06:45] [Rank 0] step:6421/10000 train_time:566658ms step_avg:88.25ms +[2025-08-22 15:06:45] [Rank 0] step:6421/10000 train_time:566658ms step_avg:88.25ms +[2025-08-22 15:06:47] [Rank 0] step:6441/10000 train_time:568491ms step_avg:88.26ms +[2025-08-22 15:06:47] [Rank 0] step:6441/10000 train_time:568491ms step_avg:88.26ms +[2025-08-22 15:06:49] [Rank 0] step:6461/10000 train_time:570335ms step_avg:88.27ms +[2025-08-22 15:06:49] [Rank 0] step:6461/10000 train_time:570335ms step_avg:88.27ms +[2025-08-22 15:06:51] [Rank 0] step:6481/10000 train_time:572182ms step_avg:88.29ms +[2025-08-22 15:06:51] [Rank 0] step:6481/10000 train_time:572182ms step_avg:88.29ms +[2025-08-22 15:06:52] [Rank 0] step:6501/10000 train_time:574019ms step_avg:88.30ms +[2025-08-22 15:06:52] [Rank 0] step:6501/10000 train_time:574019ms step_avg:88.30ms +[2025-08-22 15:06:54] [Rank 0] step:6521/10000 train_time:575853ms step_avg:88.31ms +[2025-08-22 15:06:54] [Rank 0] step:6521/10000 train_time:575853ms step_avg:88.31ms +[2025-08-22 15:06:56] [Rank 0] 
step:6541/10000 train_time:577692ms step_avg:88.32ms +[2025-08-22 15:06:56] [Rank 0] step:6541/10000 train_time:577692ms step_avg:88.32ms +[2025-08-22 15:06:58] [Rank 0] step:6561/10000 train_time:579532ms step_avg:88.33ms +[2025-08-22 15:06:58] [Rank 0] step:6561/10000 train_time:579532ms step_avg:88.33ms +[2025-08-22 15:07:00] [Rank 0] step:6581/10000 train_time:581367ms step_avg:88.34ms +[2025-08-22 15:07:00] [Rank 0] step:6581/10000 train_time:581367ms step_avg:88.34ms +[2025-08-22 15:07:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:07:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:07:15] [Rank 0] PRINT: step:6600/10000 val_loss:3.8093 svd_entropy: attn_qk:H=0.7692,top10E=0.25,eRank=194.6,q75/q25=81.40 attn_vo:H=0.8004,top10E=0.08,eRank=322.1,q75/q25=inf mlp_w1:H=0.8674,top10E=0.15,eRank=325.6,q75/q25=8.47 mlp_w2:H=0.9265,top10E=0.10,eRank=472.5,q75/q25=4.90 vo_prod:H=0.6514,top10E=0.14,eRank=165.8,q75/q25=inf train_time:583298ms step_avg:88.38ms +[2025-08-22 15:07:15] [Rank 0] PRINT: step:6600/10000 val_loss:3.8093 svd_entropy: attn_qk:H=0.7692,top10E=0.25,eRank=194.6,q75/q25=81.40 attn_vo:H=0.8004,top10E=0.08,eRank=322.1,q75/q25=inf mlp_w1:H=0.8674,top10E=0.15,eRank=325.6,q75/q25=8.47 mlp_w2:H=0.9265,top10E=0.10,eRank=472.5,q75/q25=4.90 vo_prod:H=0.6514,top10E=0.14,eRank=165.8,q75/q25=inf train_time:583298ms step_avg:88.38ms +[2025-08-22 15:07:15] [Rank 0] step:6601/10000 train_time:583313ms step_avg:88.37ms +[2025-08-22 15:07:15] [Rank 0] step:6601/10000 train_time:583313ms step_avg:88.37ms +[2025-08-22 15:07:17] [Rank 0] step:6621/10000 train_time:585056ms step_avg:88.36ms +[2025-08-22 15:07:17] [Rank 0] step:6621/10000 train_time:585056ms step_avg:88.36ms +[2025-08-22 15:07:19] [Rank 0] step:6641/10000 train_time:586898ms step_avg:88.37ms +[2025-08-22 
15:07:19] [Rank 0] step:6641/10000 train_time:586898ms step_avg:88.37ms +[2025-08-22 15:07:21] [Rank 0] step:6661/10000 train_time:588734ms step_avg:88.39ms +[2025-08-22 15:07:21] [Rank 0] step:6661/10000 train_time:588734ms step_avg:88.39ms +[2025-08-22 15:07:23] [Rank 0] step:6681/10000 train_time:590588ms step_avg:88.40ms +[2025-08-22 15:07:23] [Rank 0] step:6681/10000 train_time:590588ms step_avg:88.40ms +[2025-08-22 15:07:24] [Rank 0] step:6701/10000 train_time:592461ms step_avg:88.41ms +[2025-08-22 15:07:24] [Rank 0] step:6701/10000 train_time:592461ms step_avg:88.41ms +[2025-08-22 15:07:26] [Rank 0] step:6721/10000 train_time:594331ms step_avg:88.43ms +[2025-08-22 15:07:26] [Rank 0] step:6721/10000 train_time:594331ms step_avg:88.43ms +[2025-08-22 15:07:28] [Rank 0] step:6741/10000 train_time:596198ms step_avg:88.44ms +[2025-08-22 15:07:28] [Rank 0] step:6741/10000 train_time:596198ms step_avg:88.44ms +[2025-08-22 15:07:30] [Rank 0] step:6761/10000 train_time:598060ms step_avg:88.46ms +[2025-08-22 15:07:30] [Rank 0] step:6761/10000 train_time:598060ms step_avg:88.46ms +[2025-08-22 15:07:32] [Rank 0] step:6781/10000 train_time:599930ms step_avg:88.47ms +[2025-08-22 15:07:32] [Rank 0] step:6781/10000 train_time:599930ms step_avg:88.47ms +[2025-08-22 15:07:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:07:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:07:48] [Rank 0] PRINT: step:6800/10000 val_loss:3.7958 svd_entropy: attn_qk:H=0.7701,top10E=0.24,eRank=195.5,q75/q25=81.20 attn_vo:H=0.8011,top10E=0.07,eRank=323.5,q75/q25=inf mlp_w1:H=0.8685,top10E=0.15,eRank=328.1,q75/q25=8.38 mlp_w2:H=0.9270,top10E=0.10,eRank=474.3,q75/q25=4.86 vo_prod:H=0.6524,top10E=0.14,eRank=167.0,q75/q25=inf train_time:601898ms step_avg:88.51ms +[2025-08-22 15:07:48] [Rank 0] PRINT: step:6800/10000 val_loss:3.7958 svd_entropy: attn_qk:H=0.7701,top10E=0.24,eRank=195.5,q75/q25=81.20 attn_vo:H=0.8011,top10E=0.07,eRank=323.5,q75/q25=inf mlp_w1:H=0.8685,top10E=0.15,eRank=328.1,q75/q25=8.38 mlp_w2:H=0.9270,top10E=0.10,eRank=474.3,q75/q25=4.86 vo_prod:H=0.6524,top10E=0.14,eRank=167.0,q75/q25=inf train_time:601898ms step_avg:88.51ms +[2025-08-22 15:07:48] [Rank 0] step:6801/10000 train_time:601913ms step_avg:88.50ms +[2025-08-22 15:07:48] [Rank 0] step:6801/10000 train_time:601913ms step_avg:88.50ms +[2025-08-22 15:07:50] [Rank 0] step:6821/10000 train_time:603682ms step_avg:88.50ms +[2025-08-22 15:07:50] [Rank 0] step:6821/10000 train_time:603682ms step_avg:88.50ms +[2025-08-22 15:07:51] [Rank 0] step:6841/10000 train_time:605547ms step_avg:88.52ms +[2025-08-22 15:07:51] [Rank 0] step:6841/10000 train_time:605547ms step_avg:88.52ms +[2025-08-22 15:07:53] [Rank 0] step:6861/10000 train_time:607410ms step_avg:88.53ms +[2025-08-22 15:07:53] [Rank 0] step:6861/10000 train_time:607410ms step_avg:88.53ms +[2025-08-22 15:07:55] [Rank 0] step:6881/10000 train_time:609279ms step_avg:88.55ms +[2025-08-22 15:07:55] [Rank 0] step:6881/10000 train_time:609279ms step_avg:88.55ms +[2025-08-22 15:07:57] [Rank 0] step:6901/10000 train_time:611147ms step_avg:88.56ms +[2025-08-22 15:07:57] [Rank 0] step:6901/10000 train_time:611147ms step_avg:88.56ms +[2025-08-22 15:07:59] [Rank 0] step:6921/10000 train_time:613015ms step_avg:88.57ms +[2025-08-22 15:07:59] [Rank 0] step:6921/10000 train_time:613015ms step_avg:88.57ms +[2025-08-22 15:08:01] [Rank 0] 
step:6941/10000 train_time:614889ms step_avg:88.59ms +[2025-08-22 15:08:01] [Rank 0] step:6941/10000 train_time:614889ms step_avg:88.59ms +[2025-08-22 15:08:03] [Rank 0] step:6961/10000 train_time:616774ms step_avg:88.60ms +[2025-08-22 15:08:03] [Rank 0] step:6961/10000 train_time:616774ms step_avg:88.60ms +[2025-08-22 15:08:04] [Rank 0] step:6981/10000 train_time:618648ms step_avg:88.62ms +[2025-08-22 15:08:04] [Rank 0] step:6981/10000 train_time:618648ms step_avg:88.62ms +[2025-08-22 15:08:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:08:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:08:20] [Rank 0] PRINT: step:7000/10000 val_loss:3.7749 svd_entropy: attn_qk:H=0.7711,top10E=0.24,eRank=196.3,q75/q25=81.21 attn_vo:H=0.8017,top10E=0.07,eRank=324.8,q75/q25=inf mlp_w1:H=0.8696,top10E=0.15,eRank=330.5,q75/q25=8.32 mlp_w2:H=0.9275,top10E=0.10,eRank=475.8,q75/q25=4.81 vo_prod:H=0.6534,top10E=0.13,eRank=168.1,q75/q25=inf train_time:620618ms step_avg:88.66ms +[2025-08-22 15:08:20] [Rank 0] PRINT: step:7000/10000 val_loss:3.7749 svd_entropy: attn_qk:H=0.7711,top10E=0.24,eRank=196.3,q75/q25=81.21 attn_vo:H=0.8017,top10E=0.07,eRank=324.8,q75/q25=inf mlp_w1:H=0.8696,top10E=0.15,eRank=330.5,q75/q25=8.32 mlp_w2:H=0.9275,top10E=0.10,eRank=475.8,q75/q25=4.81 vo_prod:H=0.6534,top10E=0.13,eRank=168.1,q75/q25=inf train_time:620618ms step_avg:88.66ms +[2025-08-22 15:08:20] [Rank 0] step:7001/10000 train_time:620633ms step_avg:88.65ms +[2025-08-22 15:08:20] [Rank 0] step:7001/10000 train_time:620633ms step_avg:88.65ms +[2025-08-22 15:08:22] [Rank 0] step:7021/10000 train_time:622410ms step_avg:88.65ms +[2025-08-22 15:08:22] [Rank 0] step:7021/10000 train_time:622410ms step_avg:88.65ms +[2025-08-22 15:08:24] [Rank 0] step:7041/10000 train_time:624276ms step_avg:88.66ms +[2025-08-22 
15:08:24] [Rank 0] step:7041/10000 train_time:624276ms step_avg:88.66ms +[2025-08-22 15:08:26] [Rank 0] step:7061/10000 train_time:626142ms step_avg:88.68ms +[2025-08-22 15:08:26] [Rank 0] step:7061/10000 train_time:626142ms step_avg:88.68ms +[2025-08-22 15:08:28] [Rank 0] step:7081/10000 train_time:628008ms step_avg:88.69ms +[2025-08-22 15:08:28] [Rank 0] step:7081/10000 train_time:628008ms step_avg:88.69ms +[2025-08-22 15:08:29] [Rank 0] step:7101/10000 train_time:629883ms step_avg:88.70ms +[2025-08-22 15:08:29] [Rank 0] step:7101/10000 train_time:629883ms step_avg:88.70ms +[2025-08-22 15:08:31] [Rank 0] step:7121/10000 train_time:631746ms step_avg:88.72ms +[2025-08-22 15:08:31] [Rank 0] step:7121/10000 train_time:631746ms step_avg:88.72ms +[2025-08-22 15:08:33] [Rank 0] step:7141/10000 train_time:633612ms step_avg:88.73ms +[2025-08-22 15:08:33] [Rank 0] step:7141/10000 train_time:633612ms step_avg:88.73ms +[2025-08-22 15:08:35] [Rank 0] step:7161/10000 train_time:635482ms step_avg:88.74ms +[2025-08-22 15:08:35] [Rank 0] step:7161/10000 train_time:635482ms step_avg:88.74ms +[2025-08-22 15:08:37] [Rank 0] step:7181/10000 train_time:637353ms step_avg:88.76ms +[2025-08-22 15:08:37] [Rank 0] step:7181/10000 train_time:637353ms step_avg:88.76ms +[2025-08-22 15:08:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:08:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:08:53] [Rank 0] PRINT: step:7200/10000 val_loss:3.7625 svd_entropy: attn_qk:H=0.7720,top10E=0.24,eRank=197.1,q75/q25=81.10 attn_vo:H=0.8022,top10E=0.07,eRank=326.0,q75/q25=inf mlp_w1:H=0.8706,top10E=0.15,eRank=332.6,q75/q25=8.25 mlp_w2:H=0.9280,top10E=0.10,eRank=477.3,q75/q25=4.77 vo_prod:H=0.6542,top10E=0.13,eRank=169.1,q75/q25=inf train_time:639317ms step_avg:88.79ms +[2025-08-22 15:08:53] [Rank 0] PRINT: step:7200/10000 val_loss:3.7625 svd_entropy: attn_qk:H=0.7720,top10E=0.24,eRank=197.1,q75/q25=81.10 attn_vo:H=0.8022,top10E=0.07,eRank=326.0,q75/q25=inf mlp_w1:H=0.8706,top10E=0.15,eRank=332.6,q75/q25=8.25 mlp_w2:H=0.9280,top10E=0.10,eRank=477.3,q75/q25=4.77 vo_prod:H=0.6542,top10E=0.13,eRank=169.1,q75/q25=inf train_time:639317ms step_avg:88.79ms +[2025-08-22 15:08:53] [Rank 0] step:7201/10000 train_time:639332ms step_avg:88.78ms +[2025-08-22 15:08:53] [Rank 0] step:7201/10000 train_time:639332ms step_avg:88.78ms +[2025-08-22 15:08:55] [Rank 0] step:7221/10000 train_time:641125ms step_avg:88.79ms +[2025-08-22 15:08:55] [Rank 0] step:7221/10000 train_time:641125ms step_avg:88.79ms +[2025-08-22 15:08:56] [Rank 0] step:7241/10000 train_time:642988ms step_avg:88.80ms +[2025-08-22 15:08:56] [Rank 0] step:7241/10000 train_time:642988ms step_avg:88.80ms +[2025-08-22 15:08:58] [Rank 0] step:7261/10000 train_time:644852ms step_avg:88.81ms +[2025-08-22 15:08:58] [Rank 0] step:7261/10000 train_time:644852ms step_avg:88.81ms +[2025-08-22 15:09:00] [Rank 0] step:7281/10000 train_time:646728ms step_avg:88.82ms +[2025-08-22 15:09:00] [Rank 0] step:7281/10000 train_time:646728ms step_avg:88.82ms +[2025-08-22 15:09:02] [Rank 0] step:7301/10000 train_time:648595ms step_avg:88.84ms +[2025-08-22 15:09:02] [Rank 0] step:7301/10000 train_time:648595ms step_avg:88.84ms +[2025-08-22 15:09:04] [Rank 0] step:7321/10000 train_time:650474ms step_avg:88.85ms +[2025-08-22 15:09:04] [Rank 0] step:7321/10000 train_time:650474ms step_avg:88.85ms +[2025-08-22 15:09:06] [Rank 0] 
step:7341/10000 train_time:652341ms step_avg:88.86ms +[2025-08-22 15:09:06] [Rank 0] step:7341/10000 train_time:652341ms step_avg:88.86ms +[2025-08-22 15:09:08] [Rank 0] step:7361/10000 train_time:654218ms step_avg:88.88ms +[2025-08-22 15:09:08] [Rank 0] step:7361/10000 train_time:654218ms step_avg:88.88ms +[2025-08-22 15:09:10] [Rank 0] step:7381/10000 train_time:656095ms step_avg:88.89ms +[2025-08-22 15:09:10] [Rank 0] step:7381/10000 train_time:656095ms step_avg:88.89ms +[2025-08-22 15:09:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:09:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:09:25] [Rank 0] PRINT: step:7400/10000 val_loss:3.7400 svd_entropy: attn_qk:H=0.7727,top10E=0.24,eRank=197.8,q75/q25=81.09 attn_vo:H=0.8027,top10E=0.07,eRank=327.0,q75/q25=inf mlp_w1:H=0.8714,top10E=0.15,eRank=334.5,q75/q25=8.17 mlp_w2:H=0.9284,top10E=0.10,eRank=478.7,q75/q25=4.74 vo_prod:H=0.6550,top10E=0.13,eRank=170.1,q75/q25=inf train_time:658044ms step_avg:88.92ms +[2025-08-22 15:09:25] [Rank 0] PRINT: step:7400/10000 val_loss:3.7400 svd_entropy: attn_qk:H=0.7727,top10E=0.24,eRank=197.8,q75/q25=81.09 attn_vo:H=0.8027,top10E=0.07,eRank=327.0,q75/q25=inf mlp_w1:H=0.8714,top10E=0.15,eRank=334.5,q75/q25=8.17 mlp_w2:H=0.9284,top10E=0.10,eRank=478.7,q75/q25=4.74 vo_prod:H=0.6550,top10E=0.13,eRank=170.1,q75/q25=inf train_time:658044ms step_avg:88.92ms +[2025-08-22 15:09:25] [Rank 0] step:7401/10000 train_time:658059ms step_avg:88.91ms +[2025-08-22 15:09:25] [Rank 0] step:7401/10000 train_time:658059ms step_avg:88.91ms +[2025-08-22 15:09:27] [Rank 0] step:7421/10000 train_time:659857ms step_avg:88.92ms +[2025-08-22 15:09:27] [Rank 0] step:7421/10000 train_time:659857ms step_avg:88.92ms +[2025-08-22 15:09:29] [Rank 0] step:7441/10000 train_time:661726ms step_avg:88.93ms +[2025-08-22 
15:09:29] [Rank 0] step:7441/10000 train_time:661726ms step_avg:88.93ms +[2025-08-22 15:09:31] [Rank 0] step:7461/10000 train_time:663591ms step_avg:88.94ms +[2025-08-22 15:09:31] [Rank 0] step:7461/10000 train_time:663591ms step_avg:88.94ms +[2025-08-22 15:09:33] [Rank 0] step:7481/10000 train_time:665468ms step_avg:88.95ms +[2025-08-22 15:09:33] [Rank 0] step:7481/10000 train_time:665468ms step_avg:88.95ms +[2025-08-22 15:09:35] [Rank 0] step:7501/10000 train_time:667341ms step_avg:88.97ms +[2025-08-22 15:09:35] [Rank 0] step:7501/10000 train_time:667341ms step_avg:88.97ms +[2025-08-22 15:09:37] [Rank 0] step:7521/10000 train_time:669216ms step_avg:88.98ms +[2025-08-22 15:09:37] [Rank 0] step:7521/10000 train_time:669216ms step_avg:88.98ms +[2025-08-22 15:09:39] [Rank 0] step:7541/10000 train_time:671101ms step_avg:88.99ms +[2025-08-22 15:09:39] [Rank 0] step:7541/10000 train_time:671101ms step_avg:88.99ms +[2025-08-22 15:09:40] [Rank 0] step:7561/10000 train_time:672962ms step_avg:89.00ms +[2025-08-22 15:09:40] [Rank 0] step:7561/10000 train_time:672962ms step_avg:89.00ms +[2025-08-22 15:09:42] [Rank 0] step:7581/10000 train_time:674849ms step_avg:89.02ms +[2025-08-22 15:09:42] [Rank 0] step:7581/10000 train_time:674849ms step_avg:89.02ms +[2025-08-22 15:09:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:09:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:09:58] [Rank 0] PRINT: step:7600/10000 val_loss:3.7330 svd_entropy: attn_qk:H=0.7735,top10E=0.24,eRank=198.5,q75/q25=80.68 attn_vo:H=0.8032,top10E=0.07,eRank=328.0,q75/q25=inf mlp_w1:H=0.8723,top10E=0.15,eRank=336.3,q75/q25=8.12 mlp_w2:H=0.9288,top10E=0.10,eRank=480.0,q75/q25=4.71 vo_prod:H=0.6557,top10E=0.13,eRank=170.9,q75/q25=inf train_time:676828ms step_avg:89.06ms +[2025-08-22 15:09:58] [Rank 0] PRINT: step:7600/10000 val_loss:3.7330 svd_entropy: attn_qk:H=0.7735,top10E=0.24,eRank=198.5,q75/q25=80.68 attn_vo:H=0.8032,top10E=0.07,eRank=328.0,q75/q25=inf mlp_w1:H=0.8723,top10E=0.15,eRank=336.3,q75/q25=8.12 mlp_w2:H=0.9288,top10E=0.10,eRank=480.0,q75/q25=4.71 vo_prod:H=0.6557,top10E=0.13,eRank=170.9,q75/q25=inf train_time:676828ms step_avg:89.06ms +[2025-08-22 15:09:58] [Rank 0] step:7601/10000 train_time:676843ms step_avg:89.05ms +[2025-08-22 15:09:58] [Rank 0] step:7601/10000 train_time:676843ms step_avg:89.05ms +[2025-08-22 15:10:00] [Rank 0] step:7621/10000 train_time:678618ms step_avg:89.05ms +[2025-08-22 15:10:00] [Rank 0] step:7621/10000 train_time:678618ms step_avg:89.05ms +[2025-08-22 15:10:02] [Rank 0] step:7641/10000 train_time:680490ms step_avg:89.06ms +[2025-08-22 15:10:02] [Rank 0] step:7641/10000 train_time:680490ms step_avg:89.06ms +[2025-08-22 15:10:03] [Rank 0] step:7661/10000 train_time:682364ms step_avg:89.07ms +[2025-08-22 15:10:03] [Rank 0] step:7661/10000 train_time:682364ms step_avg:89.07ms +[2025-08-22 15:10:05] [Rank 0] step:7681/10000 train_time:684233ms step_avg:89.08ms +[2025-08-22 15:10:05] [Rank 0] step:7681/10000 train_time:684233ms step_avg:89.08ms +[2025-08-22 15:10:07] [Rank 0] step:7701/10000 train_time:686104ms step_avg:89.09ms +[2025-08-22 15:10:07] [Rank 0] step:7701/10000 train_time:686104ms step_avg:89.09ms +[2025-08-22 15:10:09] [Rank 0] step:7721/10000 train_time:687992ms step_avg:89.11ms +[2025-08-22 15:10:09] [Rank 0] step:7721/10000 train_time:687992ms step_avg:89.11ms +[2025-08-22 15:10:11] [Rank 0] 
step:7741/10000 train_time:689866ms step_avg:89.12ms +[2025-08-22 15:10:11] [Rank 0] step:7741/10000 train_time:689866ms step_avg:89.12ms +[2025-08-22 15:10:13] [Rank 0] step:7761/10000 train_time:691749ms step_avg:89.13ms +[2025-08-22 15:10:13] [Rank 0] step:7761/10000 train_time:691749ms step_avg:89.13ms +[2025-08-22 15:10:15] [Rank 0] step:7781/10000 train_time:693629ms step_avg:89.14ms +[2025-08-22 15:10:15] [Rank 0] step:7781/10000 train_time:693629ms step_avg:89.14ms +[2025-08-22 15:10:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:10:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:10:30] [Rank 0] PRINT: step:7800/10000 val_loss:3.7163 svd_entropy: attn_qk:H=0.7742,top10E=0.24,eRank=199.1,q75/q25=80.61 attn_vo:H=0.8036,top10E=0.07,eRank=328.8,q75/q25=inf mlp_w1:H=0.8730,top10E=0.15,eRank=337.9,q75/q25=8.07 mlp_w2:H=0.9292,top10E=0.10,eRank=481.1,q75/q25=4.68 vo_prod:H=0.6563,top10E=0.13,eRank=171.7,q75/q25=inf train_time:695612ms step_avg:89.18ms +[2025-08-22 15:10:30] [Rank 0] PRINT: step:7800/10000 val_loss:3.7163 svd_entropy: attn_qk:H=0.7742,top10E=0.24,eRank=199.1,q75/q25=80.61 attn_vo:H=0.8036,top10E=0.07,eRank=328.8,q75/q25=inf mlp_w1:H=0.8730,top10E=0.15,eRank=337.9,q75/q25=8.07 mlp_w2:H=0.9292,top10E=0.10,eRank=481.1,q75/q25=4.68 vo_prod:H=0.6563,top10E=0.13,eRank=171.7,q75/q25=inf train_time:695612ms step_avg:89.18ms +[2025-08-22 15:10:30] [Rank 0] step:7801/10000 train_time:695627ms step_avg:89.17ms +[2025-08-22 15:10:30] [Rank 0] step:7801/10000 train_time:695627ms step_avg:89.17ms +[2025-08-22 15:10:32] [Rank 0] step:7821/10000 train_time:697420ms step_avg:89.17ms +[2025-08-22 15:10:32] [Rank 0] step:7821/10000 train_time:697420ms step_avg:89.17ms +[2025-08-22 15:10:34] [Rank 0] step:7841/10000 train_time:699287ms step_avg:89.18ms +[2025-08-22 
15:10:34] [Rank 0] step:7841/10000 train_time:699287ms step_avg:89.18ms +[2025-08-22 15:10:36] [Rank 0] step:7861/10000 train_time:701229ms step_avg:89.20ms +[2025-08-22 15:10:36] [Rank 0] step:7861/10000 train_time:701229ms step_avg:89.20ms +[2025-08-22 15:10:38] [Rank 0] step:7881/10000 train_time:703037ms step_avg:89.21ms +[2025-08-22 15:10:38] [Rank 0] step:7881/10000 train_time:703037ms step_avg:89.21ms +[2025-08-22 15:10:40] [Rank 0] step:7901/10000 train_time:704967ms step_avg:89.23ms +[2025-08-22 15:10:40] [Rank 0] step:7901/10000 train_time:704967ms step_avg:89.23ms +[2025-08-22 15:10:41] [Rank 0] step:7921/10000 train_time:706844ms step_avg:89.24ms +[2025-08-22 15:10:41] [Rank 0] step:7921/10000 train_time:706844ms step_avg:89.24ms +[2025-08-22 15:10:43] [Rank 0] step:7941/10000 train_time:708724ms step_avg:89.25ms +[2025-08-22 15:10:43] [Rank 0] step:7941/10000 train_time:708724ms step_avg:89.25ms +[2025-08-22 15:10:45] [Rank 0] step:7961/10000 train_time:710601ms step_avg:89.26ms +[2025-08-22 15:10:45] [Rank 0] step:7961/10000 train_time:710601ms step_avg:89.26ms +[2025-08-22 15:10:47] [Rank 0] step:7981/10000 train_time:712467ms step_avg:89.27ms +[2025-08-22 15:10:47] [Rank 0] step:7981/10000 train_time:712467ms step_avg:89.27ms +[2025-08-22 15:10:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:10:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:11:02] [Rank 0] PRINT: step:8000/10000 val_loss:3.6979 svd_entropy: attn_qk:H=0.7747,top10E=0.24,eRank=199.7,q75/q25=80.58 attn_vo:H=0.8040,top10E=0.07,eRank=329.6,q75/q25=inf mlp_w1:H=0.8736,top10E=0.15,eRank=339.4,q75/q25=8.03 mlp_w2:H=0.9295,top10E=0.10,eRank=482.2,q75/q25=4.65 vo_prod:H=0.6569,top10E=0.13,eRank=172.5,q75/q25=inf train_time:714440ms step_avg:89.30ms +[2025-08-22 15:11:02] [Rank 0] PRINT: step:8000/10000 val_loss:3.6979 svd_entropy: attn_qk:H=0.7747,top10E=0.24,eRank=199.7,q75/q25=80.58 attn_vo:H=0.8040,top10E=0.07,eRank=329.6,q75/q25=inf mlp_w1:H=0.8736,top10E=0.15,eRank=339.4,q75/q25=8.03 mlp_w2:H=0.9295,top10E=0.10,eRank=482.2,q75/q25=4.65 vo_prod:H=0.6569,top10E=0.13,eRank=172.5,q75/q25=inf train_time:714440ms step_avg:89.30ms +[2025-08-22 15:11:03] [Rank 0] step:8001/10000 train_time:714456ms step_avg:89.30ms +[2025-08-22 15:11:03] [Rank 0] step:8001/10000 train_time:714456ms step_avg:89.30ms +[2025-08-22 15:11:04] [Rank 0] step:8021/10000 train_time:716228ms step_avg:89.29ms +[2025-08-22 15:11:04] [Rank 0] step:8021/10000 train_time:716228ms step_avg:89.29ms +[2025-08-22 15:11:06] [Rank 0] step:8041/10000 train_time:718106ms step_avg:89.31ms +[2025-08-22 15:11:06] [Rank 0] step:8041/10000 train_time:718106ms step_avg:89.31ms +[2025-08-22 15:11:08] [Rank 0] step:8061/10000 train_time:719979ms step_avg:89.32ms +[2025-08-22 15:11:08] [Rank 0] step:8061/10000 train_time:719979ms step_avg:89.32ms +[2025-08-22 15:11:10] [Rank 0] step:8081/10000 train_time:721841ms step_avg:89.33ms +[2025-08-22 15:11:10] [Rank 0] step:8081/10000 train_time:721841ms step_avg:89.33ms +[2025-08-22 15:11:12] [Rank 0] step:8101/10000 train_time:723720ms step_avg:89.34ms +[2025-08-22 15:11:12] [Rank 0] step:8101/10000 train_time:723720ms step_avg:89.34ms +[2025-08-22 15:11:14] [Rank 0] step:8121/10000 train_time:725589ms step_avg:89.35ms +[2025-08-22 15:11:14] [Rank 0] step:8121/10000 train_time:725589ms step_avg:89.35ms +[2025-08-22 15:11:16] [Rank 0] 
step:8141/10000 train_time:727619ms step_avg:89.38ms +[2025-08-22 15:11:16] [Rank 0] step:8141/10000 train_time:727619ms step_avg:89.38ms +[2025-08-22 15:11:18] [Rank 0] step:8161/10000 train_time:729505ms step_avg:89.39ms +[2025-08-22 15:11:18] [Rank 0] step:8161/10000 train_time:729505ms step_avg:89.39ms +[2025-08-22 15:11:20] [Rank 0] step:8181/10000 train_time:731408ms step_avg:89.40ms +[2025-08-22 15:11:20] [Rank 0] step:8181/10000 train_time:731408ms step_avg:89.40ms +[2025-08-22 15:11:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:11:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:11:35] [Rank 0] PRINT: step:8200/10000 val_loss:3.6874 svd_entropy: attn_qk:H=0.7753,top10E=0.24,eRank=200.2,q75/q25=80.74 attn_vo:H=0.8043,top10E=0.07,eRank=330.4,q75/q25=inf mlp_w1:H=0.8742,top10E=0.15,eRank=340.7,q75/q25=7.99 mlp_w2:H=0.9298,top10E=0.10,eRank=483.1,q75/q25=4.63 vo_prod:H=0.6574,top10E=0.13,eRank=173.2,q75/q25=inf train_time:733430ms step_avg:89.44ms +[2025-08-22 15:11:35] [Rank 0] PRINT: step:8200/10000 val_loss:3.6874 svd_entropy: attn_qk:H=0.7753,top10E=0.24,eRank=200.2,q75/q25=80.74 attn_vo:H=0.8043,top10E=0.07,eRank=330.4,q75/q25=inf mlp_w1:H=0.8742,top10E=0.15,eRank=340.7,q75/q25=7.99 mlp_w2:H=0.9298,top10E=0.10,eRank=483.1,q75/q25=4.63 vo_prod:H=0.6574,top10E=0.13,eRank=173.2,q75/q25=inf train_time:733430ms step_avg:89.44ms +[2025-08-22 15:11:35] [Rank 0] step:8201/10000 train_time:733445ms step_avg:89.43ms +[2025-08-22 15:11:35] [Rank 0] step:8201/10000 train_time:733445ms step_avg:89.43ms +[2025-08-22 15:11:37] [Rank 0] step:8221/10000 train_time:735268ms step_avg:89.44ms +[2025-08-22 15:11:37] [Rank 0] step:8221/10000 train_time:735268ms step_avg:89.44ms +[2025-08-22 15:11:39] [Rank 0] step:8241/10000 train_time:737176ms step_avg:89.45ms +[2025-08-22 
15:11:39] [Rank 0] step:8241/10000 train_time:737176ms step_avg:89.45ms +[2025-08-22 15:11:41] [Rank 0] step:8261/10000 train_time:739083ms step_avg:89.47ms +[2025-08-22 15:11:41] [Rank 0] step:8261/10000 train_time:739083ms step_avg:89.47ms +[2025-08-22 15:11:43] [Rank 0] step:8281/10000 train_time:741016ms step_avg:89.48ms +[2025-08-22 15:11:43] [Rank 0] step:8281/10000 train_time:741016ms step_avg:89.48ms +[2025-08-22 15:11:45] [Rank 0] step:8301/10000 train_time:742919ms step_avg:89.50ms +[2025-08-22 15:11:45] [Rank 0] step:8301/10000 train_time:742919ms step_avg:89.50ms +[2025-08-22 15:11:47] [Rank 0] step:8321/10000 train_time:744818ms step_avg:89.51ms +[2025-08-22 15:11:47] [Rank 0] step:8321/10000 train_time:744818ms step_avg:89.51ms +[2025-08-22 15:11:49] [Rank 0] step:8341/10000 train_time:746727ms step_avg:89.52ms +[2025-08-22 15:11:49] [Rank 0] step:8341/10000 train_time:746727ms step_avg:89.52ms +[2025-08-22 15:11:50] [Rank 0] step:8361/10000 train_time:748634ms step_avg:89.54ms +[2025-08-22 15:11:50] [Rank 0] step:8361/10000 train_time:748634ms step_avg:89.54ms +[2025-08-22 15:11:52] [Rank 0] step:8381/10000 train_time:750539ms step_avg:89.55ms +[2025-08-22 15:11:52] [Rank 0] step:8381/10000 train_time:750539ms step_avg:89.55ms +[2025-08-22 15:11:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:11:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:12:08] [Rank 0] PRINT: step:8400/10000 val_loss:3.6731 svd_entropy: attn_qk:H=0.7758,top10E=0.24,eRank=200.6,q75/q25=80.63 attn_vo:H=0.8046,top10E=0.07,eRank=331.0,q75/q25=inf mlp_w1:H=0.8748,top10E=0.14,eRank=341.9,q75/q25=7.95 mlp_w2:H=0.9300,top10E=0.10,eRank=484.0,q75/q25=4.61 vo_prod:H=0.6579,top10E=0.13,eRank=173.8,q75/q25=inf train_time:752536ms step_avg:89.59ms +[2025-08-22 15:12:08] [Rank 0] PRINT: step:8400/10000 val_loss:3.6731 svd_entropy: attn_qk:H=0.7758,top10E=0.24,eRank=200.6,q75/q25=80.63 attn_vo:H=0.8046,top10E=0.07,eRank=331.0,q75/q25=inf mlp_w1:H=0.8748,top10E=0.14,eRank=341.9,q75/q25=7.95 mlp_w2:H=0.9300,top10E=0.10,eRank=484.0,q75/q25=4.61 vo_prod:H=0.6579,top10E=0.13,eRank=173.8,q75/q25=inf train_time:752536ms step_avg:89.59ms +[2025-08-22 15:12:08] [Rank 0] step:8401/10000 train_time:752550ms step_avg:89.58ms +[2025-08-22 15:12:08] [Rank 0] step:8401/10000 train_time:752550ms step_avg:89.58ms +[2025-08-22 15:12:10] [Rank 0] step:8421/10000 train_time:754366ms step_avg:89.58ms +[2025-08-22 15:12:10] [Rank 0] step:8421/10000 train_time:754366ms step_avg:89.58ms +[2025-08-22 15:12:12] [Rank 0] step:8441/10000 train_time:756262ms step_avg:89.59ms +[2025-08-22 15:12:12] [Rank 0] step:8441/10000 train_time:756262ms step_avg:89.59ms +[2025-08-22 15:12:14] [Rank 0] step:8461/10000 train_time:758161ms step_avg:89.61ms +[2025-08-22 15:12:14] [Rank 0] step:8461/10000 train_time:758161ms step_avg:89.61ms +[2025-08-22 15:12:16] [Rank 0] step:8481/10000 train_time:760071ms step_avg:89.62ms +[2025-08-22 15:12:16] [Rank 0] step:8481/10000 train_time:760071ms step_avg:89.62ms +[2025-08-22 15:12:18] [Rank 0] step:8501/10000 train_time:761994ms step_avg:89.64ms +[2025-08-22 15:12:18] [Rank 0] step:8501/10000 train_time:761994ms step_avg:89.64ms +[2025-08-22 15:12:19] [Rank 0] step:8521/10000 train_time:763902ms step_avg:89.65ms +[2025-08-22 15:12:19] [Rank 0] step:8521/10000 train_time:763902ms step_avg:89.65ms +[2025-08-22 15:12:21] [Rank 0] 
step:8541/10000 train_time:765821ms step_avg:89.66ms +[2025-08-22 15:12:21] [Rank 0] step:8541/10000 train_time:765821ms step_avg:89.66ms +[2025-08-22 15:12:23] [Rank 0] step:8561/10000 train_time:767731ms step_avg:89.68ms +[2025-08-22 15:12:23] [Rank 0] step:8561/10000 train_time:767731ms step_avg:89.68ms +[2025-08-22 15:12:25] [Rank 0] step:8581/10000 train_time:769640ms step_avg:89.69ms +[2025-08-22 15:12:25] [Rank 0] step:8581/10000 train_time:769640ms step_avg:89.69ms +[2025-08-22 15:12:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:12:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:12:41] [Rank 0] PRINT: step:8600/10000 val_loss:3.6649 svd_entropy: attn_qk:H=0.7762,top10E=0.24,eRank=201.0,q75/q25=80.66 attn_vo:H=0.8049,top10E=0.07,eRank=331.5,q75/q25=inf mlp_w1:H=0.8753,top10E=0.14,eRank=343.0,q75/q25=7.92 mlp_w2:H=0.9303,top10E=0.10,eRank=484.7,q75/q25=4.59 vo_prod:H=0.6583,top10E=0.13,eRank=174.3,q75/q25=inf train_time:771638ms step_avg:89.73ms +[2025-08-22 15:12:41] [Rank 0] PRINT: step:8600/10000 val_loss:3.6649 svd_entropy: attn_qk:H=0.7762,top10E=0.24,eRank=201.0,q75/q25=80.66 attn_vo:H=0.8049,top10E=0.07,eRank=331.5,q75/q25=inf mlp_w1:H=0.8753,top10E=0.14,eRank=343.0,q75/q25=7.92 mlp_w2:H=0.9303,top10E=0.10,eRank=484.7,q75/q25=4.59 vo_prod:H=0.6583,top10E=0.13,eRank=174.3,q75/q25=inf train_time:771638ms step_avg:89.73ms +[2025-08-22 15:12:41] [Rank 0] step:8601/10000 train_time:771652ms step_avg:89.72ms +[2025-08-22 15:12:41] [Rank 0] step:8601/10000 train_time:771652ms step_avg:89.72ms +[2025-08-22 15:12:43] [Rank 0] step:8621/10000 train_time:773471ms step_avg:89.72ms +[2025-08-22 15:12:43] [Rank 0] step:8621/10000 train_time:773471ms step_avg:89.72ms +[2025-08-22 15:12:45] [Rank 0] step:8641/10000 train_time:775371ms step_avg:89.73ms +[2025-08-22 
15:12:45] [Rank 0] step:8641/10000 train_time:775371ms step_avg:89.73ms +[2025-08-22 15:12:46] [Rank 0] step:8661/10000 train_time:777274ms step_avg:89.74ms +[2025-08-22 15:12:46] [Rank 0] step:8661/10000 train_time:777274ms step_avg:89.74ms +[2025-08-22 15:12:48] [Rank 0] step:8681/10000 train_time:779177ms step_avg:89.76ms +[2025-08-22 15:12:48] [Rank 0] step:8681/10000 train_time:779177ms step_avg:89.76ms +[2025-08-22 15:12:50] [Rank 0] step:8701/10000 train_time:781073ms step_avg:89.77ms +[2025-08-22 15:12:50] [Rank 0] step:8701/10000 train_time:781073ms step_avg:89.77ms +[2025-08-22 15:12:52] [Rank 0] step:8721/10000 train_time:782977ms step_avg:89.78ms +[2025-08-22 15:12:52] [Rank 0] step:8721/10000 train_time:782977ms step_avg:89.78ms +[2025-08-22 15:12:54] [Rank 0] step:8741/10000 train_time:784870ms step_avg:89.79ms +[2025-08-22 15:12:54] [Rank 0] step:8741/10000 train_time:784870ms step_avg:89.79ms +[2025-08-22 15:12:56] [Rank 0] step:8761/10000 train_time:786774ms step_avg:89.80ms +[2025-08-22 15:12:56] [Rank 0] step:8761/10000 train_time:786774ms step_avg:89.80ms +[2025-08-22 15:12:58] [Rank 0] step:8781/10000 train_time:788678ms step_avg:89.82ms +[2025-08-22 15:12:58] [Rank 0] step:8781/10000 train_time:788678ms step_avg:89.82ms +[2025-08-22 15:13:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:13:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:13:13] [Rank 0] PRINT: step:8800/10000 val_loss:3.6502 svd_entropy: attn_qk:H=0.7766,top10E=0.24,eRank=201.3,q75/q25=80.28 attn_vo:H=0.8051,top10E=0.07,eRank=332.0,q75/q25=inf mlp_w1:H=0.8757,top10E=0.14,eRank=343.9,q75/q25=7.89 mlp_w2:H=0.9305,top10E=0.10,eRank=485.4,q75/q25=4.57 vo_prod:H=0.6587,top10E=0.13,eRank=174.8,q75/q25=inf train_time:790675ms step_avg:89.85ms +[2025-08-22 15:13:13] [Rank 0] PRINT: step:8800/10000 val_loss:3.6502 svd_entropy: attn_qk:H=0.7766,top10E=0.24,eRank=201.3,q75/q25=80.28 attn_vo:H=0.8051,top10E=0.07,eRank=332.0,q75/q25=inf mlp_w1:H=0.8757,top10E=0.14,eRank=343.9,q75/q25=7.89 mlp_w2:H=0.9305,top10E=0.10,eRank=485.4,q75/q25=4.57 vo_prod:H=0.6587,top10E=0.13,eRank=174.8,q75/q25=inf train_time:790675ms step_avg:89.85ms +[2025-08-22 15:13:13] [Rank 0] step:8801/10000 train_time:790690ms step_avg:89.84ms +[2025-08-22 15:13:13] [Rank 0] step:8801/10000 train_time:790690ms step_avg:89.84ms +[2025-08-22 15:13:15] [Rank 0] step:8821/10000 train_time:792488ms step_avg:89.84ms +[2025-08-22 15:13:15] [Rank 0] step:8821/10000 train_time:792488ms step_avg:89.84ms +[2025-08-22 15:13:17] [Rank 0] step:8841/10000 train_time:794406ms step_avg:89.85ms +[2025-08-22 15:13:17] [Rank 0] step:8841/10000 train_time:794406ms step_avg:89.85ms +[2025-08-22 15:13:19] [Rank 0] step:8861/10000 train_time:796301ms step_avg:89.87ms +[2025-08-22 15:13:19] [Rank 0] step:8861/10000 train_time:796301ms step_avg:89.87ms +[2025-08-22 15:13:21] [Rank 0] step:8881/10000 train_time:798202ms step_avg:89.88ms +[2025-08-22 15:13:21] [Rank 0] step:8881/10000 train_time:798202ms step_avg:89.88ms +[2025-08-22 15:13:23] [Rank 0] step:8901/10000 train_time:800105ms step_avg:89.89ms +[2025-08-22 15:13:23] [Rank 0] step:8901/10000 train_time:800105ms step_avg:89.89ms +[2025-08-22 15:13:25] [Rank 0] step:8921/10000 train_time:802017ms step_avg:89.90ms +[2025-08-22 15:13:25] [Rank 0] step:8921/10000 train_time:802017ms step_avg:89.90ms +[2025-08-22 15:13:27] [Rank 0] 
step:8941/10000 train_time:803927ms step_avg:89.91ms +[2025-08-22 15:13:27] [Rank 0] step:8941/10000 train_time:803927ms step_avg:89.91ms +[2025-08-22 15:13:29] [Rank 0] step:8961/10000 train_time:805827ms step_avg:89.93ms +[2025-08-22 15:13:29] [Rank 0] step:8961/10000 train_time:805827ms step_avg:89.93ms +[2025-08-22 15:13:31] [Rank 0] step:8981/10000 train_time:807730ms step_avg:89.94ms +[2025-08-22 15:13:31] [Rank 0] step:8981/10000 train_time:807730ms step_avg:89.94ms +[2025-08-22 15:13:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:13:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:13:46] [Rank 0] PRINT: step:9000/10000 val_loss:3.6388 svd_entropy: attn_qk:H=0.7769,top10E=0.24,eRank=201.6,q75/q25=80.10 attn_vo:H=0.8053,top10E=0.07,eRank=332.5,q75/q25=inf mlp_w1:H=0.8761,top10E=0.14,eRank=344.7,q75/q25=7.86 mlp_w2:H=0.9307,top10E=0.10,eRank=486.0,q75/q25=4.55 vo_prod:H=0.6590,top10E=0.13,eRank=175.3,q75/q25=inf train_time:809726ms step_avg:89.97ms +[2025-08-22 15:13:46] [Rank 0] PRINT: step:9000/10000 val_loss:3.6388 svd_entropy: attn_qk:H=0.7769,top10E=0.24,eRank=201.6,q75/q25=80.10 attn_vo:H=0.8053,top10E=0.07,eRank=332.5,q75/q25=inf mlp_w1:H=0.8761,top10E=0.14,eRank=344.7,q75/q25=7.86 mlp_w2:H=0.9307,top10E=0.10,eRank=486.0,q75/q25=4.55 vo_prod:H=0.6590,top10E=0.13,eRank=175.3,q75/q25=inf train_time:809726ms step_avg:89.97ms +[2025-08-22 15:13:46] [Rank 0] step:9001/10000 train_time:809741ms step_avg:89.96ms +[2025-08-22 15:13:46] [Rank 0] step:9001/10000 train_time:809741ms step_avg:89.96ms +[2025-08-22 15:13:48] [Rank 0] step:9021/10000 train_time:811552ms step_avg:89.96ms +[2025-08-22 15:13:48] [Rank 0] step:9021/10000 train_time:811552ms step_avg:89.96ms +[2025-08-22 15:13:50] [Rank 0] step:9041/10000 train_time:813456ms step_avg:89.97ms +[2025-08-22 
15:13:50] [Rank 0] step:9041/10000 train_time:813456ms step_avg:89.97ms +[2025-08-22 15:13:52] [Rank 0] step:9061/10000 train_time:815371ms step_avg:89.99ms +[2025-08-22 15:13:52] [Rank 0] step:9061/10000 train_time:815371ms step_avg:89.99ms +[2025-08-22 15:13:54] [Rank 0] step:9081/10000 train_time:817286ms step_avg:90.00ms +[2025-08-22 15:13:54] [Rank 0] step:9081/10000 train_time:817286ms step_avg:90.00ms +[2025-08-22 15:13:56] [Rank 0] step:9101/10000 train_time:819212ms step_avg:90.01ms +[2025-08-22 15:13:56] [Rank 0] step:9101/10000 train_time:819212ms step_avg:90.01ms +[2025-08-22 15:13:58] [Rank 0] step:9121/10000 train_time:821127ms step_avg:90.03ms +[2025-08-22 15:13:58] [Rank 0] step:9121/10000 train_time:821127ms step_avg:90.03ms +[2025-08-22 15:14:00] [Rank 0] step:9141/10000 train_time:823026ms step_avg:90.04ms +[2025-08-22 15:14:00] [Rank 0] step:9141/10000 train_time:823026ms step_avg:90.04ms +[2025-08-22 15:14:01] [Rank 0] step:9161/10000 train_time:824931ms step_avg:90.05ms +[2025-08-22 15:14:01] [Rank 0] step:9161/10000 train_time:824931ms step_avg:90.05ms +[2025-08-22 15:14:03] [Rank 0] step:9181/10000 train_time:826869ms step_avg:90.06ms +[2025-08-22 15:14:03] [Rank 0] step:9181/10000 train_time:826869ms step_avg:90.06ms +[2025-08-22 15:14:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:14:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:14:19] [Rank 0] PRINT: step:9200/10000 val_loss:3.6289 svd_entropy: attn_qk:H=0.7772,top10E=0.24,eRank=201.9,q75/q25=80.13 attn_vo:H=0.8055,top10E=0.07,eRank=332.8,q75/q25=inf mlp_w1:H=0.8764,top10E=0.14,eRank=345.5,q75/q25=7.85 mlp_w2:H=0.9309,top10E=0.10,eRank=486.6,q75/q25=4.54 vo_prod:H=0.6594,top10E=0.13,eRank=175.7,q75/q25=inf train_time:828868ms step_avg:90.09ms +[2025-08-22 15:14:19] [Rank 0] PRINT: step:9200/10000 val_loss:3.6289 svd_entropy: attn_qk:H=0.7772,top10E=0.24,eRank=201.9,q75/q25=80.13 attn_vo:H=0.8055,top10E=0.07,eRank=332.8,q75/q25=inf mlp_w1:H=0.8764,top10E=0.14,eRank=345.5,q75/q25=7.85 mlp_w2:H=0.9309,top10E=0.10,eRank=486.6,q75/q25=4.54 vo_prod:H=0.6594,top10E=0.13,eRank=175.7,q75/q25=inf train_time:828868ms step_avg:90.09ms +[2025-08-22 15:14:19] [Rank 0] step:9201/10000 train_time:828883ms step_avg:90.09ms +[2025-08-22 15:14:19] [Rank 0] step:9201/10000 train_time:828883ms step_avg:90.09ms +[2025-08-22 15:14:21] [Rank 0] step:9221/10000 train_time:830703ms step_avg:90.09ms +[2025-08-22 15:14:21] [Rank 0] step:9221/10000 train_time:830703ms step_avg:90.09ms +[2025-08-22 15:14:23] [Rank 0] step:9241/10000 train_time:832617ms step_avg:90.10ms +[2025-08-22 15:14:23] [Rank 0] step:9241/10000 train_time:832617ms step_avg:90.10ms +[2025-08-22 15:14:25] [Rank 0] step:9261/10000 train_time:834531ms step_avg:90.11ms +[2025-08-22 15:14:25] [Rank 0] step:9261/10000 train_time:834531ms step_avg:90.11ms +[2025-08-22 15:14:26] [Rank 0] step:9281/10000 train_time:836426ms step_avg:90.12ms +[2025-08-22 15:14:26] [Rank 0] step:9281/10000 train_time:836426ms step_avg:90.12ms +[2025-08-22 15:14:28] [Rank 0] step:9301/10000 train_time:838324ms step_avg:90.13ms +[2025-08-22 15:14:28] [Rank 0] step:9301/10000 train_time:838324ms step_avg:90.13ms +[2025-08-22 15:14:30] [Rank 0] step:9321/10000 train_time:840236ms step_avg:90.14ms +[2025-08-22 15:14:30] [Rank 0] step:9321/10000 train_time:840236ms step_avg:90.14ms +[2025-08-22 15:14:32] [Rank 0] 
step:9341/10000 train_time:842142ms step_avg:90.16ms +[2025-08-22 15:14:32] [Rank 0] step:9341/10000 train_time:842142ms step_avg:90.16ms +[2025-08-22 15:14:34] [Rank 0] step:9361/10000 train_time:844054ms step_avg:90.17ms +[2025-08-22 15:14:34] [Rank 0] step:9361/10000 train_time:844054ms step_avg:90.17ms +[2025-08-22 15:14:36] [Rank 0] step:9381/10000 train_time:845974ms step_avg:90.18ms +[2025-08-22 15:14:36] [Rank 0] step:9381/10000 train_time:845974ms step_avg:90.18ms +[2025-08-22 15:14:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:14:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:14:52] [Rank 0] PRINT: step:9400/10000 val_loss:3.6194 svd_entropy: attn_qk:H=0.7774,top10E=0.24,eRank=202.1,q75/q25=80.22 attn_vo:H=0.8056,top10E=0.07,eRank=333.1,q75/q25=inf mlp_w1:H=0.8767,top10E=0.14,eRank=346.1,q75/q25=7.82 mlp_w2:H=0.9310,top10E=0.10,eRank=487.0,q75/q25=4.54 vo_prod:H=0.6596,top10E=0.13,eRank=176.1,q75/q25=inf train_time:847982ms step_avg:90.21ms +[2025-08-22 15:14:52] [Rank 0] PRINT: step:9400/10000 val_loss:3.6194 svd_entropy: attn_qk:H=0.7774,top10E=0.24,eRank=202.1,q75/q25=80.22 attn_vo:H=0.8056,top10E=0.07,eRank=333.1,q75/q25=inf mlp_w1:H=0.8767,top10E=0.14,eRank=346.1,q75/q25=7.82 mlp_w2:H=0.9310,top10E=0.10,eRank=487.0,q75/q25=4.54 vo_prod:H=0.6596,top10E=0.13,eRank=176.1,q75/q25=inf train_time:847982ms step_avg:90.21ms +[2025-08-22 15:14:52] [Rank 0] step:9401/10000 train_time:847997ms step_avg:90.20ms +[2025-08-22 15:14:52] [Rank 0] step:9401/10000 train_time:847997ms step_avg:90.20ms +[2025-08-22 15:14:54] [Rank 0] step:9421/10000 train_time:849818ms step_avg:90.20ms +[2025-08-22 15:14:54] [Rank 0] step:9421/10000 train_time:849818ms step_avg:90.20ms +[2025-08-22 15:14:55] [Rank 0] step:9441/10000 train_time:851724ms step_avg:90.22ms +[2025-08-22 
15:14:55] [Rank 0] step:9441/10000 train_time:851724ms step_avg:90.22ms +[2025-08-22 15:14:57] [Rank 0] step:9461/10000 train_time:853633ms step_avg:90.23ms +[2025-08-22 15:14:57] [Rank 0] step:9461/10000 train_time:853633ms step_avg:90.23ms +[2025-08-22 15:14:59] [Rank 0] step:9481/10000 train_time:855539ms step_avg:90.24ms +[2025-08-22 15:14:59] [Rank 0] step:9481/10000 train_time:855539ms step_avg:90.24ms +[2025-08-22 15:15:01] [Rank 0] step:9501/10000 train_time:857458ms step_avg:90.25ms +[2025-08-22 15:15:01] [Rank 0] step:9501/10000 train_time:857458ms step_avg:90.25ms +[2025-08-22 15:15:03] [Rank 0] step:9521/10000 train_time:859355ms step_avg:90.26ms +[2025-08-22 15:15:03] [Rank 0] step:9521/10000 train_time:859355ms step_avg:90.26ms +[2025-08-22 15:15:05] [Rank 0] step:9541/10000 train_time:861259ms step_avg:90.27ms +[2025-08-22 15:15:05] [Rank 0] step:9541/10000 train_time:861259ms step_avg:90.27ms +[2025-08-22 15:15:07] [Rank 0] step:9561/10000 train_time:863158ms step_avg:90.28ms +[2025-08-22 15:15:07] [Rank 0] step:9561/10000 train_time:863158ms step_avg:90.28ms +[2025-08-22 15:15:09] [Rank 0] step:9581/10000 train_time:865063ms step_avg:90.29ms +[2025-08-22 15:15:09] [Rank 0] step:9581/10000 train_time:865063ms step_avg:90.29ms +[2025-08-22 15:15:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:15:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:15:24] [Rank 0] PRINT: step:9600/10000 val_loss:3.6103 svd_entropy: attn_qk:H=0.7776,top10E=0.23,eRank=202.3,q75/q25=80.11 attn_vo:H=0.8058,top10E=0.07,eRank=333.4,q75/q25=inf mlp_w1:H=0.8769,top10E=0.14,eRank=346.6,q75/q25=7.80 mlp_w2:H=0.9311,top10E=0.10,eRank=487.3,q75/q25=4.52 vo_prod:H=0.6598,top10E=0.13,eRank=176.4,q75/q25=inf train_time:867077ms step_avg:90.32ms +[2025-08-22 15:15:24] [Rank 0] PRINT: step:9600/10000 val_loss:3.6103 svd_entropy: attn_qk:H=0.7776,top10E=0.23,eRank=202.3,q75/q25=80.11 attn_vo:H=0.8058,top10E=0.07,eRank=333.4,q75/q25=inf mlp_w1:H=0.8769,top10E=0.14,eRank=346.6,q75/q25=7.80 mlp_w2:H=0.9311,top10E=0.10,eRank=487.3,q75/q25=4.52 vo_prod:H=0.6598,top10E=0.13,eRank=176.4,q75/q25=inf train_time:867077ms step_avg:90.32ms +[2025-08-22 15:15:24] [Rank 0] step:9601/10000 train_time:867092ms step_avg:90.31ms +[2025-08-22 15:15:24] [Rank 0] step:9601/10000 train_time:867092ms step_avg:90.31ms +[2025-08-22 15:15:26] [Rank 0] step:9621/10000 train_time:868899ms step_avg:90.31ms +[2025-08-22 15:15:26] [Rank 0] step:9621/10000 train_time:868899ms step_avg:90.31ms +[2025-08-22 15:15:28] [Rank 0] step:9641/10000 train_time:870805ms step_avg:90.32ms +[2025-08-22 15:15:28] [Rank 0] step:9641/10000 train_time:870805ms step_avg:90.32ms +[2025-08-22 15:15:30] [Rank 0] step:9661/10000 train_time:872741ms step_avg:90.34ms +[2025-08-22 15:15:30] [Rank 0] step:9661/10000 train_time:872741ms step_avg:90.34ms +[2025-08-22 15:15:32] [Rank 0] step:9681/10000 train_time:874665ms step_avg:90.35ms +[2025-08-22 15:15:32] [Rank 0] step:9681/10000 train_time:874665ms step_avg:90.35ms +[2025-08-22 15:15:34] [Rank 0] step:9701/10000 train_time:876610ms step_avg:90.36ms +[2025-08-22 15:15:34] [Rank 0] step:9701/10000 train_time:876610ms step_avg:90.36ms +[2025-08-22 15:15:36] [Rank 0] step:9721/10000 train_time:878533ms step_avg:90.37ms +[2025-08-22 15:15:36] [Rank 0] step:9721/10000 train_time:878533ms step_avg:90.37ms +[2025-08-22 15:15:38] [Rank 0] 
step:9741/10000 train_time:880486ms step_avg:90.39ms +[2025-08-22 15:15:38] [Rank 0] step:9741/10000 train_time:880486ms step_avg:90.39ms +[2025-08-22 15:15:40] [Rank 0] step:9761/10000 train_time:882423ms step_avg:90.40ms +[2025-08-22 15:15:40] [Rank 0] step:9761/10000 train_time:882423ms step_avg:90.40ms +[2025-08-22 15:15:42] [Rank 0] step:9781/10000 train_time:884365ms step_avg:90.42ms +[2025-08-22 15:15:42] [Rank 0] step:9781/10000 train_time:884365ms step_avg:90.42ms +[2025-08-22 15:15:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:15:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:15:57] [Rank 0] PRINT: step:9800/10000 val_loss:3.6030 svd_entropy: attn_qk:H=0.7777,top10E=0.23,eRank=202.4,q75/q25=80.13 attn_vo:H=0.8059,top10E=0.07,eRank=333.6,q75/q25=inf mlp_w1:H=0.8771,top10E=0.14,eRank=347.0,q75/q25=7.79 mlp_w2:H=0.9312,top10E=0.10,eRank=487.6,q75/q25=4.52 vo_prod:H=0.6600,top10E=0.13,eRank=176.6,q75/q25=inf train_time:886414ms step_avg:90.45ms +[2025-08-22 15:15:57] [Rank 0] PRINT: step:9800/10000 val_loss:3.6030 svd_entropy: attn_qk:H=0.7777,top10E=0.23,eRank=202.4,q75/q25=80.13 attn_vo:H=0.8059,top10E=0.07,eRank=333.6,q75/q25=inf mlp_w1:H=0.8771,top10E=0.14,eRank=347.0,q75/q25=7.79 mlp_w2:H=0.9312,top10E=0.10,eRank=487.6,q75/q25=4.52 vo_prod:H=0.6600,top10E=0.13,eRank=176.6,q75/q25=inf train_time:886414ms step_avg:90.45ms +[2025-08-22 15:15:57] [Rank 0] step:9801/10000 train_time:886429ms step_avg:90.44ms +[2025-08-22 15:15:57] [Rank 0] step:9801/10000 train_time:886429ms step_avg:90.44ms +[2025-08-22 15:15:59] [Rank 0] step:9821/10000 train_time:888267ms step_avg:90.45ms +[2025-08-22 15:15:59] [Rank 0] step:9821/10000 train_time:888267ms step_avg:90.45ms +[2025-08-22 15:16:01] [Rank 0] step:9841/10000 train_time:890211ms step_avg:90.46ms +[2025-08-22 
15:16:01] [Rank 0] step:9841/10000 train_time:890211ms step_avg:90.46ms +[2025-08-22 15:16:03] [Rank 0] step:9861/10000 train_time:892133ms step_avg:90.47ms +[2025-08-22 15:16:03] [Rank 0] step:9861/10000 train_time:892133ms step_avg:90.47ms +[2025-08-22 15:16:05] [Rank 0] step:9881/10000 train_time:894058ms step_avg:90.48ms +[2025-08-22 15:16:05] [Rank 0] step:9881/10000 train_time:894058ms step_avg:90.48ms +[2025-08-22 15:16:07] [Rank 0] step:9901/10000 train_time:895997ms step_avg:90.50ms +[2025-08-22 15:16:07] [Rank 0] step:9901/10000 train_time:895997ms step_avg:90.50ms +[2025-08-22 15:16:09] [Rank 0] step:9921/10000 train_time:897930ms step_avg:90.51ms +[2025-08-22 15:16:09] [Rank 0] step:9921/10000 train_time:897930ms step_avg:90.51ms +[2025-08-22 15:16:11] [Rank 0] step:9941/10000 train_time:899868ms step_avg:90.52ms +[2025-08-22 15:16:11] [Rank 0] step:9941/10000 train_time:899868ms step_avg:90.52ms +[2025-08-22 15:16:13] [Rank 0] step:9961/10000 train_time:901799ms step_avg:90.53ms +[2025-08-22 15:16:13] [Rank 0] step:9961/10000 train_time:901799ms step_avg:90.53ms +[2025-08-22 15:16:15] [Rank 0] step:9981/10000 train_time:903737ms step_avg:90.55ms +[2025-08-22 15:16:15] [Rank 0] step:9981/10000 train_time:903737ms step_avg:90.55ms +[2025-08-22 15:16:17] [Rank 0] step:10000/10000 train_time:905579ms step_avg:90.56ms +[2025-08-22 15:16:17] [Rank 0] step:10000/10000 train_time:905579ms step_avg:90.56ms +[2025-08-22 15:16:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:16:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:16:30] [Rank 0] PRINT: step:10000/10000 val_loss:3.5951 svd_entropy: attn_qk:H=0.7778,top10E=0.23,eRank=202.5,q75/q25=80.13 attn_vo:H=0.8059,top10E=0.07,eRank=333.8,q75/q25=inf mlp_w1:H=0.8772,top10E=0.14,eRank=347.3,q75/q25=7.78 mlp_w2:H=0.9312,top10E=0.10,eRank=487.8,q75/q25=4.52 vo_prod:H=0.6601,top10E=0.13,eRank=176.8,q75/q25=inf train_time:905779ms step_avg:90.58ms +[2025-08-22 15:16:30] [Rank 0] PRINT: step:10000/10000 val_loss:3.5951 svd_entropy: attn_qk:H=0.7778,top10E=0.23,eRank=202.5,q75/q25=80.13 attn_vo:H=0.8059,top10E=0.07,eRank=333.8,q75/q25=inf mlp_w1:H=0.8772,top10E=0.14,eRank=347.3,q75/q25=7.78 mlp_w2:H=0.9312,top10E=0.10,eRank=487.8,q75/q25=4.52 vo_prod:H=0.6601,top10E=0.13,eRank=176.8,q75/q25=inf train_time:905779ms step_avg:90.58ms +[2025-08-22 15:16:30] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 15:16:30 2025 --- +[2025-08-22 15:16:30] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 15:16:30 2025 --- +[2025-08-22 15:16:30] [Rank 0] PRINT: Peak memory allocated: 11530 MiB reserved: 15516 MiB +[2025-08-22 15:16:30] [Rank 0] PRINT: Peak memory allocated: 11530 MiB reserved: 15516 MiB diff --git a/logs_svd_gated/mode_2_param_gated_seed_43/config.json b/logs_svd_gated/mode_2_param_gated_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..7ce604c8e0ee5e084c40c155c6c06175e3afd872 --- /dev/null +++ b/logs_svd_gated/mode_2_param_gated_seed_43/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 2, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + "cooldown_frac": 
0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "3571aa52-76a2-4ef1-9971-b3c8b0bdc2b9", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_2_param_gated_seed_43/training_log_3571aa52-76a2-4ef1-9971-b3c8b0bdc2b9.txt b/logs_svd_gated/mode_2_param_gated_seed_43/training_log_3571aa52-76a2-4ef1-9971-b3c8b0bdc2b9.txt new file mode 100644 index 0000000000000000000000000000000000000000..04901dd24b197661f82241c833c3fde7e8ea868b --- /dev/null +++ b/logs_svd_gated/mode_2_param_gated_seed_43/training_log_3571aa52-76a2-4ef1-9971-b3c8b0bdc2b9.txt @@ -0,0 +1,2926 @@ +[2025-08-22 20:02:41] [Rank 0] PRINT: --- Script Start: Fri Aug 22 20:02:41 2025 --- +[2025-08-22 20:02:41] [Rank 0] PRINT: --- Script Start: Fri Aug 22 20:02:41 2025 --- +[2025-08-22 20:02:41] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=2, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 20:02:41] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=2, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 20:02:41] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 20:02:41] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 20:02:41] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 20:02:41] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 20:02:41] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_2_param_gated_seed_43 +[2025-08-22 20:02:41] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_2_param_gated_seed_43 +[2025-08-22 20:02:41] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import 
argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k 
= min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
)
parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"])
parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices")
parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices")
parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs")
exp_args = parser.parse_args()
set_seed(exp_args.seed)

# --- MODIFICATION: Import correct GPT model based on --unet flag ---
# Exactly one GPT implementation is imported, selected by CLI flags; all later
# code refers to the single name `GPT`.
if exp_args.unet:
    print("Using U-net architecture")
    from models.nano_GPT_unet import GPT
elif exp_args.model_parameterization == "qkvo":
    print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w")
    # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w

    from models.nano_GPT_qkvo import GPT

elif exp_args.model_parameterization == "norope":
    print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w")
    from models.nano_GPT_norope import GPT

elif exp_args.model_parameterization == "gated":
    print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w")
    from models.nano_GPT_gated import GPT

elif exp_args.model_parameterization == "whole":
    print("Using original architecture")
    from models.nano_GPT import GPT

# NOTE(review): none of the attributes below carry type annotations, so the
# @dataclass decorator registers NO fields — repr() prints "Hyperparameters()"
# and dataclasses.asdict() would be empty. Confirm this is intentional (the
# config.json dump works around it by reading __class__.__dict__).
@dataclass
class Hyperparameters:
    # data

    #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin"
    #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin"
    train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin"
    val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin"
    val_tokens = 1966080
    #val_tokens = 10485760
    train_seq_len = 12*1024
    val_seq_len = 4*16*1024

    #train_seq_len = 48*1024 # FlexAttention sequence length
    #train_seq_len = 12*1024 # FlexAttention sequence length
    #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation

    # optimization
    num_iterations = 10000 #1770 # Original: 1770
    cooldown_frac = 0.4
    # architecture

    vocab_size = 50257

    # evaluation and logging
    val_loss_every = 200 # Original: 125
    save_checkpoint = False
args = Hyperparameters()

# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used)
# RANK/LOCAL_RANK/WORLD_SIZE default to single-process values when unset.
rank = int(os.environ.get("RANK", 0))
local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting
world_size = int(os.environ.get("WORLD_SIZE", 1))

# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug

assert torch.cuda.is_available()
device = torch.device("cuda", local_rank) # Use local_rank for device
torch.cuda.set_device(device)

if not dist.is_initialized(): # Ensure DDP is initialized only once
    dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size
dist.barrier()
master_process = (rank == 0)

# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename)
logfile = None
# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir ---
#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes"
#if master_process:
#    run_id = uuid.uuid4()
#    os.makedirs(log_dir, exist_ok=True) # Create new log directory
#    logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt"
#    print(f"Logging to: {logfile}")

# NOTE(review): logfile is assigned None twice (also a few lines above) —
# harmless, but one of the assignments is redundant.
logfile = None
run_dir_path_str = None

base_log_dir = Path(exp_args.base_dir)

if master_process:
    # Set seed again specifically for master process for operations like dir creation, config saving
    set_seed(exp_args.seed)

    # Construct folder name based on config and seed
    run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence
    # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094
    optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True)
    optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size)
    optimizers = [optimizer1, optimizer2]

# Remember each group's base LR so get_lr() can scale it multiplicatively.
for opt in optimizers:
    for group in opt.param_groups:
        group["initial_lr"] = group["lr"]

# learning rate schedule: stable then decay (KEEP AS IS, but check assert)
def get_lr(step: int):
    # Returns a multiplier on initial_lr: 1.0 for the stable phase, then a
    # linear ramp from 1.0 down to 0.1 over the final cooldown_frac of training.
    x = step / args.num_iterations # progress in training
    # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations
    # --- MODIFICATION: Adjust assert for LR schedule ---
    if not (0 <= x <= 1): # Allow x=1 for the last step
        x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations
        # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log

    if x < 1 - args.cooldown_frac:
        return 1.0
    else:
        # Ensure cooldown_frac is not zero to avoid division by zero
        w = (1 - x) / max(args.cooldown_frac, 1e-9)
        return w * 1.0 + (1 - w) * 0.1

# attention window size schedule (KEEP AS IS)
def next_multiple_of_n(v: float | int, *, n: int):
    # Smallest multiple of n that is >= v.
    return next(x for x in range(n, int(v) + 1 + n, n) if x >= v)
@lru_cache(1)
def get_window_size_blocks_helper(window_size: int):
    # Cached (size 1) so repeated calls with the same window reuse one tensor.
    return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True)
def get_window_size_blocks(step: int):
    # Window grows linearly with training progress, in 128-token blocks.
    x = step / args.num_iterations # progress in training
    # --- MODIFICATION: Adjust assert for window size schedule ---
    if not (0 <= x <= 1):
        x = min(max(x, 0.0), 1.0) # Clamp x

    # Ensure window_size is at least 128
    window_size = max(128, next_multiple_of_n(1728 * x, n=128))
    return get_window_size_blocks_helper(window_size)

print0("PRINT: Compiling model with TorchInductor...", console=True)
# Use 'model' for compilation, not 'model_compiled' before it's defined
model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune")
print0("PRINT: Model compilation complete.", console=True)

########################################
#        Warmup kernels                #
########################################
print0("PRINT: Starting warmup...", console=True)
warmup_steps = 10
# Snapshot model+optimizer state so the warmup's parameter updates can be
# fully rolled back before real training starts.
initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled
                     optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers])
for i in range(warmup_steps):
    # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose
    inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda")
    loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled
    loss.backward()
    for param in model_compiled.parameters(): # Use model_compiled
        if param.grad is not None:
            dist.all_reduce(param.grad, op=dist.ReduceOp.AVG)
    for opt in optimizers:
        opt.step()
    model_compiled.zero_grad(set_to_none=True) # Use model_compiled
model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled
for opt, opt_state in zip(optimizers, initial_state["optimizers"]):
    opt.load_state_dict(opt_state)
del initial_state
print0("PRINT: Warmup complete.", console=True)
torch.cuda.synchronize()


params_to_analyze = []

# NOTE(review): "norope" is not handled here even though it builds the same
# parameter groups as "qkvo" above, so it gets no SVD analysis — confirm
# this is intentional.
if exp_args.model_parameterization == "whole":
    params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad]
elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated":
    params_to_analyze = all_attn_matrices + all_mlp_matrices
    matrix_groups_for_svd = {}
    if master_process:
        matrix_groups_for_svd = {
            "attn_qk": attn_qk_group,
            "attn_vo": attn_vo_group,
            "mlp_w1": mlp_w1_group,
            "mlp_w2": mlp_proj_params
        }



########################################
# Training and validation #
########################################
print0("PRINT: Starting training...", console=True)
train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size)
training_time_ms = 0
torch.cuda.synchronize()
t0 = time.perf_counter()
train_steps = args.num_iterations

for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation)
    last_step = (step == train_steps)

    # --------------- VALIDATION SECTION -----------------
    # Validate at step 0 (after warmup), at specified intervals, and at the very last step
    if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0):
        torch.cuda.synchronize()
        # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0)
        if step > 0 : # For step 0, t0 hasn't started a training segment yet
            current_run_time = 1000 * (time.perf_counter() - t0)
            training_time_ms += current_run_time

        model_compiled.eval() # Use model_compiled
        val_batch_size = world_size * args.val_seq_len
        # Ensure val_tokens is divisible by val_batch_size, or handle remainder
        if args.val_tokens % val_batch_size != 0:
            print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True)
        val_num_steps = args.val_tokens // val_batch_size

        # Fresh loader each validation pass: always evaluates the same leading tokens.
        val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size)
        val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device
        actual_val_steps = 0
        with torch.no_grad():
            for val_i in range(val_num_steps):
                try:
                    inputs, targets = next(val_loader)
                    loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled
                    val_loss_sum += loss_val
                    actual_val_steps += 1
                except StopIteration:
                    print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True)
                    break # Stop if data runs out

        if actual_val_steps > 0:
            val_loss_avg = val_loss_sum / actual_val_steps
        else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue)
            val_loss_avg = torch.tensor(float('nan'), device=device)
            print0(f"PRINT: Warning: No validation steps were completed. val_loss is NaN.", console=True)

        del val_loader # Clean up
        dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss

        # SVD spectral metrics, master process only (matrix_groups_for_svd is
        # only defined for the qkvo/gated parameterizations, hence the locals() guard).
        svd_log_str = ""
        if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd:
            TOPK = 10
            svd_results_by_category = {}

            with torch.no_grad():
                # per-category metrics (average over matrices in the group)
                for name, group_params in matrix_groups_for_svd.items():
                    if not group_params:
                        continue
                    mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params]
                    if mets:
                        avg_entropy = float(np.mean([m['entropy_norm'] for m in mets]))
                        avg_erank = float(np.mean([m['erank'] for m in mets]))
                        avg_topkE = float(np.mean([m['topk_energy'] for m in mets]))
                        avg_qratio = float(np.mean([m['q75_q25'] for m in mets]))
                        svd_results_by_category[name] = dict(
                            entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio
                        )

                # VO product as another category
                vo_mets = []
                num_layers = len(attn_v_params)
                for i in range(num_layers):
                    w_v = attn_v_params[i]
                    w_o = attn_o_params[i]
                    w_ov_product = torch.matmul(w_o, w_v)
                    vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK))
                if vo_mets:
                    svd_results_by_category['vo_prod'] = dict(
                        entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])),
                        erank=float(np.mean([m['erank'] for m in vo_mets])),
                        topkE=float(np.mean([m['topk_energy'] for m in vo_mets])),
                        q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])),
                    )

            # format logging string (append metrics after entropy)
            svd_log_parts = []
            for name, vals in svd_results_by_category.items():
                svd_log_parts.append(
                    f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}"
                )
            svd_log_str = " ".join(svd_log_parts)


        # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative.
        avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0
        print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True)

        model_compiled.train() # Switch back to train mode
        torch.cuda.synchronize()
        t0 = time.perf_counter() # Reset timer for the next training segment

    if last_step:
        if master_process and args.save_checkpoint:
            if run_dir_path_str: # Ensure run_dir_path_str is set by master process
                checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints"
                checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir
                checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt"
                log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled
                                      optimizers=[opt.state_dict() for opt in optimizers])
                torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save
                print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True)
            else:
                print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True)
        break

    # --------------- TRAINING SECTION -----------------
    try:
        inputs, targets = next(train_loader)
    except StopIteration:
        print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. Ending training early at step {step}.", console=True)
        break # End if data runs out

    loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled
    loss_train.backward()

    # Manual gradient averaging across ranks (no DDP wrapper is used).
    for param in model_compiled.parameters(): # Use model_compiled
        if param.grad is not None: # Check if grad exists
            dist.all_reduce(param.grad, op=dist.ReduceOp.AVG)

    current_lr_val = get_lr(step)
    for opt in optimizers:
        for group in opt.param_groups:
            group["lr"] = group["initial_lr"] * current_lr_val

    # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists ---
    if optimizer2 is not None: # Check if Muon optimizer was created
        for group in optimizer2.param_groups:
            frac = min(step / 300, 1) # momentum warmup for muon
            group["momentum"] = (1 - frac) * 0.85 + frac * 0.95

    for opt in optimizers:
        opt.step()

    model_compiled.zero_grad(set_to_none=True) # Use model_compiled

    # Logging (less frequent for training steps)
    if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val
        # This time is for the current segment since last validation / t0 reset
        current_segment_time_ms = 1000 * (time.perf_counter() - t0)
        # approx_training_time_ms is the total cumulative time
        approx_total_training_time_ms = training_time_ms + current_segment_time_ms

        total_tokens_in_batch = args.train_seq_len * world_size
        # NOTE(review): train_loss_per_token is computed but never used in the
        # log line below — dead code, or the print was meant to include it?
        train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item()

        print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too

print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True)
print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB "
       f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True)

if
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 20:02:41] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 20:02:41] [Rank 0] PRINT: Constructing model... +[2025-08-22 20:02:41] [Rank 0] PRINT: Constructing model... +[2025-08-22 20:02:43] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 20:02:43] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 20:02:43] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 20:02:43] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 20:02:43] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 20:02:43] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 20:02:43] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 2 +[2025-08-22 20:02:43] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 2 +[2025-08-22 20:02:43] [Rank 0] PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: 0.05). +[2025-08-22 20:02:43] [Rank 0] PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: 0.05). +[2025-08-22 20:02:43] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 20:02:43] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 20:02:43] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-08-22 20:02:43] [Rank 0] PRINT: Muon optimizer is active with 22 parameters. +[2025-08-22 20:02:43] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 20:02:43] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 20:02:43] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 20:02:43] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 20:02:43] [Rank 0] PRINT: Starting warmup... +[2025-08-22 20:02:43] [Rank 0] PRINT: Starting warmup... +[2025-08-22 20:03:27] [Rank 0] PRINT: Warmup complete. +[2025-08-22 20:03:27] [Rank 0] PRINT: Warmup complete. +[2025-08-22 20:03:27] [Rank 0] PRINT: Starting training... +[2025-08-22 20:03:27] [Rank 0] PRINT: Starting training... 
+[2025-08-22 20:03:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:03:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:03:44] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 20:03:44] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 20:03:46] [Rank 0] step:21/10000 train_time:1640ms step_avg:78.11ms +[2025-08-22 20:03:46] [Rank 0] step:21/10000 train_time:1640ms step_avg:78.11ms +[2025-08-22 20:03:48] [Rank 0] step:41/10000 train_time:3310ms step_avg:80.73ms +[2025-08-22 20:03:48] [Rank 0] step:41/10000 train_time:3310ms step_avg:80.73ms +[2025-08-22 20:03:50] [Rank 0] step:61/10000 train_time:4984ms step_avg:81.71ms +[2025-08-22 20:03:50] [Rank 0] step:61/10000 train_time:4984ms step_avg:81.71ms +[2025-08-22 20:03:51] [Rank 0] step:81/10000 train_time:6658ms step_avg:82.20ms +[2025-08-22 20:03:51] [Rank 0] step:81/10000 train_time:6658ms step_avg:82.20ms +[2025-08-22 20:03:53] [Rank 0] step:101/10000 train_time:8333ms step_avg:82.51ms +[2025-08-22 20:03:53] [Rank 0] step:101/10000 train_time:8333ms step_avg:82.51ms +[2025-08-22 20:03:55] [Rank 0] step:121/10000 train_time:10008ms step_avg:82.71ms +[2025-08-22 20:03:55] [Rank 0] step:121/10000 
train_time:10008ms step_avg:82.71ms +[2025-08-22 20:03:56] [Rank 0] step:141/10000 train_time:11686ms step_avg:82.88ms +[2025-08-22 20:03:56] [Rank 0] step:141/10000 train_time:11686ms step_avg:82.88ms +[2025-08-22 20:03:58] [Rank 0] step:161/10000 train_time:13365ms step_avg:83.01ms +[2025-08-22 20:03:58] [Rank 0] step:161/10000 train_time:13365ms step_avg:83.01ms +[2025-08-22 20:04:00] [Rank 0] step:181/10000 train_time:15048ms step_avg:83.14ms +[2025-08-22 20:04:00] [Rank 0] step:181/10000 train_time:15048ms step_avg:83.14ms +[2025-08-22 20:04:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:04:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:04:15] [Rank 0] PRINT: step:200/10000 val_loss:6.6418 svd_entropy: attn_qk:H=0.5429,top10E=0.62,eRank=108.0,q75/q25=26.38 attn_vo:H=0.7204,top10E=0.20,eRank=193.5,q75/q25=inf mlp_w1:H=0.3725,top10E=0.86,eRank=15.8,q75/q25=5.24 mlp_w2:H=0.3786,top10E=0.84,eRank=13.4,q75/q25=7.91 vo_prod:H=0.4374,top10E=0.51,eRank=34.8,q75/q25=inf train_time:16814ms step_avg:84.07ms +[2025-08-22 20:04:15] [Rank 0] PRINT: step:200/10000 val_loss:6.6418 svd_entropy: attn_qk:H=0.5429,top10E=0.62,eRank=108.0,q75/q25=26.38 attn_vo:H=0.7204,top10E=0.20,eRank=193.5,q75/q25=inf mlp_w1:H=0.3725,top10E=0.86,eRank=15.8,q75/q25=5.24 mlp_w2:H=0.3786,top10E=0.84,eRank=13.4,q75/q25=7.91 vo_prod:H=0.4374,top10E=0.51,eRank=34.8,q75/q25=inf train_time:16814ms step_avg:84.07ms +[2025-08-22 20:04:15] [Rank 0] step:201/10000 train_time:16829ms step_avg:83.73ms +[2025-08-22 20:04:15] [Rank 0] step:201/10000 train_time:16829ms step_avg:83.73ms +[2025-08-22 20:04:17] [Rank 0] step:221/10000 train_time:18431ms step_avg:83.40ms +[2025-08-22 20:04:17] [Rank 0] step:221/10000 train_time:18431ms step_avg:83.40ms +[2025-08-22 20:04:19] [Rank 0] step:241/10000 train_time:20107ms 
step_avg:83.43ms +[2025-08-22 20:04:19] [Rank 0] step:241/10000 train_time:20107ms step_avg:83.43ms +[2025-08-22 20:04:20] [Rank 0] step:261/10000 train_time:21783ms step_avg:83.46ms +[2025-08-22 20:04:20] [Rank 0] step:261/10000 train_time:21783ms step_avg:83.46ms +[2025-08-22 20:04:22] [Rank 0] step:281/10000 train_time:23458ms step_avg:83.48ms +[2025-08-22 20:04:22] [Rank 0] step:281/10000 train_time:23458ms step_avg:83.48ms +[2025-08-22 20:04:24] [Rank 0] step:301/10000 train_time:25138ms step_avg:83.52ms +[2025-08-22 20:04:24] [Rank 0] step:301/10000 train_time:25138ms step_avg:83.52ms +[2025-08-22 20:04:25] [Rank 0] step:321/10000 train_time:26818ms step_avg:83.55ms +[2025-08-22 20:04:25] [Rank 0] step:321/10000 train_time:26818ms step_avg:83.55ms +[2025-08-22 20:04:27] [Rank 0] step:341/10000 train_time:28497ms step_avg:83.57ms +[2025-08-22 20:04:27] [Rank 0] step:341/10000 train_time:28497ms step_avg:83.57ms +[2025-08-22 20:04:29] [Rank 0] step:361/10000 train_time:30177ms step_avg:83.59ms +[2025-08-22 20:04:29] [Rank 0] step:361/10000 train_time:30177ms step_avg:83.59ms +[2025-08-22 20:04:30] [Rank 0] step:381/10000 train_time:31857ms step_avg:83.61ms +[2025-08-22 20:04:30] [Rank 0] step:381/10000 train_time:31857ms step_avg:83.61ms +[2025-08-22 20:04:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:04:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:04:46] [Rank 0] PRINT: step:400/10000 val_loss:5.9305 svd_entropy: attn_qk:H=0.6220,top10E=0.49,eRank=120.2,q75/q25=47.17 attn_vo:H=0.6731,top10E=0.23,eRank=157.6,q75/q25=inf mlp_w1:H=0.5682,top10E=0.58,eRank=51.1,q75/q25=8.14 mlp_w2:H=0.5575,top10E=0.59,eRank=45.5,q75/q25=9.11 vo_prod:H=0.4423,top10E=0.47,eRank=42.8,q75/q25=inf train_time:33618ms step_avg:84.05ms +[2025-08-22 20:04:46] [Rank 0] PRINT: step:400/10000 val_loss:5.9305 svd_entropy: attn_qk:H=0.6220,top10E=0.49,eRank=120.2,q75/q25=47.17 attn_vo:H=0.6731,top10E=0.23,eRank=157.6,q75/q25=inf mlp_w1:H=0.5682,top10E=0.58,eRank=51.1,q75/q25=8.14 mlp_w2:H=0.5575,top10E=0.59,eRank=45.5,q75/q25=9.11 vo_prod:H=0.4423,top10E=0.47,eRank=42.8,q75/q25=inf train_time:33618ms step_avg:84.05ms +[2025-08-22 20:04:46] [Rank 0] step:401/10000 train_time:33634ms step_avg:83.87ms +[2025-08-22 20:04:46] [Rank 0] step:401/10000 train_time:33634ms step_avg:83.87ms +[2025-08-22 20:04:48] [Rank 0] step:421/10000 train_time:35237ms step_avg:83.70ms +[2025-08-22 20:04:48] [Rank 0] step:421/10000 train_time:35237ms step_avg:83.70ms +[2025-08-22 20:04:49] [Rank 0] step:441/10000 train_time:36909ms step_avg:83.69ms +[2025-08-22 20:04:49] [Rank 0] step:441/10000 train_time:36909ms step_avg:83.69ms +[2025-08-22 20:04:51] [Rank 0] step:461/10000 train_time:38581ms step_avg:83.69ms +[2025-08-22 20:04:51] [Rank 0] step:461/10000 train_time:38581ms step_avg:83.69ms +[2025-08-22 20:04:53] [Rank 0] step:481/10000 train_time:40254ms step_avg:83.69ms +[2025-08-22 20:04:53] [Rank 0] step:481/10000 train_time:40254ms step_avg:83.69ms +[2025-08-22 20:04:54] [Rank 0] step:501/10000 train_time:41925ms step_avg:83.68ms +[2025-08-22 20:04:54] [Rank 0] step:501/10000 train_time:41925ms step_avg:83.68ms +[2025-08-22 20:04:56] [Rank 0] step:521/10000 train_time:43597ms step_avg:83.68ms +[2025-08-22 20:04:56] [Rank 0] step:521/10000 train_time:43597ms step_avg:83.68ms +[2025-08-22 20:04:58] [Rank 0] step:541/10000 train_time:45268ms 
step_avg:83.68ms +[2025-08-22 20:04:58] [Rank 0] step:541/10000 train_time:45268ms step_avg:83.68ms +[2025-08-22 20:04:59] [Rank 0] step:561/10000 train_time:46941ms step_avg:83.67ms +[2025-08-22 20:04:59] [Rank 0] step:561/10000 train_time:46941ms step_avg:83.67ms +[2025-08-22 20:05:01] [Rank 0] step:581/10000 train_time:48614ms step_avg:83.67ms +[2025-08-22 20:05:01] [Rank 0] step:581/10000 train_time:48614ms step_avg:83.67ms +[2025-08-22 20:05:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:05:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:05:16] [Rank 0] PRINT: step:600/10000 val_loss:5.5492 svd_entropy: attn_qk:H=0.6454,top10E=0.45,eRank=125.7,q75/q25=45.55 attn_vo:H=0.6672,top10E=0.21,eRank=152.5,q75/q25=inf mlp_w1:H=0.6503,top10E=0.43,eRank=84.9,q75/q25=6.66 mlp_w2:H=0.6781,top10E=0.41,eRank=97.8,q75/q25=8.99 vo_prod:H=0.4726,top10E=0.40,eRank=49.9,q75/q25=inf train_time:50370ms step_avg:83.95ms +[2025-08-22 20:05:16] [Rank 0] PRINT: step:600/10000 val_loss:5.5492 svd_entropy: attn_qk:H=0.6454,top10E=0.45,eRank=125.7,q75/q25=45.55 attn_vo:H=0.6672,top10E=0.21,eRank=152.5,q75/q25=inf mlp_w1:H=0.6503,top10E=0.43,eRank=84.9,q75/q25=6.66 mlp_w2:H=0.6781,top10E=0.41,eRank=97.8,q75/q25=8.99 vo_prod:H=0.4726,top10E=0.40,eRank=49.9,q75/q25=inf train_time:50370ms step_avg:83.95ms +[2025-08-22 20:05:16] [Rank 0] step:601/10000 train_time:50384ms step_avg:83.83ms +[2025-08-22 20:05:16] [Rank 0] step:601/10000 train_time:50384ms step_avg:83.83ms +[2025-08-22 20:05:18] [Rank 0] step:621/10000 train_time:51977ms step_avg:83.70ms +[2025-08-22 20:05:18] [Rank 0] step:621/10000 train_time:51977ms step_avg:83.70ms +[2025-08-22 20:05:20] [Rank 0] step:641/10000 train_time:53648ms step_avg:83.69ms +[2025-08-22 20:05:20] [Rank 0] step:641/10000 train_time:53648ms step_avg:83.69ms 
+[2025-08-22 20:05:22] [Rank 0] step:661/10000 train_time:55320ms step_avg:83.69ms +[2025-08-22 20:05:22] [Rank 0] step:661/10000 train_time:55320ms step_avg:83.69ms +[2025-08-22 20:05:23] [Rank 0] step:681/10000 train_time:56989ms step_avg:83.68ms +[2025-08-22 20:05:23] [Rank 0] step:681/10000 train_time:56989ms step_avg:83.68ms +[2025-08-22 20:05:25] [Rank 0] step:701/10000 train_time:58662ms step_avg:83.68ms +[2025-08-22 20:05:25] [Rank 0] step:701/10000 train_time:58662ms step_avg:83.68ms +[2025-08-22 20:05:27] [Rank 0] step:721/10000 train_time:60335ms step_avg:83.68ms +[2025-08-22 20:05:27] [Rank 0] step:721/10000 train_time:60335ms step_avg:83.68ms +[2025-08-22 20:05:28] [Rank 0] step:741/10000 train_time:62007ms step_avg:83.68ms +[2025-08-22 20:05:28] [Rank 0] step:741/10000 train_time:62007ms step_avg:83.68ms +[2025-08-22 20:05:30] [Rank 0] step:761/10000 train_time:63694ms step_avg:83.70ms +[2025-08-22 20:05:30] [Rank 0] step:761/10000 train_time:63694ms step_avg:83.70ms +[2025-08-22 20:05:32] [Rank 0] step:781/10000 train_time:65382ms step_avg:83.72ms +[2025-08-22 20:05:32] [Rank 0] step:781/10000 train_time:65382ms step_avg:83.72ms +[2025-08-22 20:05:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:05:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:05:47] [Rank 0] PRINT: step:800/10000 val_loss:5.2785 svd_entropy: attn_qk:H=0.6612,top10E=0.42,eRank=130.5,q75/q25=44.92 attn_vo:H=0.6767,top10E=0.18,eRank=158.8,q75/q25=inf mlp_w1:H=0.6902,top10E=0.37,eRank=108.2,q75/q25=7.00 mlp_w2:H=0.7428,top10E=0.32,eRank=145.0,q75/q25=9.61 vo_prod:H=0.4963,top10E=0.34,eRank=56.8,q75/q25=inf train_time:67155ms step_avg:83.94ms +[2025-08-22 20:05:47] [Rank 0] PRINT: step:800/10000 val_loss:5.2785 svd_entropy: attn_qk:H=0.6612,top10E=0.42,eRank=130.5,q75/q25=44.92 attn_vo:H=0.6767,top10E=0.18,eRank=158.8,q75/q25=inf mlp_w1:H=0.6902,top10E=0.37,eRank=108.2,q75/q25=7.00 mlp_w2:H=0.7428,top10E=0.32,eRank=145.0,q75/q25=9.61 vo_prod:H=0.4963,top10E=0.34,eRank=56.8,q75/q25=inf train_time:67155ms step_avg:83.94ms +[2025-08-22 20:05:47] [Rank 0] step:801/10000 train_time:67170ms step_avg:83.86ms +[2025-08-22 20:05:47] [Rank 0] step:801/10000 train_time:67170ms step_avg:83.86ms +[2025-08-22 20:05:49] [Rank 0] step:821/10000 train_time:68785ms step_avg:83.78ms +[2025-08-22 20:05:49] [Rank 0] step:821/10000 train_time:68785ms step_avg:83.78ms +[2025-08-22 20:05:50] [Rank 0] step:841/10000 train_time:70471ms step_avg:83.79ms +[2025-08-22 20:05:50] [Rank 0] step:841/10000 train_time:70471ms step_avg:83.79ms +[2025-08-22 20:05:52] [Rank 0] step:861/10000 train_time:72157ms step_avg:83.81ms +[2025-08-22 20:05:52] [Rank 0] step:861/10000 train_time:72157ms step_avg:83.81ms +[2025-08-22 20:05:54] [Rank 0] step:881/10000 train_time:73844ms step_avg:83.82ms +[2025-08-22 20:05:54] [Rank 0] step:881/10000 train_time:73844ms step_avg:83.82ms +[2025-08-22 20:05:56] [Rank 0] step:901/10000 train_time:75533ms step_avg:83.83ms +[2025-08-22 20:05:56] [Rank 0] step:901/10000 train_time:75533ms step_avg:83.83ms +[2025-08-22 20:05:57] [Rank 0] step:921/10000 train_time:77222ms step_avg:83.85ms +[2025-08-22 20:05:57] [Rank 0] step:921/10000 train_time:77222ms step_avg:83.85ms +[2025-08-22 20:05:59] [Rank 0] step:941/10000 train_time:78911ms 
step_avg:83.86ms +[2025-08-22 20:05:59] [Rank 0] step:941/10000 train_time:78911ms step_avg:83.86ms +[2025-08-22 20:06:01] [Rank 0] step:961/10000 train_time:80602ms step_avg:83.87ms +[2025-08-22 20:06:01] [Rank 0] step:961/10000 train_time:80602ms step_avg:83.87ms +[2025-08-22 20:06:02] [Rank 0] step:981/10000 train_time:82294ms step_avg:83.89ms +[2025-08-22 20:06:02] [Rank 0] step:981/10000 train_time:82294ms step_avg:83.89ms +[2025-08-22 20:06:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:06:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:06:18] [Rank 0] PRINT: step:1000/10000 val_loss:5.1152 svd_entropy: attn_qk:H=0.6741,top10E=0.40,eRank=135.3,q75/q25=47.15 attn_vo:H=0.6895,top10E=0.17,eRank=168.5,q75/q25=inf mlp_w1:H=0.7164,top10E=0.32,eRank=127.1,q75/q25=7.58 mlp_w2:H=0.7840,top10E=0.26,eRank=186.8,q75/q25=10.25 vo_prod:H=0.5125,top10E=0.31,eRank=62.7,q75/q25=inf train_time:84068ms step_avg:84.07ms +[2025-08-22 20:06:18] [Rank 0] PRINT: step:1000/10000 val_loss:5.1152 svd_entropy: attn_qk:H=0.6741,top10E=0.40,eRank=135.3,q75/q25=47.15 attn_vo:H=0.6895,top10E=0.17,eRank=168.5,q75/q25=inf mlp_w1:H=0.7164,top10E=0.32,eRank=127.1,q75/q25=7.58 mlp_w2:H=0.7840,top10E=0.26,eRank=186.8,q75/q25=10.25 vo_prod:H=0.5125,top10E=0.31,eRank=62.7,q75/q25=inf train_time:84068ms step_avg:84.07ms +[2025-08-22 20:06:18] [Rank 0] step:1001/10000 train_time:84082ms step_avg:84.00ms +[2025-08-22 20:06:18] [Rank 0] step:1001/10000 train_time:84082ms step_avg:84.00ms +[2025-08-22 20:06:19] [Rank 0] step:1021/10000 train_time:85696ms step_avg:83.93ms +[2025-08-22 20:06:19] [Rank 0] step:1021/10000 train_time:85696ms step_avg:83.93ms +[2025-08-22 20:06:21] [Rank 0] step:1041/10000 train_time:87379ms step_avg:83.94ms +[2025-08-22 20:06:21] [Rank 0] step:1041/10000 train_time:87379ms 
step_avg:83.94ms +[2025-08-22 20:06:23] [Rank 0] step:1061/10000 train_time:89062ms step_avg:83.94ms +[2025-08-22 20:06:23] [Rank 0] step:1061/10000 train_time:89062ms step_avg:83.94ms +[2025-08-22 20:06:25] [Rank 0] step:1081/10000 train_time:90748ms step_avg:83.95ms +[2025-08-22 20:06:25] [Rank 0] step:1081/10000 train_time:90748ms step_avg:83.95ms +[2025-08-22 20:06:26] [Rank 0] step:1101/10000 train_time:92432ms step_avg:83.95ms +[2025-08-22 20:06:26] [Rank 0] step:1101/10000 train_time:92432ms step_avg:83.95ms +[2025-08-22 20:06:28] [Rank 0] step:1121/10000 train_time:94118ms step_avg:83.96ms +[2025-08-22 20:06:28] [Rank 0] step:1121/10000 train_time:94118ms step_avg:83.96ms +[2025-08-22 20:06:30] [Rank 0] step:1141/10000 train_time:95803ms step_avg:83.96ms +[2025-08-22 20:06:30] [Rank 0] step:1141/10000 train_time:95803ms step_avg:83.96ms +[2025-08-22 20:06:31] [Rank 0] step:1161/10000 train_time:97491ms step_avg:83.97ms +[2025-08-22 20:06:31] [Rank 0] step:1161/10000 train_time:97491ms step_avg:83.97ms +[2025-08-22 20:06:33] [Rank 0] step:1181/10000 train_time:99176ms step_avg:83.98ms +[2025-08-22 20:06:33] [Rank 0] step:1181/10000 train_time:99176ms step_avg:83.98ms +[2025-08-22 20:06:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:06:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:06:48] [Rank 0] PRINT: step:1200/10000 val_loss:4.9840 svd_entropy: attn_qk:H=0.6854,top10E=0.38,eRank=140.0,q75/q25=51.02 attn_vo:H=0.7034,top10E=0.15,eRank=180.5,q75/q25=inf mlp_w1:H=0.7363,top10E=0.29,eRank=143.6,q75/q25=8.26 mlp_w2:H=0.8115,top10E=0.22,eRank=222.2,q75/q25=10.62 vo_prod:H=0.5276,top10E=0.28,eRank=69.2,q75/q25=inf train_time:100945ms step_avg:84.12ms +[2025-08-22 20:06:48] [Rank 0] PRINT: step:1200/10000 val_loss:4.9840 svd_entropy: attn_qk:H=0.6854,top10E=0.38,eRank=140.0,q75/q25=51.02 attn_vo:H=0.7034,top10E=0.15,eRank=180.5,q75/q25=inf mlp_w1:H=0.7363,top10E=0.29,eRank=143.6,q75/q25=8.26 mlp_w2:H=0.8115,top10E=0.22,eRank=222.2,q75/q25=10.62 vo_prod:H=0.5276,top10E=0.28,eRank=69.2,q75/q25=inf train_time:100945ms step_avg:84.12ms +[2025-08-22 20:06:48] [Rank 0] step:1201/10000 train_time:100961ms step_avg:84.06ms +[2025-08-22 20:06:48] [Rank 0] step:1201/10000 train_time:100961ms step_avg:84.06ms +[2025-08-22 20:06:50] [Rank 0] step:1221/10000 train_time:102614ms step_avg:84.04ms +[2025-08-22 20:06:50] [Rank 0] step:1221/10000 train_time:102614ms step_avg:84.04ms +[2025-08-22 20:06:52] [Rank 0] step:1241/10000 train_time:104296ms step_avg:84.04ms +[2025-08-22 20:06:52] [Rank 0] step:1241/10000 train_time:104296ms step_avg:84.04ms +[2025-08-22 20:06:54] [Rank 0] step:1261/10000 train_time:105980ms step_avg:84.04ms +[2025-08-22 20:06:54] [Rank 0] step:1261/10000 train_time:105980ms step_avg:84.04ms +[2025-08-22 20:06:55] [Rank 0] step:1281/10000 train_time:107662ms step_avg:84.05ms +[2025-08-22 20:06:55] [Rank 0] step:1281/10000 train_time:107662ms step_avg:84.05ms +[2025-08-22 20:06:57] [Rank 0] step:1301/10000 train_time:109346ms step_avg:84.05ms +[2025-08-22 20:06:57] [Rank 0] step:1301/10000 train_time:109346ms step_avg:84.05ms +[2025-08-22 20:06:59] [Rank 0] step:1321/10000 train_time:111030ms step_avg:84.05ms +[2025-08-22 20:06:59] [Rank 0] step:1321/10000 train_time:111030ms step_avg:84.05ms +[2025-08-22 20:07:00] [Rank 0] 
step:1341/10000 train_time:112714ms step_avg:84.05ms +[2025-08-22 20:07:00] [Rank 0] step:1341/10000 train_time:112714ms step_avg:84.05ms +[2025-08-22 20:07:02] [Rank 0] step:1361/10000 train_time:114400ms step_avg:84.06ms +[2025-08-22 20:07:02] [Rank 0] step:1361/10000 train_time:114400ms step_avg:84.06ms +[2025-08-22 20:07:04] [Rank 0] step:1381/10000 train_time:116087ms step_avg:84.06ms +[2025-08-22 20:07:04] [Rank 0] step:1381/10000 train_time:116087ms step_avg:84.06ms +[2025-08-22 20:07:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:07:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:07:19] [Rank 0] PRINT: step:1400/10000 val_loss:4.9010 svd_entropy: attn_qk:H=0.6947,top10E=0.36,eRank=144.2,q75/q25=55.44 attn_vo:H=0.7170,top10E=0.14,eRank=193.7,q75/q25=inf mlp_w1:H=0.7520,top10E=0.27,eRank=158.1,q75/q25=8.95 mlp_w2:H=0.8308,top10E=0.20,eRank=251.7,q75/q25=10.70 vo_prod:H=0.5428,top10E=0.26,eRank=76.3,q75/q25=inf train_time:117859ms step_avg:84.19ms +[2025-08-22 20:07:19] [Rank 0] PRINT: step:1400/10000 val_loss:4.9010 svd_entropy: attn_qk:H=0.6947,top10E=0.36,eRank=144.2,q75/q25=55.44 attn_vo:H=0.7170,top10E=0.14,eRank=193.7,q75/q25=inf mlp_w1:H=0.7520,top10E=0.27,eRank=158.1,q75/q25=8.95 mlp_w2:H=0.8308,top10E=0.20,eRank=251.7,q75/q25=10.70 vo_prod:H=0.5428,top10E=0.26,eRank=76.3,q75/q25=inf train_time:117859ms step_avg:84.19ms +[2025-08-22 20:07:19] [Rank 0] step:1401/10000 train_time:117874ms step_avg:84.14ms +[2025-08-22 20:07:19] [Rank 0] step:1401/10000 train_time:117874ms step_avg:84.14ms +[2025-08-22 20:07:21] [Rank 0] step:1421/10000 train_time:119494ms step_avg:84.09ms +[2025-08-22 20:07:21] [Rank 0] step:1421/10000 train_time:119494ms step_avg:84.09ms +[2025-08-22 20:07:23] [Rank 0] step:1441/10000 train_time:121176ms step_avg:84.09ms +[2025-08-22 
20:07:23] [Rank 0] step:1441/10000 train_time:121176ms step_avg:84.09ms +[2025-08-22 20:07:24] [Rank 0] step:1461/10000 train_time:122860ms step_avg:84.09ms +[2025-08-22 20:07:24] [Rank 0] step:1461/10000 train_time:122860ms step_avg:84.09ms +[2025-08-22 20:07:26] [Rank 0] step:1481/10000 train_time:124545ms step_avg:84.09ms +[2025-08-22 20:07:26] [Rank 0] step:1481/10000 train_time:124545ms step_avg:84.09ms +[2025-08-22 20:07:28] [Rank 0] step:1501/10000 train_time:126240ms step_avg:84.10ms +[2025-08-22 20:07:28] [Rank 0] step:1501/10000 train_time:126240ms step_avg:84.10ms +[2025-08-22 20:07:29] [Rank 0] step:1521/10000 train_time:127937ms step_avg:84.11ms +[2025-08-22 20:07:29] [Rank 0] step:1521/10000 train_time:127937ms step_avg:84.11ms +[2025-08-22 20:07:31] [Rank 0] step:1541/10000 train_time:129635ms step_avg:84.12ms +[2025-08-22 20:07:31] [Rank 0] step:1541/10000 train_time:129635ms step_avg:84.12ms +[2025-08-22 20:07:33] [Rank 0] step:1561/10000 train_time:131335ms step_avg:84.14ms +[2025-08-22 20:07:33] [Rank 0] step:1561/10000 train_time:131335ms step_avg:84.14ms +[2025-08-22 20:07:34] [Rank 0] step:1581/10000 train_time:133035ms step_avg:84.15ms +[2025-08-22 20:07:34] [Rank 0] step:1581/10000 train_time:133035ms step_avg:84.15ms +[2025-08-22 20:07:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:07:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:07:50] [Rank 0] PRINT: step:1600/10000 val_loss:4.8032 svd_entropy: attn_qk:H=0.7023,top10E=0.35,eRank=148.0,q75/q25=59.96 attn_vo:H=0.7291,top10E=0.13,eRank=206.7,q75/q25=inf mlp_w1:H=0.7650,top10E=0.26,eRank=171.2,q75/q25=9.52 mlp_w2:H=0.8455,top10E=0.18,eRank=276.9,q75/q25=10.54 vo_prod:H=0.5565,top10E=0.24,eRank=83.6,q75/q25=inf train_time:134819ms step_avg:84.26ms +[2025-08-22 20:07:50] [Rank 0] PRINT: step:1600/10000 val_loss:4.8032 svd_entropy: attn_qk:H=0.7023,top10E=0.35,eRank=148.0,q75/q25=59.96 attn_vo:H=0.7291,top10E=0.13,eRank=206.7,q75/q25=inf mlp_w1:H=0.7650,top10E=0.26,eRank=171.2,q75/q25=9.52 mlp_w2:H=0.8455,top10E=0.18,eRank=276.9,q75/q25=10.54 vo_prod:H=0.5565,top10E=0.24,eRank=83.6,q75/q25=inf train_time:134819ms step_avg:84.26ms +[2025-08-22 20:07:50] [Rank 0] step:1601/10000 train_time:134836ms step_avg:84.22ms +[2025-08-22 20:07:50] [Rank 0] step:1601/10000 train_time:134836ms step_avg:84.22ms +[2025-08-22 20:07:52] [Rank 0] step:1621/10000 train_time:136447ms step_avg:84.17ms +[2025-08-22 20:07:52] [Rank 0] step:1621/10000 train_time:136447ms step_avg:84.17ms +[2025-08-22 20:07:54] [Rank 0] step:1641/10000 train_time:138190ms step_avg:84.21ms +[2025-08-22 20:07:54] [Rank 0] step:1641/10000 train_time:138190ms step_avg:84.21ms +[2025-08-22 20:07:55] [Rank 0] step:1661/10000 train_time:139888ms step_avg:84.22ms +[2025-08-22 20:07:55] [Rank 0] step:1661/10000 train_time:139888ms step_avg:84.22ms +[2025-08-22 20:07:57] [Rank 0] step:1681/10000 train_time:141587ms step_avg:84.23ms +[2025-08-22 20:07:57] [Rank 0] step:1681/10000 train_time:141587ms step_avg:84.23ms +[2025-08-22 20:07:59] [Rank 0] step:1701/10000 train_time:143287ms step_avg:84.24ms +[2025-08-22 20:07:59] [Rank 0] step:1701/10000 train_time:143287ms step_avg:84.24ms +[2025-08-22 20:08:00] [Rank 0] step:1721/10000 train_time:144986ms step_avg:84.25ms +[2025-08-22 20:08:00] [Rank 0] step:1721/10000 train_time:144986ms step_avg:84.25ms +[2025-08-22 20:08:02] [Rank 0] 
step:1741/10000 train_time:146686ms step_avg:84.25ms +[2025-08-22 20:08:02] [Rank 0] step:1741/10000 train_time:146686ms step_avg:84.25ms +[2025-08-22 20:08:04] [Rank 0] step:1761/10000 train_time:148388ms step_avg:84.26ms +[2025-08-22 20:08:04] [Rank 0] step:1761/10000 train_time:148388ms step_avg:84.26ms +[2025-08-22 20:08:05] [Rank 0] step:1781/10000 train_time:150091ms step_avg:84.27ms +[2025-08-22 20:08:05] [Rank 0] step:1781/10000 train_time:150091ms step_avg:84.27ms +[2025-08-22 20:08:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:08:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:08:21] [Rank 0] PRINT: step:1800/10000 val_loss:4.7041 svd_entropy: attn_qk:H=0.7088,top10E=0.33,eRank=151.4,q75/q25=63.65 attn_vo:H=0.7395,top10E=0.12,eRank=219.2,q75/q25=inf mlp_w1:H=0.7761,top10E=0.24,eRank=183.1,q75/q25=10.02 mlp_w2:H=0.8573,top10E=0.17,eRank=299.3,q75/q25=10.22 vo_prod:H=0.5685,top10E=0.22,eRank=90.8,q75/q25=inf train_time:151879ms step_avg:84.38ms +[2025-08-22 20:08:21] [Rank 0] PRINT: step:1800/10000 val_loss:4.7041 svd_entropy: attn_qk:H=0.7088,top10E=0.33,eRank=151.4,q75/q25=63.65 attn_vo:H=0.7395,top10E=0.12,eRank=219.2,q75/q25=inf mlp_w1:H=0.7761,top10E=0.24,eRank=183.1,q75/q25=10.02 mlp_w2:H=0.8573,top10E=0.17,eRank=299.3,q75/q25=10.22 vo_prod:H=0.5685,top10E=0.22,eRank=90.8,q75/q25=inf train_time:151879ms step_avg:84.38ms +[2025-08-22 20:08:21] [Rank 0] step:1801/10000 train_time:151895ms step_avg:84.34ms +[2025-08-22 20:08:21] [Rank 0] step:1801/10000 train_time:151895ms step_avg:84.34ms +[2025-08-22 20:08:23] [Rank 0] step:1821/10000 train_time:153505ms step_avg:84.30ms +[2025-08-22 20:08:23] [Rank 0] step:1821/10000 train_time:153505ms step_avg:84.30ms +[2025-08-22 20:08:24] [Rank 0] step:1841/10000 train_time:155201ms step_avg:84.30ms +[2025-08-22 
20:08:24] [Rank 0] step:1841/10000 train_time:155201ms step_avg:84.30ms +[2025-08-22 20:08:26] [Rank 0] step:1861/10000 train_time:156897ms step_avg:84.31ms +[2025-08-22 20:08:26] [Rank 0] step:1861/10000 train_time:156897ms step_avg:84.31ms +[2025-08-22 20:08:28] [Rank 0] step:1881/10000 train_time:158594ms step_avg:84.31ms +[2025-08-22 20:08:28] [Rank 0] step:1881/10000 train_time:158594ms step_avg:84.31ms +[2025-08-22 20:08:29] [Rank 0] step:1901/10000 train_time:160290ms step_avg:84.32ms +[2025-08-22 20:08:29] [Rank 0] step:1901/10000 train_time:160290ms step_avg:84.32ms +[2025-08-22 20:08:31] [Rank 0] step:1921/10000 train_time:161986ms step_avg:84.32ms +[2025-08-22 20:08:31] [Rank 0] step:1921/10000 train_time:161986ms step_avg:84.32ms +[2025-08-22 20:08:33] [Rank 0] step:1941/10000 train_time:163686ms step_avg:84.33ms +[2025-08-22 20:08:33] [Rank 0] step:1941/10000 train_time:163686ms step_avg:84.33ms +[2025-08-22 20:08:35] [Rank 0] step:1961/10000 train_time:165385ms step_avg:84.34ms +[2025-08-22 20:08:35] [Rank 0] step:1961/10000 train_time:165385ms step_avg:84.34ms +[2025-08-22 20:08:36] [Rank 0] step:1981/10000 train_time:167084ms step_avg:84.34ms +[2025-08-22 20:08:36] [Rank 0] step:1981/10000 train_time:167084ms step_avg:84.34ms +[2025-08-22 20:08:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:08:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:08:52] [Rank 0] PRINT: step:2000/10000 val_loss:4.6364 svd_entropy: attn_qk:H=0.7146,top10E=0.32,eRank=154.6,q75/q25=67.32 attn_vo:H=0.7485,top10E=0.11,eRank=231.1,q75/q25=inf mlp_w1:H=0.7857,top10E=0.23,eRank=194.3,q75/q25=10.42 mlp_w2:H=0.8673,top10E=0.16,eRank=319.5,q75/q25=9.81 vo_prod:H=0.5788,top10E=0.21,eRank=97.5,q75/q25=inf train_time:168865ms step_avg:84.43ms +[2025-08-22 20:08:52] [Rank 0] PRINT: step:2000/10000 val_loss:4.6364 svd_entropy: attn_qk:H=0.7146,top10E=0.32,eRank=154.6,q75/q25=67.32 attn_vo:H=0.7485,top10E=0.11,eRank=231.1,q75/q25=inf mlp_w1:H=0.7857,top10E=0.23,eRank=194.3,q75/q25=10.42 mlp_w2:H=0.8673,top10E=0.16,eRank=319.5,q75/q25=9.81 vo_prod:H=0.5788,top10E=0.21,eRank=97.5,q75/q25=inf train_time:168865ms step_avg:84.43ms +[2025-08-22 20:08:52] [Rank 0] step:2001/10000 train_time:168882ms step_avg:84.40ms +[2025-08-22 20:08:52] [Rank 0] step:2001/10000 train_time:168882ms step_avg:84.40ms +[2025-08-22 20:08:54] [Rank 0] step:2021/10000 train_time:170579ms step_avg:84.40ms +[2025-08-22 20:08:54] [Rank 0] step:2021/10000 train_time:170579ms step_avg:84.40ms +[2025-08-22 20:08:55] [Rank 0] step:2041/10000 train_time:172227ms step_avg:84.38ms +[2025-08-22 20:08:55] [Rank 0] step:2041/10000 train_time:172227ms step_avg:84.38ms +[2025-08-22 20:08:57] [Rank 0] step:2061/10000 train_time:174029ms step_avg:84.44ms +[2025-08-22 20:08:57] [Rank 0] step:2061/10000 train_time:174029ms step_avg:84.44ms +[2025-08-22 20:08:59] [Rank 0] step:2081/10000 train_time:175724ms step_avg:84.44ms +[2025-08-22 20:08:59] [Rank 0] step:2081/10000 train_time:175724ms step_avg:84.44ms +[2025-08-22 20:09:00] [Rank 0] step:2101/10000 train_time:177418ms step_avg:84.44ms +[2025-08-22 20:09:00] [Rank 0] step:2101/10000 train_time:177418ms step_avg:84.44ms +[2025-08-22 20:09:02] [Rank 0] step:2121/10000 train_time:179115ms step_avg:84.45ms +[2025-08-22 20:09:02] [Rank 0] step:2121/10000 train_time:179115ms step_avg:84.45ms +[2025-08-22 20:09:04] [Rank 0] 
step:2141/10000 train_time:180813ms step_avg:84.45ms +[2025-08-22 20:09:04] [Rank 0] step:2141/10000 train_time:180813ms step_avg:84.45ms +[2025-08-22 20:09:06] [Rank 0] step:2161/10000 train_time:182511ms step_avg:84.46ms +[2025-08-22 20:09:06] [Rank 0] step:2161/10000 train_time:182511ms step_avg:84.46ms +[2025-08-22 20:09:07] [Rank 0] step:2181/10000 train_time:184208ms step_avg:84.46ms +[2025-08-22 20:09:07] [Rank 0] step:2181/10000 train_time:184208ms step_avg:84.46ms +[2025-08-22 20:09:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:09:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:09:23] [Rank 0] PRINT: step:2200/10000 val_loss:4.5191 svd_entropy: attn_qk:H=0.7195,top10E=0.32,eRank=157.4,q75/q25=69.25 attn_vo:H=0.7559,top10E=0.11,eRank=241.7,q75/q25=inf mlp_w1:H=0.7943,top10E=0.22,eRank=204.8,q75/q25=10.68 mlp_w2:H=0.8756,top10E=0.15,eRank=337.6,q75/q25=9.32 vo_prod:H=0.5871,top10E=0.21,eRank=103.6,q75/q25=inf train_time:185991ms step_avg:84.54ms +[2025-08-22 20:09:23] [Rank 0] PRINT: step:2200/10000 val_loss:4.5191 svd_entropy: attn_qk:H=0.7195,top10E=0.32,eRank=157.4,q75/q25=69.25 attn_vo:H=0.7559,top10E=0.11,eRank=241.7,q75/q25=inf mlp_w1:H=0.7943,top10E=0.22,eRank=204.8,q75/q25=10.68 mlp_w2:H=0.8756,top10E=0.15,eRank=337.6,q75/q25=9.32 vo_prod:H=0.5871,top10E=0.21,eRank=103.6,q75/q25=inf train_time:185991ms step_avg:84.54ms +[2025-08-22 20:09:23] [Rank 0] step:2201/10000 train_time:186006ms step_avg:84.51ms +[2025-08-22 20:09:23] [Rank 0] step:2201/10000 train_time:186006ms step_avg:84.51ms +[2025-08-22 20:09:24] [Rank 0] step:2221/10000 train_time:187633ms step_avg:84.48ms +[2025-08-22 20:09:24] [Rank 0] step:2221/10000 train_time:187633ms step_avg:84.48ms +[2025-08-22 20:09:26] [Rank 0] step:2241/10000 train_time:189364ms step_avg:84.50ms +[2025-08-22 
20:09:26] [Rank 0] step:2241/10000 train_time:189364ms step_avg:84.50ms +[2025-08-22 20:09:28] [Rank 0] step:2261/10000 train_time:191104ms step_avg:84.52ms +[2025-08-22 20:09:28] [Rank 0] step:2261/10000 train_time:191104ms step_avg:84.52ms +[2025-08-22 20:09:30] [Rank 0] step:2281/10000 train_time:192848ms step_avg:84.55ms +[2025-08-22 20:09:30] [Rank 0] step:2281/10000 train_time:192848ms step_avg:84.55ms +[2025-08-22 20:09:31] [Rank 0] step:2301/10000 train_time:194592ms step_avg:84.57ms +[2025-08-22 20:09:31] [Rank 0] step:2301/10000 train_time:194592ms step_avg:84.57ms +[2025-08-22 20:09:33] [Rank 0] step:2321/10000 train_time:196337ms step_avg:84.59ms +[2025-08-22 20:09:33] [Rank 0] step:2321/10000 train_time:196337ms step_avg:84.59ms +[2025-08-22 20:09:35] [Rank 0] step:2341/10000 train_time:198082ms step_avg:84.61ms +[2025-08-22 20:09:35] [Rank 0] step:2341/10000 train_time:198082ms step_avg:84.61ms +[2025-08-22 20:09:37] [Rank 0] step:2361/10000 train_time:199827ms step_avg:84.64ms +[2025-08-22 20:09:37] [Rank 0] step:2361/10000 train_time:199827ms step_avg:84.64ms +[2025-08-22 20:09:38] [Rank 0] step:2381/10000 train_time:201573ms step_avg:84.66ms +[2025-08-22 20:09:38] [Rank 0] step:2381/10000 train_time:201573ms step_avg:84.66ms +[2025-08-22 20:09:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:09:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:09:54] [Rank 0] PRINT: step:2400/10000 val_loss:4.4112 svd_entropy: attn_qk:H=0.7234,top10E=0.31,eRank=159.8,q75/q25=70.87 attn_vo:H=0.7621,top10E=0.11,eRank=251.0,q75/q25=inf mlp_w1:H=0.8019,top10E=0.22,eRank=214.8,q75/q25=10.86 mlp_w2:H=0.8828,top10E=0.14,eRank=353.8,q75/q25=8.82 vo_prod:H=0.5936,top10E=0.20,eRank=108.7,q75/q25=inf train_time:203407ms step_avg:84.75ms +[2025-08-22 20:09:54] [Rank 0] PRINT: step:2400/10000 val_loss:4.4112 svd_entropy: attn_qk:H=0.7234,top10E=0.31,eRank=159.8,q75/q25=70.87 attn_vo:H=0.7621,top10E=0.11,eRank=251.0,q75/q25=inf mlp_w1:H=0.8019,top10E=0.22,eRank=214.8,q75/q25=10.86 mlp_w2:H=0.8828,top10E=0.14,eRank=353.8,q75/q25=8.82 vo_prod:H=0.5936,top10E=0.20,eRank=108.7,q75/q25=inf train_time:203407ms step_avg:84.75ms +[2025-08-22 20:09:54] [Rank 0] step:2401/10000 train_time:203421ms step_avg:84.72ms +[2025-08-22 20:09:54] [Rank 0] step:2401/10000 train_time:203421ms step_avg:84.72ms +[2025-08-22 20:09:56] [Rank 0] step:2421/10000 train_time:205092ms step_avg:84.71ms +[2025-08-22 20:09:56] [Rank 0] step:2421/10000 train_time:205092ms step_avg:84.71ms +[2025-08-22 20:09:58] [Rank 0] step:2441/10000 train_time:206889ms step_avg:84.76ms +[2025-08-22 20:09:58] [Rank 0] step:2441/10000 train_time:206889ms step_avg:84.76ms +[2025-08-22 20:09:59] [Rank 0] step:2461/10000 train_time:208629ms step_avg:84.77ms +[2025-08-22 20:09:59] [Rank 0] step:2461/10000 train_time:208629ms step_avg:84.77ms +[2025-08-22 20:10:01] [Rank 0] step:2481/10000 train_time:210370ms step_avg:84.79ms +[2025-08-22 20:10:01] [Rank 0] step:2481/10000 train_time:210370ms step_avg:84.79ms +[2025-08-22 20:10:03] [Rank 0] step:2501/10000 train_time:212112ms step_avg:84.81ms +[2025-08-22 20:10:03] [Rank 0] step:2501/10000 train_time:212112ms step_avg:84.81ms +[2025-08-22 20:10:05] [Rank 0] step:2521/10000 train_time:213855ms step_avg:84.83ms +[2025-08-22 20:10:05] [Rank 0] step:2521/10000 train_time:213855ms step_avg:84.83ms +[2025-08-22 20:10:06] [Rank 0] 
step:2541/10000 train_time:215597ms step_avg:84.85ms +[2025-08-22 20:10:06] [Rank 0] step:2541/10000 train_time:215597ms step_avg:84.85ms +[2025-08-22 20:10:08] [Rank 0] step:2561/10000 train_time:217338ms step_avg:84.86ms +[2025-08-22 20:10:08] [Rank 0] step:2561/10000 train_time:217338ms step_avg:84.86ms +[2025-08-22 20:10:10] [Rank 0] step:2581/10000 train_time:219079ms step_avg:84.88ms +[2025-08-22 20:10:10] [Rank 0] step:2581/10000 train_time:219079ms step_avg:84.88ms +[2025-08-22 20:10:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:10:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:10:25] [Rank 0] PRINT: step:2600/10000 val_loss:4.3319 svd_entropy: attn_qk:H=0.7272,top10E=0.31,eRank=162.1,q75/q25=72.78 attn_vo:H=0.7672,top10E=0.10,eRank=259.3,q75/q25=inf mlp_w1:H=0.8088,top10E=0.21,eRank=224.1,q75/q25=10.94 mlp_w2:H=0.8887,top10E=0.13,eRank=367.9,q75/q25=8.35 vo_prod:H=0.5994,top10E=0.19,eRank=113.5,q75/q25=inf train_time:220906ms step_avg:84.96ms +[2025-08-22 20:10:25] [Rank 0] PRINT: step:2600/10000 val_loss:4.3319 svd_entropy: attn_qk:H=0.7272,top10E=0.31,eRank=162.1,q75/q25=72.78 attn_vo:H=0.7672,top10E=0.10,eRank=259.3,q75/q25=inf mlp_w1:H=0.8088,top10E=0.21,eRank=224.1,q75/q25=10.94 mlp_w2:H=0.8887,top10E=0.13,eRank=367.9,q75/q25=8.35 vo_prod:H=0.5994,top10E=0.19,eRank=113.5,q75/q25=inf train_time:220906ms step_avg:84.96ms +[2025-08-22 20:10:25] [Rank 0] step:2601/10000 train_time:220922ms step_avg:84.94ms +[2025-08-22 20:10:25] [Rank 0] step:2601/10000 train_time:220922ms step_avg:84.94ms +[2025-08-22 20:10:27] [Rank 0] step:2621/10000 train_time:222588ms step_avg:84.92ms +[2025-08-22 20:10:27] [Rank 0] step:2621/10000 train_time:222588ms step_avg:84.92ms +[2025-08-22 20:10:29] [Rank 0] step:2641/10000 train_time:224326ms step_avg:84.94ms +[2025-08-22 
20:10:29] [Rank 0] step:2641/10000 train_time:224326ms step_avg:84.94ms +[2025-08-22 20:10:31] [Rank 0] step:2661/10000 train_time:226065ms step_avg:84.95ms +[2025-08-22 20:10:31] [Rank 0] step:2661/10000 train_time:226065ms step_avg:84.95ms +[2025-08-22 20:10:32] [Rank 0] step:2681/10000 train_time:227804ms step_avg:84.97ms +[2025-08-22 20:10:32] [Rank 0] step:2681/10000 train_time:227804ms step_avg:84.97ms +[2025-08-22 20:10:34] [Rank 0] step:2701/10000 train_time:229545ms step_avg:84.99ms +[2025-08-22 20:10:34] [Rank 0] step:2701/10000 train_time:229545ms step_avg:84.99ms +[2025-08-22 20:10:36] [Rank 0] step:2721/10000 train_time:231285ms step_avg:85.00ms +[2025-08-22 20:10:36] [Rank 0] step:2721/10000 train_time:231285ms step_avg:85.00ms +[2025-08-22 20:10:38] [Rank 0] step:2741/10000 train_time:233028ms step_avg:85.02ms +[2025-08-22 20:10:38] [Rank 0] step:2741/10000 train_time:233028ms step_avg:85.02ms +[2025-08-22 20:10:39] [Rank 0] step:2761/10000 train_time:234770ms step_avg:85.03ms +[2025-08-22 20:10:39] [Rank 0] step:2761/10000 train_time:234770ms step_avg:85.03ms +[2025-08-22 20:10:41] [Rank 0] step:2781/10000 train_time:236513ms step_avg:85.05ms +[2025-08-22 20:10:41] [Rank 0] step:2781/10000 train_time:236513ms step_avg:85.05ms +[2025-08-22 20:10:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:10:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:10:56] [Rank 0] PRINT: step:2800/10000 val_loss:4.2805 svd_entropy: attn_qk:H=0.7308,top10E=0.30,eRank=164.5,q75/q25=74.54 attn_vo:H=0.7717,top10E=0.10,eRank=266.8,q75/q25=inf mlp_w1:H=0.8149,top10E=0.20,eRank=232.7,q75/q25=10.99 mlp_w2:H=0.8938,top10E=0.13,eRank=380.4,q75/q25=7.91 vo_prod:H=0.6049,top10E=0.19,eRank=118.3,q75/q25=inf train_time:238343ms step_avg:85.12ms +[2025-08-22 20:10:56] [Rank 0] PRINT: step:2800/10000 val_loss:4.2805 svd_entropy: attn_qk:H=0.7308,top10E=0.30,eRank=164.5,q75/q25=74.54 attn_vo:H=0.7717,top10E=0.10,eRank=266.8,q75/q25=inf mlp_w1:H=0.8149,top10E=0.20,eRank=232.7,q75/q25=10.99 mlp_w2:H=0.8938,top10E=0.13,eRank=380.4,q75/q25=7.91 vo_prod:H=0.6049,top10E=0.19,eRank=118.3,q75/q25=inf train_time:238343ms step_avg:85.12ms +[2025-08-22 20:10:56] [Rank 0] step:2801/10000 train_time:238357ms step_avg:85.10ms +[2025-08-22 20:10:56] [Rank 0] step:2801/10000 train_time:238357ms step_avg:85.10ms +[2025-08-22 20:10:58] [Rank 0] step:2821/10000 train_time:240024ms step_avg:85.08ms +[2025-08-22 20:10:58] [Rank 0] step:2821/10000 train_time:240024ms step_avg:85.08ms +[2025-08-22 20:11:00] [Rank 0] step:2841/10000 train_time:241807ms step_avg:85.11ms +[2025-08-22 20:11:00] [Rank 0] step:2841/10000 train_time:241807ms step_avg:85.11ms +[2025-08-22 20:11:02] [Rank 0] step:2861/10000 train_time:243582ms step_avg:85.14ms +[2025-08-22 20:11:02] [Rank 0] step:2861/10000 train_time:243582ms step_avg:85.14ms +[2025-08-22 20:11:03] [Rank 0] step:2881/10000 train_time:245325ms step_avg:85.15ms +[2025-08-22 20:11:03] [Rank 0] step:2881/10000 train_time:245325ms step_avg:85.15ms +[2025-08-22 20:11:05] [Rank 0] step:2901/10000 train_time:247070ms step_avg:85.17ms +[2025-08-22 20:11:05] [Rank 0] step:2901/10000 train_time:247070ms step_avg:85.17ms +[2025-08-22 20:11:07] [Rank 0] step:2921/10000 train_time:248814ms step_avg:85.18ms +[2025-08-22 20:11:07] [Rank 0] step:2921/10000 train_time:248814ms step_avg:85.18ms +[2025-08-22 20:11:09] [Rank 0] 
step:2941/10000 train_time:250560ms step_avg:85.20ms +[2025-08-22 20:11:09] [Rank 0] step:2941/10000 train_time:250560ms step_avg:85.20ms +[2025-08-22 20:11:10] [Rank 0] step:2961/10000 train_time:252307ms step_avg:85.21ms +[2025-08-22 20:11:10] [Rank 0] step:2961/10000 train_time:252307ms step_avg:85.21ms +[2025-08-22 20:11:12] [Rank 0] step:2981/10000 train_time:254060ms step_avg:85.23ms +[2025-08-22 20:11:12] [Rank 0] step:2981/10000 train_time:254060ms step_avg:85.23ms +[2025-08-22 20:11:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:11:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:11:28] [Rank 0] PRINT: step:3000/10000 val_loss:4.2314 svd_entropy: attn_qk:H=0.7342,top10E=0.29,eRank=166.7,q75/q25=75.36 attn_vo:H=0.7755,top10E=0.10,eRank=273.2,q75/q25=inf mlp_w1:H=0.8205,top10E=0.20,eRank=241.0,q75/q25=10.94 mlp_w2:H=0.8982,top10E=0.13,eRank=391.6,q75/q25=7.53 vo_prod:H=0.6098,top10E=0.18,eRank=122.6,q75/q25=inf train_time:255902ms step_avg:85.30ms +[2025-08-22 20:11:28] [Rank 0] PRINT: step:3000/10000 val_loss:4.2314 svd_entropy: attn_qk:H=0.7342,top10E=0.29,eRank=166.7,q75/q25=75.36 attn_vo:H=0.7755,top10E=0.10,eRank=273.2,q75/q25=inf mlp_w1:H=0.8205,top10E=0.20,eRank=241.0,q75/q25=10.94 mlp_w2:H=0.8982,top10E=0.13,eRank=391.6,q75/q25=7.53 vo_prod:H=0.6098,top10E=0.18,eRank=122.6,q75/q25=inf train_time:255902ms step_avg:85.30ms +[2025-08-22 20:11:28] [Rank 0] step:3001/10000 train_time:255918ms step_avg:85.28ms +[2025-08-22 20:11:28] [Rank 0] step:3001/10000 train_time:255918ms step_avg:85.28ms +[2025-08-22 20:11:29] [Rank 0] step:3021/10000 train_time:257592ms step_avg:85.27ms +[2025-08-22 20:11:29] [Rank 0] step:3021/10000 train_time:257592ms step_avg:85.27ms +[2025-08-22 20:11:31] [Rank 0] step:3041/10000 train_time:259339ms step_avg:85.28ms +[2025-08-22 
20:11:31] [Rank 0] step:3041/10000 train_time:259339ms step_avg:85.28ms +[2025-08-22 20:11:33] [Rank 0] step:3061/10000 train_time:261089ms step_avg:85.30ms +[2025-08-22 20:11:33] [Rank 0] step:3061/10000 train_time:261089ms step_avg:85.30ms +[2025-08-22 20:11:35] [Rank 0] step:3081/10000 train_time:262836ms step_avg:85.31ms +[2025-08-22 20:11:35] [Rank 0] step:3081/10000 train_time:262836ms step_avg:85.31ms +[2025-08-22 20:11:36] [Rank 0] step:3101/10000 train_time:264587ms step_avg:85.32ms +[2025-08-22 20:11:36] [Rank 0] step:3101/10000 train_time:264587ms step_avg:85.32ms +[2025-08-22 20:11:38] [Rank 0] step:3121/10000 train_time:266336ms step_avg:85.34ms +[2025-08-22 20:11:38] [Rank 0] step:3121/10000 train_time:266336ms step_avg:85.34ms +[2025-08-22 20:11:40] [Rank 0] step:3141/10000 train_time:268090ms step_avg:85.35ms +[2025-08-22 20:11:40] [Rank 0] step:3141/10000 train_time:268090ms step_avg:85.35ms +[2025-08-22 20:11:42] [Rank 0] step:3161/10000 train_time:269840ms step_avg:85.37ms +[2025-08-22 20:11:42] [Rank 0] step:3161/10000 train_time:269840ms step_avg:85.37ms +[2025-08-22 20:11:43] [Rank 0] step:3181/10000 train_time:271594ms step_avg:85.38ms +[2025-08-22 20:11:43] [Rank 0] step:3181/10000 train_time:271594ms step_avg:85.38ms +[2025-08-22 20:11:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:11:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:11:59] [Rank 0] PRINT: step:3200/10000 val_loss:4.1897 svd_entropy: attn_qk:H=0.7373,top10E=0.29,eRank=168.8,q75/q25=76.03 attn_vo:H=0.7788,top10E=0.09,eRank=279.2,q75/q25=inf mlp_w1:H=0.8255,top10E=0.19,eRank=248.6,q75/q25=10.85 mlp_w2:H=0.9020,top10E=0.12,eRank=401.7,q75/q25=7.18 vo_prod:H=0.6144,top10E=0.18,eRank=126.9,q75/q25=inf train_time:273433ms step_avg:85.45ms +[2025-08-22 20:11:59] [Rank 0] PRINT: step:3200/10000 val_loss:4.1897 svd_entropy: attn_qk:H=0.7373,top10E=0.29,eRank=168.8,q75/q25=76.03 attn_vo:H=0.7788,top10E=0.09,eRank=279.2,q75/q25=inf mlp_w1:H=0.8255,top10E=0.19,eRank=248.6,q75/q25=10.85 mlp_w2:H=0.9020,top10E=0.12,eRank=401.7,q75/q25=7.18 vo_prod:H=0.6144,top10E=0.18,eRank=126.9,q75/q25=inf train_time:273433ms step_avg:85.45ms +[2025-08-22 20:11:59] [Rank 0] step:3201/10000 train_time:273446ms step_avg:85.43ms +[2025-08-22 20:11:59] [Rank 0] step:3201/10000 train_time:273446ms step_avg:85.43ms +[2025-08-22 20:12:01] [Rank 0] step:3221/10000 train_time:275105ms step_avg:85.41ms +[2025-08-22 20:12:01] [Rank 0] step:3221/10000 train_time:275105ms step_avg:85.41ms +[2025-08-22 20:12:03] [Rank 0] step:3241/10000 train_time:276851ms step_avg:85.42ms +[2025-08-22 20:12:03] [Rank 0] step:3241/10000 train_time:276851ms step_avg:85.42ms +[2025-08-22 20:12:04] [Rank 0] step:3261/10000 train_time:278652ms step_avg:85.45ms +[2025-08-22 20:12:04] [Rank 0] step:3261/10000 train_time:278652ms step_avg:85.45ms +[2025-08-22 20:12:06] [Rank 0] step:3281/10000 train_time:280463ms step_avg:85.48ms +[2025-08-22 20:12:06] [Rank 0] step:3281/10000 train_time:280463ms step_avg:85.48ms +[2025-08-22 20:12:08] [Rank 0] step:3301/10000 train_time:282212ms step_avg:85.49ms +[2025-08-22 20:12:08] [Rank 0] step:3301/10000 train_time:282212ms step_avg:85.49ms +[2025-08-22 20:12:10] [Rank 0] step:3321/10000 train_time:283961ms step_avg:85.50ms +[2025-08-22 20:12:10] [Rank 0] step:3321/10000 train_time:283961ms step_avg:85.50ms +[2025-08-22 20:12:11] [Rank 0] 
step:3341/10000 train_time:285709ms step_avg:85.52ms +[2025-08-22 20:12:11] [Rank 0] step:3341/10000 train_time:285709ms step_avg:85.52ms +[2025-08-22 20:12:13] [Rank 0] step:3361/10000 train_time:287459ms step_avg:85.53ms +[2025-08-22 20:12:13] [Rank 0] step:3361/10000 train_time:287459ms step_avg:85.53ms +[2025-08-22 20:12:15] [Rank 0] step:3381/10000 train_time:289209ms step_avg:85.54ms +[2025-08-22 20:12:15] [Rank 0] step:3381/10000 train_time:289209ms step_avg:85.54ms +[2025-08-22 20:12:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:12:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:12:30] [Rank 0] PRINT: step:3400/10000 val_loss:4.1465 svd_entropy: attn_qk:H=0.7404,top10E=0.29,eRank=171.0,q75/q25=77.44 attn_vo:H=0.7820,top10E=0.09,eRank=285.0,q75/q25=inf mlp_w1:H=0.8303,top10E=0.19,eRank=256.1,q75/q25=10.76 mlp_w2:H=0.9054,top10E=0.12,eRank=410.8,q75/q25=6.85 vo_prod:H=0.6187,top10E=0.17,eRank=131.1,q75/q25=inf train_time:291046ms step_avg:85.60ms +[2025-08-22 20:12:30] [Rank 0] PRINT: step:3400/10000 val_loss:4.1465 svd_entropy: attn_qk:H=0.7404,top10E=0.29,eRank=171.0,q75/q25=77.44 attn_vo:H=0.7820,top10E=0.09,eRank=285.0,q75/q25=inf mlp_w1:H=0.8303,top10E=0.19,eRank=256.1,q75/q25=10.76 mlp_w2:H=0.9054,top10E=0.12,eRank=410.8,q75/q25=6.85 vo_prod:H=0.6187,top10E=0.17,eRank=131.1,q75/q25=inf train_time:291046ms step_avg:85.60ms +[2025-08-22 20:12:31] [Rank 0] step:3401/10000 train_time:291062ms step_avg:85.58ms +[2025-08-22 20:12:31] [Rank 0] step:3401/10000 train_time:291062ms step_avg:85.58ms +[2025-08-22 20:12:32] [Rank 0] step:3421/10000 train_time:292721ms step_avg:85.57ms +[2025-08-22 20:12:32] [Rank 0] step:3421/10000 train_time:292721ms step_avg:85.57ms +[2025-08-22 20:12:34] [Rank 0] step:3441/10000 train_time:294464ms step_avg:85.58ms +[2025-08-22 
20:12:34] [Rank 0] step:3441/10000 train_time:294464ms step_avg:85.58ms +[2025-08-22 20:12:36] [Rank 0] step:3461/10000 train_time:296211ms step_avg:85.59ms +[2025-08-22 20:12:36] [Rank 0] step:3461/10000 train_time:296211ms step_avg:85.59ms +[2025-08-22 20:12:38] [Rank 0] step:3481/10000 train_time:297956ms step_avg:85.59ms +[2025-08-22 20:12:38] [Rank 0] step:3481/10000 train_time:297956ms step_avg:85.59ms +[2025-08-22 20:12:39] [Rank 0] step:3501/10000 train_time:299706ms step_avg:85.61ms +[2025-08-22 20:12:39] [Rank 0] step:3501/10000 train_time:299706ms step_avg:85.61ms +[2025-08-22 20:12:41] [Rank 0] step:3521/10000 train_time:301457ms step_avg:85.62ms +[2025-08-22 20:12:41] [Rank 0] step:3521/10000 train_time:301457ms step_avg:85.62ms +[2025-08-22 20:12:43] [Rank 0] step:3541/10000 train_time:303207ms step_avg:85.63ms +[2025-08-22 20:12:43] [Rank 0] step:3541/10000 train_time:303207ms step_avg:85.63ms +[2025-08-22 20:12:45] [Rank 0] step:3561/10000 train_time:304957ms step_avg:85.64ms +[2025-08-22 20:12:45] [Rank 0] step:3561/10000 train_time:304957ms step_avg:85.64ms +[2025-08-22 20:12:46] [Rank 0] step:3581/10000 train_time:306708ms step_avg:85.65ms +[2025-08-22 20:12:46] [Rank 0] step:3581/10000 train_time:306708ms step_avg:85.65ms +[2025-08-22 20:12:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:12:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:13:02] [Rank 0] PRINT: step:3600/10000 val_loss:4.1407 svd_entropy: attn_qk:H=0.7430,top10E=0.28,eRank=172.9,q75/q25=78.47 attn_vo:H=0.7847,top10E=0.09,eRank=289.9,q75/q25=inf mlp_w1:H=0.8345,top10E=0.18,eRank=263.0,q75/q25=10.63 mlp_w2:H=0.9084,top10E=0.12,eRank=419.0,q75/q25=6.57 vo_prod:H=0.6226,top10E=0.17,eRank=134.9,q75/q25=inf train_time:308548ms step_avg:85.71ms +[2025-08-22 20:13:02] [Rank 0] PRINT: step:3600/10000 val_loss:4.1407 svd_entropy: attn_qk:H=0.7430,top10E=0.28,eRank=172.9,q75/q25=78.47 attn_vo:H=0.7847,top10E=0.09,eRank=289.9,q75/q25=inf mlp_w1:H=0.8345,top10E=0.18,eRank=263.0,q75/q25=10.63 mlp_w2:H=0.9084,top10E=0.12,eRank=419.0,q75/q25=6.57 vo_prod:H=0.6226,top10E=0.17,eRank=134.9,q75/q25=inf train_time:308548ms step_avg:85.71ms +[2025-08-22 20:13:02] [Rank 0] step:3601/10000 train_time:308563ms step_avg:85.69ms +[2025-08-22 20:13:02] [Rank 0] step:3601/10000 train_time:308563ms step_avg:85.69ms +[2025-08-22 20:13:04] [Rank 0] step:3621/10000 train_time:310237ms step_avg:85.68ms +[2025-08-22 20:13:04] [Rank 0] step:3621/10000 train_time:310237ms step_avg:85.68ms +[2025-08-22 20:13:05] [Rank 0] step:3641/10000 train_time:311985ms step_avg:85.69ms +[2025-08-22 20:13:05] [Rank 0] step:3641/10000 train_time:311985ms step_avg:85.69ms +[2025-08-22 20:13:07] [Rank 0] step:3661/10000 train_time:313736ms step_avg:85.70ms +[2025-08-22 20:13:07] [Rank 0] step:3661/10000 train_time:313736ms step_avg:85.70ms +[2025-08-22 20:13:09] [Rank 0] step:3681/10000 train_time:315487ms step_avg:85.71ms +[2025-08-22 20:13:09] [Rank 0] step:3681/10000 train_time:315487ms step_avg:85.71ms +[2025-08-22 20:13:11] [Rank 0] step:3701/10000 train_time:317239ms step_avg:85.72ms +[2025-08-22 20:13:11] [Rank 0] step:3701/10000 train_time:317239ms step_avg:85.72ms +[2025-08-22 20:13:13] [Rank 0] step:3721/10000 train_time:319019ms step_avg:85.73ms +[2025-08-22 20:13:13] [Rank 0] step:3721/10000 train_time:319019ms step_avg:85.73ms +[2025-08-22 20:13:14] [Rank 0] 
step:3741/10000 train_time:320810ms step_avg:85.76ms +[2025-08-22 20:13:14] [Rank 0] step:3741/10000 train_time:320810ms step_avg:85.76ms +[2025-08-22 20:13:16] [Rank 0] step:3761/10000 train_time:322600ms step_avg:85.78ms +[2025-08-22 20:13:16] [Rank 0] step:3761/10000 train_time:322600ms step_avg:85.78ms +[2025-08-22 20:13:18] [Rank 0] step:3781/10000 train_time:324392ms step_avg:85.80ms +[2025-08-22 20:13:18] [Rank 0] step:3781/10000 train_time:324392ms step_avg:85.80ms +[2025-08-22 20:13:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:13:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:13:33] [Rank 0] PRINT: step:3800/10000 val_loss:4.0773 svd_entropy: attn_qk:H=0.7456,top10E=0.28,eRank=174.8,q75/q25=78.81 attn_vo:H=0.7871,top10E=0.09,eRank=294.5,q75/q25=inf mlp_w1:H=0.8384,top10E=0.18,eRank=269.6,q75/q25=10.50 mlp_w2:H=0.9110,top10E=0.11,eRank=426.2,q75/q25=6.33 vo_prod:H=0.6262,top10E=0.16,eRank=138.7,q75/q25=inf train_time:326274ms step_avg:85.86ms +[2025-08-22 20:13:33] [Rank 0] PRINT: step:3800/10000 val_loss:4.0773 svd_entropy: attn_qk:H=0.7456,top10E=0.28,eRank=174.8,q75/q25=78.81 attn_vo:H=0.7871,top10E=0.09,eRank=294.5,q75/q25=inf mlp_w1:H=0.8384,top10E=0.18,eRank=269.6,q75/q25=10.50 mlp_w2:H=0.9110,top10E=0.11,eRank=426.2,q75/q25=6.33 vo_prod:H=0.6262,top10E=0.16,eRank=138.7,q75/q25=inf train_time:326274ms step_avg:85.86ms +[2025-08-22 20:13:34] [Rank 0] step:3801/10000 train_time:326289ms step_avg:85.84ms +[2025-08-22 20:13:34] [Rank 0] step:3801/10000 train_time:326289ms step_avg:85.84ms +[2025-08-22 20:13:35] [Rank 0] step:3821/10000 train_time:327996ms step_avg:85.84ms +[2025-08-22 20:13:35] [Rank 0] step:3821/10000 train_time:327996ms step_avg:85.84ms +[2025-08-22 20:13:37] [Rank 0] step:3841/10000 train_time:329784ms step_avg:85.86ms +[2025-08-22 
20:13:37] [Rank 0] step:3841/10000 train_time:329784ms step_avg:85.86ms +[2025-08-22 20:13:39] [Rank 0] step:3861/10000 train_time:331570ms step_avg:85.88ms +[2025-08-22 20:13:39] [Rank 0] step:3861/10000 train_time:331570ms step_avg:85.88ms +[2025-08-22 20:13:41] [Rank 0] step:3881/10000 train_time:333354ms step_avg:85.89ms +[2025-08-22 20:13:41] [Rank 0] step:3881/10000 train_time:333354ms step_avg:85.89ms +[2025-08-22 20:13:42] [Rank 0] step:3901/10000 train_time:335138ms step_avg:85.91ms +[2025-08-22 20:13:42] [Rank 0] step:3901/10000 train_time:335138ms step_avg:85.91ms +[2025-08-22 20:13:44] [Rank 0] step:3921/10000 train_time:336924ms step_avg:85.93ms +[2025-08-22 20:13:44] [Rank 0] step:3921/10000 train_time:336924ms step_avg:85.93ms +[2025-08-22 20:13:46] [Rank 0] step:3941/10000 train_time:338711ms step_avg:85.95ms +[2025-08-22 20:13:46] [Rank 0] step:3941/10000 train_time:338711ms step_avg:85.95ms +[2025-08-22 20:13:48] [Rank 0] step:3961/10000 train_time:340498ms step_avg:85.96ms +[2025-08-22 20:13:48] [Rank 0] step:3961/10000 train_time:340498ms step_avg:85.96ms +[2025-08-22 20:13:50] [Rank 0] step:3981/10000 train_time:342285ms step_avg:85.98ms +[2025-08-22 20:13:50] [Rank 0] step:3981/10000 train_time:342285ms step_avg:85.98ms +[2025-08-22 20:13:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:13:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:14:05] [Rank 0] PRINT: step:4000/10000 val_loss:4.0463 svd_entropy: attn_qk:H=0.7480,top10E=0.27,eRank=176.5,q75/q25=79.38 attn_vo:H=0.7893,top10E=0.09,eRank=298.8,q75/q25=inf mlp_w1:H=0.8420,top10E=0.18,eRank=275.8,q75/q25=10.36 mlp_w2:H=0.9134,top10E=0.11,eRank=433.0,q75/q25=6.11 vo_prod:H=0.6296,top10E=0.16,eRank=142.2,q75/q25=inf train_time:344161ms step_avg:86.04ms +[2025-08-22 20:14:05] [Rank 0] PRINT: step:4000/10000 val_loss:4.0463 svd_entropy: attn_qk:H=0.7480,top10E=0.27,eRank=176.5,q75/q25=79.38 attn_vo:H=0.7893,top10E=0.09,eRank=298.8,q75/q25=inf mlp_w1:H=0.8420,top10E=0.18,eRank=275.8,q75/q25=10.36 mlp_w2:H=0.9134,top10E=0.11,eRank=433.0,q75/q25=6.11 vo_prod:H=0.6296,top10E=0.16,eRank=142.2,q75/q25=inf train_time:344161ms step_avg:86.04ms +[2025-08-22 20:14:05] [Rank 0] step:4001/10000 train_time:344175ms step_avg:86.02ms +[2025-08-22 20:14:05] [Rank 0] step:4001/10000 train_time:344175ms step_avg:86.02ms +[2025-08-22 20:14:07] [Rank 0] step:4021/10000 train_time:345880ms step_avg:86.02ms +[2025-08-22 20:14:07] [Rank 0] step:4021/10000 train_time:345880ms step_avg:86.02ms +[2025-08-22 20:14:09] [Rank 0] step:4041/10000 train_time:347663ms step_avg:86.03ms +[2025-08-22 20:14:09] [Rank 0] step:4041/10000 train_time:347663ms step_avg:86.03ms +[2025-08-22 20:14:11] [Rank 0] step:4061/10000 train_time:349443ms step_avg:86.05ms +[2025-08-22 20:14:11] [Rank 0] step:4061/10000 train_time:349443ms step_avg:86.05ms +[2025-08-22 20:14:13] [Rank 0] step:4081/10000 train_time:351397ms step_avg:86.11ms +[2025-08-22 20:14:13] [Rank 0] step:4081/10000 train_time:351397ms step_avg:86.11ms +[2025-08-22 20:14:14] [Rank 0] step:4101/10000 train_time:353179ms step_avg:86.12ms +[2025-08-22 20:14:14] [Rank 0] step:4101/10000 train_time:353179ms step_avg:86.12ms +[2025-08-22 20:14:16] [Rank 0] step:4121/10000 train_time:354963ms step_avg:86.14ms +[2025-08-22 20:14:16] [Rank 0] step:4121/10000 train_time:354963ms step_avg:86.14ms +[2025-08-22 20:14:18] [Rank 0] 
step:4141/10000 train_time:356746ms step_avg:86.15ms +[2025-08-22 20:14:18] [Rank 0] step:4141/10000 train_time:356746ms step_avg:86.15ms +[2025-08-22 20:14:20] [Rank 0] step:4161/10000 train_time:358529ms step_avg:86.16ms +[2025-08-22 20:14:20] [Rank 0] step:4161/10000 train_time:358529ms step_avg:86.16ms +[2025-08-22 20:14:22] [Rank 0] step:4181/10000 train_time:360315ms step_avg:86.18ms +[2025-08-22 20:14:22] [Rank 0] step:4181/10000 train_time:360315ms step_avg:86.18ms +[2025-08-22 20:14:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:14:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:14:37] [Rank 0] PRINT: step:4200/10000 val_loss:4.0276 svd_entropy: attn_qk:H=0.7505,top10E=0.27,eRank=178.4,q75/q25=79.81 attn_vo:H=0.7913,top10E=0.08,eRank=302.7,q75/q25=inf mlp_w1:H=0.8453,top10E=0.17,eRank=281.7,q75/q25=10.20 mlp_w2:H=0.9155,top10E=0.11,eRank=439.1,q75/q25=5.93 vo_prod:H=0.6326,top10E=0.15,eRank=145.5,q75/q25=inf train_time:362190ms step_avg:86.24ms +[2025-08-22 20:14:37] [Rank 0] PRINT: step:4200/10000 val_loss:4.0276 svd_entropy: attn_qk:H=0.7505,top10E=0.27,eRank=178.4,q75/q25=79.81 attn_vo:H=0.7913,top10E=0.08,eRank=302.7,q75/q25=inf mlp_w1:H=0.8453,top10E=0.17,eRank=281.7,q75/q25=10.20 mlp_w2:H=0.9155,top10E=0.11,eRank=439.1,q75/q25=5.93 vo_prod:H=0.6326,top10E=0.15,eRank=145.5,q75/q25=inf train_time:362190ms step_avg:86.24ms +[2025-08-22 20:14:37] [Rank 0] step:4201/10000 train_time:362205ms step_avg:86.22ms +[2025-08-22 20:14:37] [Rank 0] step:4201/10000 train_time:362205ms step_avg:86.22ms +[2025-08-22 20:14:39] [Rank 0] step:4221/10000 train_time:363906ms step_avg:86.21ms +[2025-08-22 20:14:39] [Rank 0] step:4221/10000 train_time:363906ms step_avg:86.21ms +[2025-08-22 20:14:41] [Rank 0] step:4241/10000 train_time:365689ms step_avg:86.23ms +[2025-08-22 
20:14:41] [Rank 0] step:4241/10000 train_time:365689ms step_avg:86.23ms +[2025-08-22 20:14:43] [Rank 0] step:4261/10000 train_time:367471ms step_avg:86.24ms +[2025-08-22 20:14:43] [Rank 0] step:4261/10000 train_time:367471ms step_avg:86.24ms +[2025-08-22 20:14:44] [Rank 0] step:4281/10000 train_time:369255ms step_avg:86.25ms +[2025-08-22 20:14:44] [Rank 0] step:4281/10000 train_time:369255ms step_avg:86.25ms +[2025-08-22 20:14:46] [Rank 0] step:4301/10000 train_time:371039ms step_avg:86.27ms +[2025-08-22 20:14:46] [Rank 0] step:4301/10000 train_time:371039ms step_avg:86.27ms +[2025-08-22 20:14:48] [Rank 0] step:4321/10000 train_time:372823ms step_avg:86.28ms +[2025-08-22 20:14:48] [Rank 0] step:4321/10000 train_time:372823ms step_avg:86.28ms +[2025-08-22 20:14:50] [Rank 0] step:4341/10000 train_time:374604ms step_avg:86.29ms +[2025-08-22 20:14:50] [Rank 0] step:4341/10000 train_time:374604ms step_avg:86.29ms +[2025-08-22 20:14:51] [Rank 0] step:4361/10000 train_time:376390ms step_avg:86.31ms +[2025-08-22 20:14:51] [Rank 0] step:4361/10000 train_time:376390ms step_avg:86.31ms +[2025-08-22 20:14:53] [Rank 0] step:4381/10000 train_time:378172ms step_avg:86.32ms +[2025-08-22 20:14:53] [Rank 0] step:4381/10000 train_time:378172ms step_avg:86.32ms +[2025-08-22 20:14:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:14:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:15:09] [Rank 0] PRINT: step:4400/10000 val_loss:4.0076 svd_entropy: attn_qk:H=0.7528,top10E=0.27,eRank=180.2,q75/q25=80.05 attn_vo:H=0.7931,top10E=0.08,eRank=306.4,q75/q25=inf mlp_w1:H=0.8484,top10E=0.17,eRank=287.3,q75/q25=10.07 mlp_w2:H=0.9174,top10E=0.11,eRank=444.6,q75/q25=5.76 vo_prod:H=0.6355,top10E=0.15,eRank=148.7,q75/q25=inf train_time:380046ms step_avg:86.37ms +[2025-08-22 20:15:09] [Rank 0] PRINT: step:4400/10000 val_loss:4.0076 svd_entropy: attn_qk:H=0.7528,top10E=0.27,eRank=180.2,q75/q25=80.05 attn_vo:H=0.7931,top10E=0.08,eRank=306.4,q75/q25=inf mlp_w1:H=0.8484,top10E=0.17,eRank=287.3,q75/q25=10.07 mlp_w2:H=0.9174,top10E=0.11,eRank=444.6,q75/q25=5.76 vo_prod:H=0.6355,top10E=0.15,eRank=148.7,q75/q25=inf train_time:380046ms step_avg:86.37ms +[2025-08-22 20:15:09] [Rank 0] step:4401/10000 train_time:380061ms step_avg:86.36ms +[2025-08-22 20:15:09] [Rank 0] step:4401/10000 train_time:380061ms step_avg:86.36ms +[2025-08-22 20:15:11] [Rank 0] step:4421/10000 train_time:381777ms step_avg:86.36ms +[2025-08-22 20:15:11] [Rank 0] step:4421/10000 train_time:381777ms step_avg:86.36ms +[2025-08-22 20:15:13] [Rank 0] step:4441/10000 train_time:383560ms step_avg:86.37ms +[2025-08-22 20:15:13] [Rank 0] step:4441/10000 train_time:383560ms step_avg:86.37ms +[2025-08-22 20:15:14] [Rank 0] step:4461/10000 train_time:385388ms step_avg:86.39ms +[2025-08-22 20:15:14] [Rank 0] step:4461/10000 train_time:385388ms step_avg:86.39ms +[2025-08-22 20:15:16] [Rank 0] step:4481/10000 train_time:387242ms step_avg:86.42ms +[2025-08-22 20:15:16] [Rank 0] step:4481/10000 train_time:387242ms step_avg:86.42ms +[2025-08-22 20:15:18] [Rank 0] step:4501/10000 train_time:389033ms step_avg:86.43ms +[2025-08-22 20:15:18] [Rank 0] step:4501/10000 train_time:389033ms step_avg:86.43ms +[2025-08-22 20:15:20] [Rank 0] step:4521/10000 train_time:390825ms step_avg:86.45ms +[2025-08-22 20:15:20] [Rank 0] step:4521/10000 train_time:390825ms step_avg:86.45ms +[2025-08-22 20:15:22] [Rank 0] 
step:4541/10000 train_time:392618ms step_avg:86.46ms +[2025-08-22 20:15:22] [Rank 0] step:4541/10000 train_time:392618ms step_avg:86.46ms +[2025-08-22 20:15:23] [Rank 0] step:4561/10000 train_time:394412ms step_avg:86.47ms +[2025-08-22 20:15:23] [Rank 0] step:4561/10000 train_time:394412ms step_avg:86.47ms +[2025-08-22 20:15:25] [Rank 0] step:4581/10000 train_time:396210ms step_avg:86.49ms +[2025-08-22 20:15:25] [Rank 0] step:4581/10000 train_time:396210ms step_avg:86.49ms +[2025-08-22 20:15:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:15:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:15:41] [Rank 0] PRINT: step:4600/10000 val_loss:3.9837 svd_entropy: attn_qk:H=0.7551,top10E=0.26,eRank=182.0,q75/q25=80.83 attn_vo:H=0.7948,top10E=0.08,eRank=309.9,q75/q25=inf mlp_w1:H=0.8513,top10E=0.17,eRank=292.7,q75/q25=9.90 mlp_w2:H=0.9191,top10E=0.11,eRank=449.8,q75/q25=5.60 vo_prod:H=0.6382,top10E=0.15,eRank=151.6,q75/q25=inf train_time:398098ms step_avg:86.54ms +[2025-08-22 20:15:41] [Rank 0] PRINT: step:4600/10000 val_loss:3.9837 svd_entropy: attn_qk:H=0.7551,top10E=0.26,eRank=182.0,q75/q25=80.83 attn_vo:H=0.7948,top10E=0.08,eRank=309.9,q75/q25=inf mlp_w1:H=0.8513,top10E=0.17,eRank=292.7,q75/q25=9.90 mlp_w2:H=0.9191,top10E=0.11,eRank=449.8,q75/q25=5.60 vo_prod:H=0.6382,top10E=0.15,eRank=151.6,q75/q25=inf train_time:398098ms step_avg:86.54ms +[2025-08-22 20:15:41] [Rank 0] step:4601/10000 train_time:398113ms step_avg:86.53ms +[2025-08-22 20:15:41] [Rank 0] step:4601/10000 train_time:398113ms step_avg:86.53ms +[2025-08-22 20:15:43] [Rank 0] step:4621/10000 train_time:399824ms step_avg:86.52ms +[2025-08-22 20:15:43] [Rank 0] step:4621/10000 train_time:399824ms step_avg:86.52ms +[2025-08-22 20:15:45] [Rank 0] step:4641/10000 train_time:401615ms step_avg:86.54ms +[2025-08-22 
20:15:45] [Rank 0] step:4641/10000 train_time:401615ms step_avg:86.54ms +[2025-08-22 20:15:46] [Rank 0] step:4661/10000 train_time:403403ms step_avg:86.55ms +[2025-08-22 20:15:46] [Rank 0] step:4661/10000 train_time:403403ms step_avg:86.55ms +[2025-08-22 20:15:48] [Rank 0] step:4681/10000 train_time:405194ms step_avg:86.56ms +[2025-08-22 20:15:48] [Rank 0] step:4681/10000 train_time:405194ms step_avg:86.56ms +[2025-08-22 20:15:50] [Rank 0] step:4701/10000 train_time:406985ms step_avg:86.57ms +[2025-08-22 20:15:50] [Rank 0] step:4701/10000 train_time:406985ms step_avg:86.57ms +[2025-08-22 20:15:52] [Rank 0] step:4721/10000 train_time:408778ms step_avg:86.59ms +[2025-08-22 20:15:52] [Rank 0] step:4721/10000 train_time:408778ms step_avg:86.59ms +[2025-08-22 20:15:53] [Rank 0] step:4741/10000 train_time:410568ms step_avg:86.60ms +[2025-08-22 20:15:53] [Rank 0] step:4741/10000 train_time:410568ms step_avg:86.60ms +[2025-08-22 20:15:55] [Rank 0] step:4761/10000 train_time:412360ms step_avg:86.61ms +[2025-08-22 20:15:55] [Rank 0] step:4761/10000 train_time:412360ms step_avg:86.61ms +[2025-08-22 20:15:57] [Rank 0] step:4781/10000 train_time:414150ms step_avg:86.62ms +[2025-08-22 20:15:57] [Rank 0] step:4781/10000 train_time:414150ms step_avg:86.62ms +[2025-08-22 20:15:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:15:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:16:13] [Rank 0] PRINT: step:4800/10000 val_loss:3.9839 svd_entropy: attn_qk:H=0.7572,top10E=0.26,eRank=183.7,q75/q25=81.39 attn_vo:H=0.7964,top10E=0.08,eRank=313.1,q75/q25=inf mlp_w1:H=0.8540,top10E=0.16,eRank=297.9,q75/q25=9.76 mlp_w2:H=0.9207,top10E=0.11,eRank=454.5,q75/q25=5.46 vo_prod:H=0.6406,top10E=0.15,eRank=154.4,q75/q25=inf train_time:416032ms step_avg:86.67ms +[2025-08-22 20:16:13] [Rank 0] PRINT: step:4800/10000 val_loss:3.9839 svd_entropy: attn_qk:H=0.7572,top10E=0.26,eRank=183.7,q75/q25=81.39 attn_vo:H=0.7964,top10E=0.08,eRank=313.1,q75/q25=inf mlp_w1:H=0.8540,top10E=0.16,eRank=297.9,q75/q25=9.76 mlp_w2:H=0.9207,top10E=0.11,eRank=454.5,q75/q25=5.46 vo_prod:H=0.6406,top10E=0.15,eRank=154.4,q75/q25=inf train_time:416032ms step_avg:86.67ms +[2025-08-22 20:16:13] [Rank 0] step:4801/10000 train_time:416045ms step_avg:86.66ms +[2025-08-22 20:16:13] [Rank 0] step:4801/10000 train_time:416045ms step_avg:86.66ms +[2025-08-22 20:16:15] [Rank 0] step:4821/10000 train_time:417766ms step_avg:86.66ms +[2025-08-22 20:16:15] [Rank 0] step:4821/10000 train_time:417766ms step_avg:86.66ms +[2025-08-22 20:16:16] [Rank 0] step:4841/10000 train_time:419591ms step_avg:86.67ms +[2025-08-22 20:16:16] [Rank 0] step:4841/10000 train_time:419591ms step_avg:86.67ms +[2025-08-22 20:16:18] [Rank 0] step:4861/10000 train_time:421380ms step_avg:86.69ms +[2025-08-22 20:16:18] [Rank 0] step:4861/10000 train_time:421380ms step_avg:86.69ms +[2025-08-22 20:16:20] [Rank 0] step:4881/10000 train_time:423169ms step_avg:86.70ms +[2025-08-22 20:16:20] [Rank 0] step:4881/10000 train_time:423169ms step_avg:86.70ms +[2025-08-22 20:16:22] [Rank 0] step:4901/10000 train_time:424956ms step_avg:86.71ms +[2025-08-22 20:16:22] [Rank 0] step:4901/10000 train_time:424956ms step_avg:86.71ms +[2025-08-22 20:16:24] [Rank 0] step:4921/10000 train_time:426747ms step_avg:86.72ms +[2025-08-22 20:16:24] [Rank 0] step:4921/10000 train_time:426747ms step_avg:86.72ms +[2025-08-22 20:16:25] [Rank 0] 
step:4941/10000 train_time:428541ms step_avg:86.73ms +[2025-08-22 20:16:25] [Rank 0] step:4941/10000 train_time:428541ms step_avg:86.73ms +[2025-08-22 20:16:27] [Rank 0] step:4961/10000 train_time:430331ms step_avg:86.74ms +[2025-08-22 20:16:27] [Rank 0] step:4961/10000 train_time:430331ms step_avg:86.74ms +[2025-08-22 20:16:29] [Rank 0] step:4981/10000 train_time:432124ms step_avg:86.75ms +[2025-08-22 20:16:29] [Rank 0] step:4981/10000 train_time:432124ms step_avg:86.75ms +[2025-08-22 20:16:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:16:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:16:44] [Rank 0] PRINT: step:5000/10000 val_loss:3.9379 svd_entropy: attn_qk:H=0.7592,top10E=0.26,eRank=185.4,q75/q25=81.64 attn_vo:H=0.7978,top10E=0.08,eRank=316.0,q75/q25=inf mlp_w1:H=0.8565,top10E=0.16,eRank=302.7,q75/q25=9.61 mlp_w2:H=0.9221,top10E=0.11,eRank=458.8,q75/q25=5.33 vo_prod:H=0.6430,top10E=0.15,eRank=157.2,q75/q25=inf train_time:434008ms step_avg:86.80ms +[2025-08-22 20:16:44] [Rank 0] PRINT: step:5000/10000 val_loss:3.9379 svd_entropy: attn_qk:H=0.7592,top10E=0.26,eRank=185.4,q75/q25=81.64 attn_vo:H=0.7978,top10E=0.08,eRank=316.0,q75/q25=inf mlp_w1:H=0.8565,top10E=0.16,eRank=302.7,q75/q25=9.61 mlp_w2:H=0.9221,top10E=0.11,eRank=458.8,q75/q25=5.33 vo_prod:H=0.6430,top10E=0.15,eRank=157.2,q75/q25=inf train_time:434008ms step_avg:86.80ms +[2025-08-22 20:16:44] [Rank 0] step:5001/10000 train_time:434024ms step_avg:86.79ms +[2025-08-22 20:16:44] [Rank 0] step:5001/10000 train_time:434024ms step_avg:86.79ms +[2025-08-22 20:16:46] [Rank 0] step:5021/10000 train_time:435732ms step_avg:86.78ms +[2025-08-22 20:16:46] [Rank 0] step:5021/10000 train_time:435732ms step_avg:86.78ms +[2025-08-22 20:16:48] [Rank 0] step:5041/10000 train_time:437525ms step_avg:86.79ms +[2025-08-22 
20:16:48] [Rank 0] step:5041/10000 train_time:437525ms step_avg:86.79ms +[2025-08-22 20:16:50] [Rank 0] step:5061/10000 train_time:439317ms step_avg:86.80ms +[2025-08-22 20:16:50] [Rank 0] step:5061/10000 train_time:439317ms step_avg:86.80ms +[2025-08-22 20:16:52] [Rank 0] step:5081/10000 train_time:441113ms step_avg:86.82ms +[2025-08-22 20:16:52] [Rank 0] step:5081/10000 train_time:441113ms step_avg:86.82ms +[2025-08-22 20:16:53] [Rank 0] step:5101/10000 train_time:442905ms step_avg:86.83ms +[2025-08-22 20:16:53] [Rank 0] step:5101/10000 train_time:442905ms step_avg:86.83ms +[2025-08-22 20:16:55] [Rank 0] step:5121/10000 train_time:444701ms step_avg:86.84ms +[2025-08-22 20:16:55] [Rank 0] step:5121/10000 train_time:444701ms step_avg:86.84ms +[2025-08-22 20:16:57] [Rank 0] step:5141/10000 train_time:446499ms step_avg:86.85ms +[2025-08-22 20:16:57] [Rank 0] step:5141/10000 train_time:446499ms step_avg:86.85ms +[2025-08-22 20:16:59] [Rank 0] step:5161/10000 train_time:448293ms step_avg:86.86ms +[2025-08-22 20:16:59] [Rank 0] step:5161/10000 train_time:448293ms step_avg:86.86ms +[2025-08-22 20:17:00] [Rank 0] step:5181/10000 train_time:450093ms step_avg:86.87ms +[2025-08-22 20:17:00] [Rank 0] step:5181/10000 train_time:450093ms step_avg:86.87ms +[2025-08-22 20:17:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:17:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:17:16] [Rank 0] PRINT: step:5200/10000 val_loss:3.9203 svd_entropy: attn_qk:H=0.7611,top10E=0.25,eRank=186.9,q75/q25=81.53 attn_vo:H=0.7991,top10E=0.08,eRank=318.8,q75/q25=inf mlp_w1:H=0.8589,top10E=0.16,eRank=307.3,q75/q25=9.45 mlp_w2:H=0.9234,top10E=0.10,eRank=462.9,q75/q25=5.20 vo_prod:H=0.6451,top10E=0.14,eRank=159.6,q75/q25=inf train_time:452003ms step_avg:86.92ms +[2025-08-22 20:17:16] [Rank 0] PRINT: step:5200/10000 val_loss:3.9203 svd_entropy: attn_qk:H=0.7611,top10E=0.25,eRank=186.9,q75/q25=81.53 attn_vo:H=0.7991,top10E=0.08,eRank=318.8,q75/q25=inf mlp_w1:H=0.8589,top10E=0.16,eRank=307.3,q75/q25=9.45 mlp_w2:H=0.9234,top10E=0.10,eRank=462.9,q75/q25=5.20 vo_prod:H=0.6451,top10E=0.14,eRank=159.6,q75/q25=inf train_time:452003ms step_avg:86.92ms +[2025-08-22 20:17:16] [Rank 0] step:5201/10000 train_time:452018ms step_avg:86.91ms +[2025-08-22 20:17:16] [Rank 0] step:5201/10000 train_time:452018ms step_avg:86.91ms +[2025-08-22 20:17:18] [Rank 0] step:5221/10000 train_time:453756ms step_avg:86.91ms +[2025-08-22 20:17:18] [Rank 0] step:5221/10000 train_time:453756ms step_avg:86.91ms +[2025-08-22 20:17:20] [Rank 0] step:5241/10000 train_time:455577ms step_avg:86.93ms +[2025-08-22 20:17:20] [Rank 0] step:5241/10000 train_time:455577ms step_avg:86.93ms +[2025-08-22 20:17:21] [Rank 0] step:5261/10000 train_time:457399ms step_avg:86.94ms +[2025-08-22 20:17:21] [Rank 0] step:5261/10000 train_time:457399ms step_avg:86.94ms +[2025-08-22 20:17:23] [Rank 0] step:5281/10000 train_time:459221ms step_avg:86.96ms +[2025-08-22 20:17:23] [Rank 0] step:5281/10000 train_time:459221ms step_avg:86.96ms +[2025-08-22 20:17:25] [Rank 0] step:5301/10000 train_time:461051ms step_avg:86.97ms +[2025-08-22 20:17:25] [Rank 0] step:5301/10000 train_time:461051ms step_avg:86.97ms +[2025-08-22 20:17:27] [Rank 0] step:5321/10000 train_time:462874ms step_avg:86.99ms +[2025-08-22 20:17:27] [Rank 0] step:5321/10000 train_time:462874ms step_avg:86.99ms +[2025-08-22 20:17:29] [Rank 0] 
step:5341/10000 train_time:464696ms step_avg:87.01ms +[2025-08-22 20:17:29] [Rank 0] step:5341/10000 train_time:464696ms step_avg:87.01ms +[2025-08-22 20:17:31] [Rank 0] step:5361/10000 train_time:466519ms step_avg:87.02ms +[2025-08-22 20:17:31] [Rank 0] step:5361/10000 train_time:466519ms step_avg:87.02ms +[2025-08-22 20:17:32] [Rank 0] step:5381/10000 train_time:468342ms step_avg:87.04ms +[2025-08-22 20:17:32] [Rank 0] step:5381/10000 train_time:468342ms step_avg:87.04ms +[2025-08-22 20:17:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:17:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:17:48] [Rank 0] PRINT: step:5400/10000 val_loss:3.9007 svd_entropy: attn_qk:H=0.7628,top10E=0.25,eRank=188.3,q75/q25=81.51 attn_vo:H=0.8003,top10E=0.08,eRank=321.3,q75/q25=inf mlp_w1:H=0.8610,top10E=0.16,eRank=311.6,q75/q25=9.30 mlp_w2:H=0.9246,top10E=0.10,eRank=466.6,q75/q25=5.12 vo_prod:H=0.6470,top10E=0.14,eRank=161.8,q75/q25=inf train_time:470253ms step_avg:87.08ms +[2025-08-22 20:17:48] [Rank 0] PRINT: step:5400/10000 val_loss:3.9007 svd_entropy: attn_qk:H=0.7628,top10E=0.25,eRank=188.3,q75/q25=81.51 attn_vo:H=0.8003,top10E=0.08,eRank=321.3,q75/q25=inf mlp_w1:H=0.8610,top10E=0.16,eRank=311.6,q75/q25=9.30 mlp_w2:H=0.9246,top10E=0.10,eRank=466.6,q75/q25=5.12 vo_prod:H=0.6470,top10E=0.14,eRank=161.8,q75/q25=inf train_time:470253ms step_avg:87.08ms +[2025-08-22 20:17:48] [Rank 0] step:5401/10000 train_time:470268ms step_avg:87.07ms +[2025-08-22 20:17:48] [Rank 0] step:5401/10000 train_time:470268ms step_avg:87.07ms +[2025-08-22 20:17:50] [Rank 0] step:5421/10000 train_time:472005ms step_avg:87.07ms +[2025-08-22 20:17:50] [Rank 0] step:5421/10000 train_time:472005ms step_avg:87.07ms +[2025-08-22 20:17:51] [Rank 0] step:5441/10000 train_time:473821ms step_avg:87.08ms +[2025-08-22 
20:17:51] [Rank 0] step:5441/10000 train_time:473821ms step_avg:87.08ms +[2025-08-22 20:17:53] [Rank 0] step:5461/10000 train_time:475644ms step_avg:87.10ms +[2025-08-22 20:17:53] [Rank 0] step:5461/10000 train_time:475644ms step_avg:87.10ms +[2025-08-22 20:17:55] [Rank 0] step:5481/10000 train_time:477465ms step_avg:87.11ms +[2025-08-22 20:17:55] [Rank 0] step:5481/10000 train_time:477465ms step_avg:87.11ms +[2025-08-22 20:17:57] [Rank 0] step:5501/10000 train_time:479294ms step_avg:87.13ms +[2025-08-22 20:17:57] [Rank 0] step:5501/10000 train_time:479294ms step_avg:87.13ms +[2025-08-22 20:17:59] [Rank 0] step:5521/10000 train_time:481121ms step_avg:87.14ms +[2025-08-22 20:17:59] [Rank 0] step:5521/10000 train_time:481121ms step_avg:87.14ms +[2025-08-22 20:18:00] [Rank 0] step:5541/10000 train_time:482943ms step_avg:87.16ms +[2025-08-22 20:18:00] [Rank 0] step:5541/10000 train_time:482943ms step_avg:87.16ms +[2025-08-22 20:18:02] [Rank 0] step:5561/10000 train_time:484766ms step_avg:87.17ms +[2025-08-22 20:18:02] [Rank 0] step:5561/10000 train_time:484766ms step_avg:87.17ms +[2025-08-22 20:18:04] [Rank 0] step:5581/10000 train_time:486590ms step_avg:87.19ms +[2025-08-22 20:18:04] [Rank 0] step:5581/10000 train_time:486590ms step_avg:87.19ms +[2025-08-22 20:18:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:18:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:18:19] [Rank 0] PRINT: step:5600/10000 val_loss:3.8946 svd_entropy: attn_qk:H=0.7644,top10E=0.25,eRank=189.7,q75/q25=81.72 attn_vo:H=0.8015,top10E=0.08,eRank=323.7,q75/q25=inf mlp_w1:H=0.8630,top10E=0.16,eRank=315.8,q75/q25=9.15 mlp_w2:H=0.9257,top10E=0.10,eRank=470.0,q75/q25=5.02 vo_prod:H=0.6488,top10E=0.14,eRank=164.0,q75/q25=inf train_time:488503ms step_avg:87.23ms +[2025-08-22 20:18:19] [Rank 0] PRINT: step:5600/10000 val_loss:3.8946 svd_entropy: attn_qk:H=0.7644,top10E=0.25,eRank=189.7,q75/q25=81.72 attn_vo:H=0.8015,top10E=0.08,eRank=323.7,q75/q25=inf mlp_w1:H=0.8630,top10E=0.16,eRank=315.8,q75/q25=9.15 mlp_w2:H=0.9257,top10E=0.10,eRank=470.0,q75/q25=5.02 vo_prod:H=0.6488,top10E=0.14,eRank=164.0,q75/q25=inf train_time:488503ms step_avg:87.23ms +[2025-08-22 20:18:20] [Rank 0] step:5601/10000 train_time:488519ms step_avg:87.22ms +[2025-08-22 20:18:20] [Rank 0] step:5601/10000 train_time:488519ms step_avg:87.22ms +[2025-08-22 20:18:21] [Rank 0] step:5621/10000 train_time:490248ms step_avg:87.22ms +[2025-08-22 20:18:21] [Rank 0] step:5621/10000 train_time:490248ms step_avg:87.22ms +[2025-08-22 20:18:23] [Rank 0] step:5641/10000 train_time:492068ms step_avg:87.23ms +[2025-08-22 20:18:23] [Rank 0] step:5641/10000 train_time:492068ms step_avg:87.23ms +[2025-08-22 20:18:25] [Rank 0] step:5661/10000 train_time:493884ms step_avg:87.24ms +[2025-08-22 20:18:25] [Rank 0] step:5661/10000 train_time:493884ms step_avg:87.24ms +[2025-08-22 20:18:27] [Rank 0] step:5681/10000 train_time:495705ms step_avg:87.26ms +[2025-08-22 20:18:27] [Rank 0] step:5681/10000 train_time:495705ms step_avg:87.26ms +[2025-08-22 20:18:29] [Rank 0] step:5701/10000 train_time:497527ms step_avg:87.27ms +[2025-08-22 20:18:29] [Rank 0] step:5701/10000 train_time:497527ms step_avg:87.27ms +[2025-08-22 20:18:31] [Rank 0] step:5721/10000 train_time:499351ms step_avg:87.28ms +[2025-08-22 20:18:31] [Rank 0] step:5721/10000 train_time:499351ms step_avg:87.28ms +[2025-08-22 20:18:32] [Rank 0] 
step:5741/10000 train_time:501176ms step_avg:87.30ms +[2025-08-22 20:18:32] [Rank 0] step:5741/10000 train_time:501176ms step_avg:87.30ms +[2025-08-22 20:18:34] [Rank 0] step:5761/10000 train_time:503002ms step_avg:87.31ms +[2025-08-22 20:18:34] [Rank 0] step:5761/10000 train_time:503002ms step_avg:87.31ms +[2025-08-22 20:18:36] [Rank 0] step:5781/10000 train_time:504829ms step_avg:87.33ms +[2025-08-22 20:18:36] [Rank 0] step:5781/10000 train_time:504829ms step_avg:87.33ms +[2025-08-22 20:18:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:18:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:18:51] [Rank 0] PRINT: step:5800/10000 val_loss:3.8809 svd_entropy: attn_qk:H=0.7660,top10E=0.25,eRank=191.0,q75/q25=81.94 attn_vo:H=0.8025,top10E=0.08,eRank=326.0,q75/q25=inf mlp_w1:H=0.8649,top10E=0.15,eRank=319.8,q75/q25=9.02 mlp_w2:H=0.9267,top10E=0.10,eRank=473.2,q75/q25=4.94 vo_prod:H=0.6505,top10E=0.14,eRank=166.0,q75/q25=inf train_time:506744ms step_avg:87.37ms +[2025-08-22 20:18:51] [Rank 0] PRINT: step:5800/10000 val_loss:3.8809 svd_entropy: attn_qk:H=0.7660,top10E=0.25,eRank=191.0,q75/q25=81.94 attn_vo:H=0.8025,top10E=0.08,eRank=326.0,q75/q25=inf mlp_w1:H=0.8649,top10E=0.15,eRank=319.8,q75/q25=9.02 mlp_w2:H=0.9267,top10E=0.10,eRank=473.2,q75/q25=4.94 vo_prod:H=0.6505,top10E=0.14,eRank=166.0,q75/q25=inf train_time:506744ms step_avg:87.37ms +[2025-08-22 20:18:52] [Rank 0] step:5801/10000 train_time:506760ms step_avg:87.36ms +[2025-08-22 20:18:52] [Rank 0] step:5801/10000 train_time:506760ms step_avg:87.36ms +[2025-08-22 20:18:53] [Rank 0] step:5821/10000 train_time:508486ms step_avg:87.35ms +[2025-08-22 20:18:53] [Rank 0] step:5821/10000 train_time:508486ms step_avg:87.35ms +[2025-08-22 20:18:55] [Rank 0] step:5841/10000 train_time:510310ms step_avg:87.37ms +[2025-08-22 
20:18:55] [Rank 0] step:5841/10000 train_time:510310ms step_avg:87.37ms +[2025-08-22 20:18:57] [Rank 0] step:5861/10000 train_time:512138ms step_avg:87.38ms +[2025-08-22 20:18:57] [Rank 0] step:5861/10000 train_time:512138ms step_avg:87.38ms +[2025-08-22 20:18:59] [Rank 0] step:5881/10000 train_time:513963ms step_avg:87.39ms +[2025-08-22 20:18:59] [Rank 0] step:5881/10000 train_time:513963ms step_avg:87.39ms +[2025-08-22 20:19:01] [Rank 0] step:5901/10000 train_time:515788ms step_avg:87.41ms +[2025-08-22 20:19:01] [Rank 0] step:5901/10000 train_time:515788ms step_avg:87.41ms +[2025-08-22 20:19:02] [Rank 0] step:5921/10000 train_time:517615ms step_avg:87.42ms +[2025-08-22 20:19:02] [Rank 0] step:5921/10000 train_time:517615ms step_avg:87.42ms +[2025-08-22 20:19:04] [Rank 0] step:5941/10000 train_time:519447ms step_avg:87.43ms +[2025-08-22 20:19:04] [Rank 0] step:5941/10000 train_time:519447ms step_avg:87.43ms +[2025-08-22 20:19:06] [Rank 0] step:5961/10000 train_time:521280ms step_avg:87.45ms +[2025-08-22 20:19:06] [Rank 0] step:5961/10000 train_time:521280ms step_avg:87.45ms +[2025-08-22 20:19:08] [Rank 0] step:5981/10000 train_time:523108ms step_avg:87.46ms +[2025-08-22 20:19:08] [Rank 0] step:5981/10000 train_time:523108ms step_avg:87.46ms +[2025-08-22 20:19:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:19:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:19:23] [Rank 0] PRINT: step:6000/10000 val_loss:3.8577 svd_entropy: attn_qk:H=0.7675,top10E=0.25,eRank=192.4,q75/q25=82.11 attn_vo:H=0.8035,top10E=0.07,eRank=328.1,q75/q25=inf mlp_w1:H=0.8668,top10E=0.15,eRank=323.7,q75/q25=8.88 mlp_w2:H=0.9276,top10E=0.10,eRank=476.2,q75/q25=4.86 vo_prod:H=0.6521,top10E=0.14,eRank=168.0,q75/q25=inf train_time:525026ms step_avg:87.50ms +[2025-08-22 20:19:23] [Rank 0] PRINT: step:6000/10000 val_loss:3.8577 svd_entropy: attn_qk:H=0.7675,top10E=0.25,eRank=192.4,q75/q25=82.11 attn_vo:H=0.8035,top10E=0.07,eRank=328.1,q75/q25=inf mlp_w1:H=0.8668,top10E=0.15,eRank=323.7,q75/q25=8.88 mlp_w2:H=0.9276,top10E=0.10,eRank=476.2,q75/q25=4.86 vo_prod:H=0.6521,top10E=0.14,eRank=168.0,q75/q25=inf train_time:525026ms step_avg:87.50ms +[2025-08-22 20:19:23] [Rank 0] step:6001/10000 train_time:525041ms step_avg:87.49ms +[2025-08-22 20:19:23] [Rank 0] step:6001/10000 train_time:525041ms step_avg:87.49ms +[2025-08-22 20:19:25] [Rank 0] step:6021/10000 train_time:526799ms step_avg:87.49ms +[2025-08-22 20:19:25] [Rank 0] step:6021/10000 train_time:526799ms step_avg:87.49ms +[2025-08-22 20:19:27] [Rank 0] step:6041/10000 train_time:528702ms step_avg:87.52ms +[2025-08-22 20:19:27] [Rank 0] step:6041/10000 train_time:528702ms step_avg:87.52ms +[2025-08-22 20:19:29] [Rank 0] step:6061/10000 train_time:530556ms step_avg:87.54ms +[2025-08-22 20:19:29] [Rank 0] step:6061/10000 train_time:530556ms step_avg:87.54ms +[2025-08-22 20:19:31] [Rank 0] step:6081/10000 train_time:532384ms step_avg:87.55ms +[2025-08-22 20:19:31] [Rank 0] step:6081/10000 train_time:532384ms step_avg:87.55ms +[2025-08-22 20:19:33] [Rank 0] step:6101/10000 train_time:534216ms step_avg:87.56ms +[2025-08-22 20:19:33] [Rank 0] step:6101/10000 train_time:534216ms step_avg:87.56ms +[2025-08-22 20:19:35] [Rank 0] step:6121/10000 train_time:536312ms step_avg:87.62ms +[2025-08-22 20:19:35] [Rank 0] step:6121/10000 train_time:536312ms step_avg:87.62ms +[2025-08-22 20:19:37] [Rank 0] 
step:6141/10000 train_time:538150ms step_avg:87.63ms +[2025-08-22 20:19:37] [Rank 0] step:6141/10000 train_time:538150ms step_avg:87.63ms +[2025-08-22 20:19:38] [Rank 0] step:6161/10000 train_time:539978ms step_avg:87.64ms +[2025-08-22 20:19:38] [Rank 0] step:6161/10000 train_time:539978ms step_avg:87.64ms +[2025-08-22 20:19:40] [Rank 0] step:6181/10000 train_time:541803ms step_avg:87.66ms +[2025-08-22 20:19:40] [Rank 0] step:6181/10000 train_time:541803ms step_avg:87.66ms +[2025-08-22 20:19:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:19:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:19:55] [Rank 0] PRINT: step:6200/10000 val_loss:3.8393 svd_entropy: attn_qk:H=0.7689,top10E=0.24,eRank=193.6,q75/q25=82.12 attn_vo:H=0.8044,top10E=0.07,eRank=330.1,q75/q25=inf mlp_w1:H=0.8685,top10E=0.15,eRank=327.4,q75/q25=8.77 mlp_w2:H=0.9285,top10E=0.10,eRank=478.9,q75/q25=4.80 vo_prod:H=0.6536,top10E=0.14,eRank=169.8,q75/q25=inf train_time:543726ms step_avg:87.70ms +[2025-08-22 20:19:55] [Rank 0] PRINT: step:6200/10000 val_loss:3.8393 svd_entropy: attn_qk:H=0.7689,top10E=0.24,eRank=193.6,q75/q25=82.12 attn_vo:H=0.8044,top10E=0.07,eRank=330.1,q75/q25=inf mlp_w1:H=0.8685,top10E=0.15,eRank=327.4,q75/q25=8.77 mlp_w2:H=0.9285,top10E=0.10,eRank=478.9,q75/q25=4.80 vo_prod:H=0.6536,top10E=0.14,eRank=169.8,q75/q25=inf train_time:543726ms step_avg:87.70ms +[2025-08-22 20:19:56] [Rank 0] step:6201/10000 train_time:543741ms step_avg:87.69ms +[2025-08-22 20:19:56] [Rank 0] step:6201/10000 train_time:543741ms step_avg:87.69ms +[2025-08-22 20:19:57] [Rank 0] step:6221/10000 train_time:545490ms step_avg:87.69ms +[2025-08-22 20:19:57] [Rank 0] step:6221/10000 train_time:545490ms step_avg:87.69ms +[2025-08-22 20:19:59] [Rank 0] step:6241/10000 train_time:547310ms step_avg:87.70ms +[2025-08-22 
20:19:59] [Rank 0] step:6241/10000 train_time:547310ms step_avg:87.70ms +[2025-08-22 20:20:01] [Rank 0] step:6261/10000 train_time:549135ms step_avg:87.71ms +[2025-08-22 20:20:01] [Rank 0] step:6261/10000 train_time:549135ms step_avg:87.71ms +[2025-08-22 20:20:03] [Rank 0] step:6281/10000 train_time:550967ms step_avg:87.72ms +[2025-08-22 20:20:03] [Rank 0] step:6281/10000 train_time:550967ms step_avg:87.72ms +[2025-08-22 20:20:05] [Rank 0] step:6301/10000 train_time:552795ms step_avg:87.73ms +[2025-08-22 20:20:05] [Rank 0] step:6301/10000 train_time:552795ms step_avg:87.73ms +[2025-08-22 20:20:07] [Rank 0] step:6321/10000 train_time:554621ms step_avg:87.74ms +[2025-08-22 20:20:07] [Rank 0] step:6321/10000 train_time:554621ms step_avg:87.74ms +[2025-08-22 20:20:08] [Rank 0] step:6341/10000 train_time:556454ms step_avg:87.75ms +[2025-08-22 20:20:08] [Rank 0] step:6341/10000 train_time:556454ms step_avg:87.75ms +[2025-08-22 20:20:10] [Rank 0] step:6361/10000 train_time:558289ms step_avg:87.77ms +[2025-08-22 20:20:10] [Rank 0] step:6361/10000 train_time:558289ms step_avg:87.77ms +[2025-08-22 20:20:12] [Rank 0] step:6381/10000 train_time:560124ms step_avg:87.78ms +[2025-08-22 20:20:12] [Rank 0] step:6381/10000 train_time:560124ms step_avg:87.78ms +[2025-08-22 20:20:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:20:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:20:27] [Rank 0] PRINT: step:6400/10000 val_loss:3.8231 svd_entropy: attn_qk:H=0.7702,top10E=0.24,eRank=194.8,q75/q25=82.39 attn_vo:H=0.8052,top10E=0.07,eRank=331.9,q75/q25=inf mlp_w1:H=0.8700,top10E=0.15,eRank=330.7,q75/q25=8.66 mlp_w2:H=0.9292,top10E=0.10,eRank=481.2,q75/q25=4.73 vo_prod:H=0.6548,top10E=0.14,eRank=171.3,q75/q25=inf train_time:562043ms step_avg:87.82ms +[2025-08-22 20:20:27] [Rank 0] PRINT: step:6400/10000 val_loss:3.8231 svd_entropy: attn_qk:H=0.7702,top10E=0.24,eRank=194.8,q75/q25=82.39 attn_vo:H=0.8052,top10E=0.07,eRank=331.9,q75/q25=inf mlp_w1:H=0.8700,top10E=0.15,eRank=330.7,q75/q25=8.66 mlp_w2:H=0.9292,top10E=0.10,eRank=481.2,q75/q25=4.73 vo_prod:H=0.6548,top10E=0.14,eRank=171.3,q75/q25=inf train_time:562043ms step_avg:87.82ms +[2025-08-22 20:20:28] [Rank 0] step:6401/10000 train_time:562058ms step_avg:87.81ms +[2025-08-22 20:20:28] [Rank 0] step:6401/10000 train_time:562058ms step_avg:87.81ms +[2025-08-22 20:20:29] [Rank 0] step:6421/10000 train_time:563794ms step_avg:87.80ms +[2025-08-22 20:20:29] [Rank 0] step:6421/10000 train_time:563794ms step_avg:87.80ms +[2025-08-22 20:20:31] [Rank 0] step:6441/10000 train_time:565618ms step_avg:87.82ms +[2025-08-22 20:20:31] [Rank 0] step:6441/10000 train_time:565618ms step_avg:87.82ms +[2025-08-22 20:20:33] [Rank 0] step:6461/10000 train_time:567448ms step_avg:87.83ms +[2025-08-22 20:20:33] [Rank 0] step:6461/10000 train_time:567448ms step_avg:87.83ms +[2025-08-22 20:20:35] [Rank 0] step:6481/10000 train_time:569284ms step_avg:87.84ms +[2025-08-22 20:20:35] [Rank 0] step:6481/10000 train_time:569284ms step_avg:87.84ms +[2025-08-22 20:20:37] [Rank 0] step:6501/10000 train_time:571109ms step_avg:87.85ms +[2025-08-22 20:20:37] [Rank 0] step:6501/10000 train_time:571109ms step_avg:87.85ms +[2025-08-22 20:20:39] [Rank 0] step:6521/10000 train_time:572930ms step_avg:87.86ms +[2025-08-22 20:20:39] [Rank 0] step:6521/10000 train_time:572930ms step_avg:87.86ms +[2025-08-22 20:20:40] [Rank 0] 
step:6541/10000 train_time:574761ms step_avg:87.87ms +[2025-08-22 20:20:40] [Rank 0] step:6541/10000 train_time:574761ms step_avg:87.87ms +[2025-08-22 20:20:42] [Rank 0] step:6561/10000 train_time:576593ms step_avg:87.88ms +[2025-08-22 20:20:42] [Rank 0] step:6561/10000 train_time:576593ms step_avg:87.88ms +[2025-08-22 20:20:44] [Rank 0] step:6581/10000 train_time:578420ms step_avg:87.89ms +[2025-08-22 20:20:44] [Rank 0] step:6581/10000 train_time:578420ms step_avg:87.89ms +[2025-08-22 20:20:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:20:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:20:59] [Rank 0] PRINT: step:6600/10000 val_loss:3.8056 svd_entropy: attn_qk:H=0.7714,top10E=0.24,eRank=195.8,q75/q25=81.86 attn_vo:H=0.8060,top10E=0.07,eRank=333.5,q75/q25=inf mlp_w1:H=0.8714,top10E=0.15,eRank=333.7,q75/q25=8.54 mlp_w2:H=0.9299,top10E=0.10,eRank=483.3,q75/q25=4.68 vo_prod:H=0.6560,top10E=0.14,eRank=172.8,q75/q25=inf train_time:580343ms step_avg:87.93ms +[2025-08-22 20:20:59] [Rank 0] PRINT: step:6600/10000 val_loss:3.8056 svd_entropy: attn_qk:H=0.7714,top10E=0.24,eRank=195.8,q75/q25=81.86 attn_vo:H=0.8060,top10E=0.07,eRank=333.5,q75/q25=inf mlp_w1:H=0.8714,top10E=0.15,eRank=333.7,q75/q25=8.54 mlp_w2:H=0.9299,top10E=0.10,eRank=483.3,q75/q25=4.68 vo_prod:H=0.6560,top10E=0.14,eRank=172.8,q75/q25=inf train_time:580343ms step_avg:87.93ms +[2025-08-22 20:20:59] [Rank 0] step:6601/10000 train_time:580358ms step_avg:87.92ms +[2025-08-22 20:20:59] [Rank 0] step:6601/10000 train_time:580358ms step_avg:87.92ms +[2025-08-22 20:21:01] [Rank 0] step:6621/10000 train_time:582114ms step_avg:87.92ms +[2025-08-22 20:21:01] [Rank 0] step:6621/10000 train_time:582114ms step_avg:87.92ms +[2025-08-22 20:21:03] [Rank 0] step:6641/10000 train_time:583950ms step_avg:87.93ms +[2025-08-22 
20:21:03] [Rank 0] step:6641/10000 train_time:583950ms step_avg:87.93ms +[2025-08-22 20:21:05] [Rank 0] step:6661/10000 train_time:585779ms step_avg:87.94ms +[2025-08-22 20:21:05] [Rank 0] step:6661/10000 train_time:585779ms step_avg:87.94ms +[2025-08-22 20:21:07] [Rank 0] step:6681/10000 train_time:587628ms step_avg:87.96ms +[2025-08-22 20:21:07] [Rank 0] step:6681/10000 train_time:587628ms step_avg:87.96ms +[2025-08-22 20:21:09] [Rank 0] step:6701/10000 train_time:589495ms step_avg:87.97ms +[2025-08-22 20:21:09] [Rank 0] step:6701/10000 train_time:589495ms step_avg:87.97ms +[2025-08-22 20:21:11] [Rank 0] step:6721/10000 train_time:591359ms step_avg:87.99ms +[2025-08-22 20:21:11] [Rank 0] step:6721/10000 train_time:591359ms step_avg:87.99ms +[2025-08-22 20:21:12] [Rank 0] step:6741/10000 train_time:593219ms step_avg:88.00ms +[2025-08-22 20:21:12] [Rank 0] step:6741/10000 train_time:593219ms step_avg:88.00ms +[2025-08-22 20:21:14] [Rank 0] step:6761/10000 train_time:595077ms step_avg:88.02ms +[2025-08-22 20:21:14] [Rank 0] step:6761/10000 train_time:595077ms step_avg:88.02ms +[2025-08-22 20:21:16] [Rank 0] step:6781/10000 train_time:596940ms step_avg:88.03ms +[2025-08-22 20:21:16] [Rank 0] step:6781/10000 train_time:596940ms step_avg:88.03ms +[2025-08-22 20:21:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:21:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:21:32] [Rank 0] PRINT: step:6800/10000 val_loss:3.7899 svd_entropy: attn_qk:H=0.7724,top10E=0.24,eRank=196.7,q75/q25=81.75 attn_vo:H=0.8066,top10E=0.07,eRank=335.0,q75/q25=inf mlp_w1:H=0.8726,top10E=0.15,eRank=336.5,q75/q25=8.45 mlp_w2:H=0.9305,top10E=0.10,eRank=485.3,q75/q25=4.64 vo_prod:H=0.6571,top10E=0.13,eRank=174.1,q75/q25=inf train_time:598902ms step_avg:88.07ms +[2025-08-22 20:21:32] [Rank 0] PRINT: step:6800/10000 val_loss:3.7899 svd_entropy: attn_qk:H=0.7724,top10E=0.24,eRank=196.7,q75/q25=81.75 attn_vo:H=0.8066,top10E=0.07,eRank=335.0,q75/q25=inf mlp_w1:H=0.8726,top10E=0.15,eRank=336.5,q75/q25=8.45 mlp_w2:H=0.9305,top10E=0.10,eRank=485.3,q75/q25=4.64 vo_prod:H=0.6571,top10E=0.13,eRank=174.1,q75/q25=inf train_time:598902ms step_avg:88.07ms +[2025-08-22 20:21:32] [Rank 0] step:6801/10000 train_time:598916ms step_avg:88.06ms +[2025-08-22 20:21:32] [Rank 0] step:6801/10000 train_time:598916ms step_avg:88.06ms +[2025-08-22 20:21:34] [Rank 0] step:6821/10000 train_time:600697ms step_avg:88.07ms +[2025-08-22 20:21:34] [Rank 0] step:6821/10000 train_time:600697ms step_avg:88.07ms +[2025-08-22 20:21:35] [Rank 0] step:6841/10000 train_time:602551ms step_avg:88.08ms +[2025-08-22 20:21:35] [Rank 0] step:6841/10000 train_time:602551ms step_avg:88.08ms +[2025-08-22 20:21:37] [Rank 0] step:6861/10000 train_time:604406ms step_avg:88.09ms +[2025-08-22 20:21:37] [Rank 0] step:6861/10000 train_time:604406ms step_avg:88.09ms +[2025-08-22 20:21:39] [Rank 0] step:6881/10000 train_time:606265ms step_avg:88.11ms +[2025-08-22 20:21:39] [Rank 0] step:6881/10000 train_time:606265ms step_avg:88.11ms +[2025-08-22 20:21:41] [Rank 0] step:6901/10000 train_time:608123ms step_avg:88.12ms +[2025-08-22 20:21:41] [Rank 0] step:6901/10000 train_time:608123ms step_avg:88.12ms +[2025-08-22 20:21:43] [Rank 0] step:6921/10000 train_time:609980ms step_avg:88.13ms +[2025-08-22 20:21:43] [Rank 0] step:6921/10000 train_time:609980ms step_avg:88.13ms +[2025-08-22 20:21:45] [Rank 0] 
step:6941/10000 train_time:611840ms step_avg:88.15ms +[2025-08-22 20:21:45] [Rank 0] step:6941/10000 train_time:611840ms step_avg:88.15ms +[2025-08-22 20:21:47] [Rank 0] step:6961/10000 train_time:613713ms step_avg:88.16ms +[2025-08-22 20:21:47] [Rank 0] step:6961/10000 train_time:613713ms step_avg:88.16ms +[2025-08-22 20:21:48] [Rank 0] step:6981/10000 train_time:615578ms step_avg:88.18ms +[2025-08-22 20:21:48] [Rank 0] step:6981/10000 train_time:615578ms step_avg:88.18ms +[2025-08-22 20:21:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:21:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:22:04] [Rank 0] PRINT: step:7000/10000 val_loss:3.7757 svd_entropy: attn_qk:H=0.7733,top10E=0.24,eRank=197.6,q75/q25=81.66 attn_vo:H=0.8072,top10E=0.07,eRank=336.3,q75/q25=inf mlp_w1:H=0.8738,top10E=0.15,eRank=339.1,q75/q25=8.38 mlp_w2:H=0.9310,top10E=0.10,eRank=487.0,q75/q25=4.59 vo_prod:H=0.6581,top10E=0.13,eRank=175.4,q75/q25=inf train_time:617535ms step_avg:88.22ms +[2025-08-22 20:22:04] [Rank 0] PRINT: step:7000/10000 val_loss:3.7757 svd_entropy: attn_qk:H=0.7733,top10E=0.24,eRank=197.6,q75/q25=81.66 attn_vo:H=0.8072,top10E=0.07,eRank=336.3,q75/q25=inf mlp_w1:H=0.8738,top10E=0.15,eRank=339.1,q75/q25=8.38 mlp_w2:H=0.9310,top10E=0.10,eRank=487.0,q75/q25=4.59 vo_prod:H=0.6581,top10E=0.13,eRank=175.4,q75/q25=inf train_time:617535ms step_avg:88.22ms +[2025-08-22 20:22:04] [Rank 0] step:7001/10000 train_time:617550ms step_avg:88.21ms +[2025-08-22 20:22:04] [Rank 0] step:7001/10000 train_time:617550ms step_avg:88.21ms +[2025-08-22 20:22:06] [Rank 0] step:7021/10000 train_time:619319ms step_avg:88.21ms +[2025-08-22 20:22:06] [Rank 0] step:7021/10000 train_time:619319ms step_avg:88.21ms +[2025-08-22 20:22:07] [Rank 0] step:7041/10000 train_time:621173ms step_avg:88.22ms +[2025-08-22 
20:22:07] [Rank 0] step:7041/10000 train_time:621173ms step_avg:88.22ms +[2025-08-22 20:22:09] [Rank 0] step:7061/10000 train_time:623027ms step_avg:88.23ms +[2025-08-22 20:22:09] [Rank 0] step:7061/10000 train_time:623027ms step_avg:88.23ms +[2025-08-22 20:22:11] [Rank 0] step:7081/10000 train_time:624883ms step_avg:88.25ms +[2025-08-22 20:22:11] [Rank 0] step:7081/10000 train_time:624883ms step_avg:88.25ms +[2025-08-22 20:22:13] [Rank 0] step:7101/10000 train_time:626744ms step_avg:88.26ms +[2025-08-22 20:22:13] [Rank 0] step:7101/10000 train_time:626744ms step_avg:88.26ms +[2025-08-22 20:22:15] [Rank 0] step:7121/10000 train_time:628602ms step_avg:88.27ms +[2025-08-22 20:22:15] [Rank 0] step:7121/10000 train_time:628602ms step_avg:88.27ms +[2025-08-22 20:22:17] [Rank 0] step:7141/10000 train_time:630459ms step_avg:88.29ms +[2025-08-22 20:22:17] [Rank 0] step:7141/10000 train_time:630459ms step_avg:88.29ms +[2025-08-22 20:22:19] [Rank 0] step:7161/10000 train_time:632321ms step_avg:88.30ms +[2025-08-22 20:22:19] [Rank 0] step:7161/10000 train_time:632321ms step_avg:88.30ms +[2025-08-22 20:22:21] [Rank 0] step:7181/10000 train_time:634182ms step_avg:88.31ms +[2025-08-22 20:22:21] [Rank 0] step:7181/10000 train_time:634182ms step_avg:88.31ms +[2025-08-22 20:22:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:22:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:22:36] [Rank 0] PRINT: step:7200/10000 val_loss:3.7583 svd_entropy: attn_qk:H=0.7742,top10E=0.24,eRank=198.4,q75/q25=81.62 attn_vo:H=0.8078,top10E=0.07,eRank=337.5,q75/q25=inf mlp_w1:H=0.8748,top10E=0.14,eRank=341.4,q75/q25=8.28 mlp_w2:H=0.9315,top10E=0.10,eRank=488.5,q75/q25=4.55 vo_prod:H=0.6589,top10E=0.13,eRank=176.5,q75/q25=inf train_time:636139ms step_avg:88.35ms +[2025-08-22 20:22:36] [Rank 0] PRINT: step:7200/10000 val_loss:3.7583 svd_entropy: attn_qk:H=0.7742,top10E=0.24,eRank=198.4,q75/q25=81.62 attn_vo:H=0.8078,top10E=0.07,eRank=337.5,q75/q25=inf mlp_w1:H=0.8748,top10E=0.14,eRank=341.4,q75/q25=8.28 mlp_w2:H=0.9315,top10E=0.10,eRank=488.5,q75/q25=4.55 vo_prod:H=0.6589,top10E=0.13,eRank=176.5,q75/q25=inf train_time:636139ms step_avg:88.35ms +[2025-08-22 20:22:36] [Rank 0] step:7201/10000 train_time:636154ms step_avg:88.34ms +[2025-08-22 20:22:36] [Rank 0] step:7201/10000 train_time:636154ms step_avg:88.34ms +[2025-08-22 20:22:38] [Rank 0] step:7221/10000 train_time:637947ms step_avg:88.35ms +[2025-08-22 20:22:38] [Rank 0] step:7221/10000 train_time:637947ms step_avg:88.35ms +[2025-08-22 20:22:40] [Rank 0] step:7241/10000 train_time:639803ms step_avg:88.36ms +[2025-08-22 20:22:40] [Rank 0] step:7241/10000 train_time:639803ms step_avg:88.36ms +[2025-08-22 20:22:42] [Rank 0] step:7261/10000 train_time:641658ms step_avg:88.37ms +[2025-08-22 20:22:42] [Rank 0] step:7261/10000 train_time:641658ms step_avg:88.37ms +[2025-08-22 20:22:44] [Rank 0] step:7281/10000 train_time:643527ms step_avg:88.38ms +[2025-08-22 20:22:44] [Rank 0] step:7281/10000 train_time:643527ms step_avg:88.38ms +[2025-08-22 20:22:45] [Rank 0] step:7301/10000 train_time:645389ms step_avg:88.40ms +[2025-08-22 20:22:45] [Rank 0] step:7301/10000 train_time:645389ms step_avg:88.40ms +[2025-08-22 20:22:47] [Rank 0] step:7321/10000 train_time:647262ms step_avg:88.41ms +[2025-08-22 20:22:47] [Rank 0] step:7321/10000 train_time:647262ms step_avg:88.41ms +[2025-08-22 20:22:49] [Rank 0] 
step:7341/10000 train_time:649124ms step_avg:88.42ms +[2025-08-22 20:22:49] [Rank 0] step:7341/10000 train_time:649124ms step_avg:88.42ms +[2025-08-22 20:22:51] [Rank 0] step:7361/10000 train_time:650994ms step_avg:88.44ms +[2025-08-22 20:22:51] [Rank 0] step:7361/10000 train_time:650994ms step_avg:88.44ms +[2025-08-22 20:22:53] [Rank 0] step:7381/10000 train_time:652866ms step_avg:88.45ms +[2025-08-22 20:22:53] [Rank 0] step:7381/10000 train_time:652866ms step_avg:88.45ms +[2025-08-22 20:22:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:22:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:23:08] [Rank 0] PRINT: step:7400/10000 val_loss:3.7347 svd_entropy: attn_qk:H=0.7750,top10E=0.24,eRank=199.1,q75/q25=81.28 attn_vo:H=0.8082,top10E=0.07,eRank=338.5,q75/q25=inf mlp_w1:H=0.8758,top10E=0.14,eRank=343.5,q75/q25=8.21 mlp_w2:H=0.9319,top10E=0.10,eRank=489.9,q75/q25=4.52 vo_prod:H=0.6597,top10E=0.13,eRank=177.5,q75/q25=inf train_time:654806ms step_avg:88.49ms +[2025-08-22 20:23:08] [Rank 0] PRINT: step:7400/10000 val_loss:3.7347 svd_entropy: attn_qk:H=0.7750,top10E=0.24,eRank=199.1,q75/q25=81.28 attn_vo:H=0.8082,top10E=0.07,eRank=338.5,q75/q25=inf mlp_w1:H=0.8758,top10E=0.14,eRank=343.5,q75/q25=8.21 mlp_w2:H=0.9319,top10E=0.10,eRank=489.9,q75/q25=4.52 vo_prod:H=0.6597,top10E=0.13,eRank=177.5,q75/q25=inf train_time:654806ms step_avg:88.49ms +[2025-08-22 20:23:08] [Rank 0] step:7401/10000 train_time:654821ms step_avg:88.48ms +[2025-08-22 20:23:08] [Rank 0] step:7401/10000 train_time:654821ms step_avg:88.48ms +[2025-08-22 20:23:10] [Rank 0] step:7421/10000 train_time:656603ms step_avg:88.48ms +[2025-08-22 20:23:10] [Rank 0] step:7421/10000 train_time:656603ms step_avg:88.48ms +[2025-08-22 20:23:12] [Rank 0] step:7441/10000 train_time:658460ms step_avg:88.49ms +[2025-08-22 
20:23:12] [Rank 0] step:7441/10000 train_time:658460ms step_avg:88.49ms +[2025-08-22 20:23:14] [Rank 0] step:7461/10000 train_time:660318ms step_avg:88.50ms +[2025-08-22 20:23:14] [Rank 0] step:7461/10000 train_time:660318ms step_avg:88.50ms +[2025-08-22 20:23:16] [Rank 0] step:7481/10000 train_time:662184ms step_avg:88.52ms +[2025-08-22 20:23:16] [Rank 0] step:7481/10000 train_time:662184ms step_avg:88.52ms +[2025-08-22 20:23:18] [Rank 0] step:7501/10000 train_time:664048ms step_avg:88.53ms +[2025-08-22 20:23:18] [Rank 0] step:7501/10000 train_time:664048ms step_avg:88.53ms +[2025-08-22 20:23:20] [Rank 0] step:7521/10000 train_time:665911ms step_avg:88.54ms +[2025-08-22 20:23:20] [Rank 0] step:7521/10000 train_time:665911ms step_avg:88.54ms +[2025-08-22 20:23:21] [Rank 0] step:7541/10000 train_time:667787ms step_avg:88.55ms +[2025-08-22 20:23:21] [Rank 0] step:7541/10000 train_time:667787ms step_avg:88.55ms +[2025-08-22 20:23:23] [Rank 0] step:7561/10000 train_time:669639ms step_avg:88.56ms +[2025-08-22 20:23:23] [Rank 0] step:7561/10000 train_time:669639ms step_avg:88.56ms +[2025-08-22 20:23:25] [Rank 0] step:7581/10000 train_time:671515ms step_avg:88.58ms +[2025-08-22 20:23:25] [Rank 0] step:7581/10000 train_time:671515ms step_avg:88.58ms +[2025-08-22 20:23:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:23:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:23:41] [Rank 0] PRINT: step:7600/10000 val_loss:3.7265 svd_entropy: attn_qk:H=0.7757,top10E=0.24,eRank=199.8,q75/q25=81.03 attn_vo:H=0.8086,top10E=0.07,eRank=339.5,q75/q25=inf mlp_w1:H=0.8766,top10E=0.14,eRank=345.4,q75/q25=8.15 mlp_w2:H=0.9323,top10E=0.10,eRank=491.1,q75/q25=4.49 vo_prod:H=0.6604,top10E=0.13,eRank=178.4,q75/q25=inf train_time:673481ms step_avg:88.62ms +[2025-08-22 20:23:41] [Rank 0] PRINT: step:7600/10000 val_loss:3.7265 svd_entropy: attn_qk:H=0.7757,top10E=0.24,eRank=199.8,q75/q25=81.03 attn_vo:H=0.8086,top10E=0.07,eRank=339.5,q75/q25=inf mlp_w1:H=0.8766,top10E=0.14,eRank=345.4,q75/q25=8.15 mlp_w2:H=0.9323,top10E=0.10,eRank=491.1,q75/q25=4.49 vo_prod:H=0.6604,top10E=0.13,eRank=178.4,q75/q25=inf train_time:673481ms step_avg:88.62ms +[2025-08-22 20:23:41] [Rank 0] step:7601/10000 train_time:673497ms step_avg:88.61ms +[2025-08-22 20:23:41] [Rank 0] step:7601/10000 train_time:673497ms step_avg:88.61ms +[2025-08-22 20:23:43] [Rank 0] step:7621/10000 train_time:675260ms step_avg:88.61ms +[2025-08-22 20:23:43] [Rank 0] step:7621/10000 train_time:675260ms step_avg:88.61ms +[2025-08-22 20:23:45] [Rank 0] step:7641/10000 train_time:677118ms step_avg:88.62ms +[2025-08-22 20:23:45] [Rank 0] step:7641/10000 train_time:677118ms step_avg:88.62ms +[2025-08-22 20:23:46] [Rank 0] step:7661/10000 train_time:678979ms step_avg:88.63ms +[2025-08-22 20:23:46] [Rank 0] step:7661/10000 train_time:678979ms step_avg:88.63ms +[2025-08-22 20:23:48] [Rank 0] step:7681/10000 train_time:680835ms step_avg:88.64ms +[2025-08-22 20:23:48] [Rank 0] step:7681/10000 train_time:680835ms step_avg:88.64ms +[2025-08-22 20:23:50] [Rank 0] step:7701/10000 train_time:682696ms step_avg:88.65ms +[2025-08-22 20:23:50] [Rank 0] step:7701/10000 train_time:682696ms step_avg:88.65ms +[2025-08-22 20:23:52] [Rank 0] step:7721/10000 train_time:684567ms step_avg:88.66ms +[2025-08-22 20:23:52] [Rank 0] step:7721/10000 train_time:684567ms step_avg:88.66ms +[2025-08-22 20:23:54] [Rank 0] 
step:7741/10000 train_time:686430ms step_avg:88.67ms +[2025-08-22 20:23:54] [Rank 0] step:7741/10000 train_time:686430ms step_avg:88.67ms +[2025-08-22 20:23:56] [Rank 0] step:7761/10000 train_time:688301ms step_avg:88.69ms +[2025-08-22 20:23:56] [Rank 0] step:7761/10000 train_time:688301ms step_avg:88.69ms +[2025-08-22 20:23:58] [Rank 0] step:7781/10000 train_time:690169ms step_avg:88.70ms +[2025-08-22 20:23:58] [Rank 0] step:7781/10000 train_time:690169ms step_avg:88.70ms +[2025-08-22 20:23:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:23:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:24:13] [Rank 0] PRINT: step:7800/10000 val_loss:3.7133 svd_entropy: attn_qk:H=0.7764,top10E=0.23,eRank=200.4,q75/q25=81.01 attn_vo:H=0.8090,top10E=0.07,eRank=340.3,q75/q25=inf mlp_w1:H=0.8774,top10E=0.14,eRank=347.1,q75/q25=8.08 mlp_w2:H=0.9326,top10E=0.10,eRank=492.3,q75/q25=4.47 vo_prod:H=0.6611,top10E=0.13,eRank=179.3,q75/q25=inf train_time:692135ms step_avg:88.74ms +[2025-08-22 20:24:13] [Rank 0] PRINT: step:7800/10000 val_loss:3.7133 svd_entropy: attn_qk:H=0.7764,top10E=0.23,eRank=200.4,q75/q25=81.01 attn_vo:H=0.8090,top10E=0.07,eRank=340.3,q75/q25=inf mlp_w1:H=0.8774,top10E=0.14,eRank=347.1,q75/q25=8.08 mlp_w2:H=0.9326,top10E=0.10,eRank=492.3,q75/q25=4.47 vo_prod:H=0.6611,top10E=0.13,eRank=179.3,q75/q25=inf train_time:692135ms step_avg:88.74ms +[2025-08-22 20:24:13] [Rank 0] step:7801/10000 train_time:692151ms step_avg:88.73ms +[2025-08-22 20:24:13] [Rank 0] step:7801/10000 train_time:692151ms step_avg:88.73ms +[2025-08-22 20:24:15] [Rank 0] step:7821/10000 train_time:693925ms step_avg:88.73ms +[2025-08-22 20:24:15] [Rank 0] step:7821/10000 train_time:693925ms step_avg:88.73ms +[2025-08-22 20:24:17] [Rank 0] step:7841/10000 train_time:695780ms step_avg:88.74ms +[2025-08-22 
20:24:17] [Rank 0] step:7841/10000 train_time:695780ms step_avg:88.74ms +[2025-08-22 20:24:19] [Rank 0] step:7861/10000 train_time:697646ms step_avg:88.75ms +[2025-08-22 20:24:19] [Rank 0] step:7861/10000 train_time:697646ms step_avg:88.75ms +[2025-08-22 20:24:21] [Rank 0] step:7881/10000 train_time:699512ms step_avg:88.76ms +[2025-08-22 20:24:21] [Rank 0] step:7881/10000 train_time:699512ms step_avg:88.76ms +[2025-08-22 20:24:22] [Rank 0] step:7901/10000 train_time:701373ms step_avg:88.77ms +[2025-08-22 20:24:22] [Rank 0] step:7901/10000 train_time:701373ms step_avg:88.77ms +[2025-08-22 20:24:24] [Rank 0] step:7921/10000 train_time:703240ms step_avg:88.78ms +[2025-08-22 20:24:24] [Rank 0] step:7921/10000 train_time:703240ms step_avg:88.78ms +[2025-08-22 20:24:26] [Rank 0] step:7941/10000 train_time:705110ms step_avg:88.79ms +[2025-08-22 20:24:26] [Rank 0] step:7941/10000 train_time:705110ms step_avg:88.79ms +[2025-08-22 20:24:28] [Rank 0] step:7961/10000 train_time:706978ms step_avg:88.81ms +[2025-08-22 20:24:28] [Rank 0] step:7961/10000 train_time:706978ms step_avg:88.81ms +[2025-08-22 20:24:30] [Rank 0] step:7981/10000 train_time:708840ms step_avg:88.82ms +[2025-08-22 20:24:30] [Rank 0] step:7981/10000 train_time:708840ms step_avg:88.82ms +[2025-08-22 20:24:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:24:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:24:45] [Rank 0] PRINT: step:8000/10000 val_loss:3.6907 svd_entropy: attn_qk:H=0.7770,top10E=0.23,eRank=201.0,q75/q25=81.11 attn_vo:H=0.8094,top10E=0.07,eRank=341.1,q75/q25=inf mlp_w1:H=0.8781,top10E=0.14,eRank=348.7,q75/q25=8.04 mlp_w2:H=0.9329,top10E=0.10,eRank=493.3,q75/q25=4.44 vo_prod:H=0.6617,top10E=0.13,eRank=180.1,q75/q25=inf train_time:710803ms step_avg:88.85ms +[2025-08-22 20:24:45] [Rank 0] PRINT: step:8000/10000 val_loss:3.6907 svd_entropy: attn_qk:H=0.7770,top10E=0.23,eRank=201.0,q75/q25=81.11 attn_vo:H=0.8094,top10E=0.07,eRank=341.1,q75/q25=inf mlp_w1:H=0.8781,top10E=0.14,eRank=348.7,q75/q25=8.04 mlp_w2:H=0.9329,top10E=0.10,eRank=493.3,q75/q25=4.44 vo_prod:H=0.6617,top10E=0.13,eRank=180.1,q75/q25=inf train_time:710803ms step_avg:88.85ms +[2025-08-22 20:24:45] [Rank 0] step:8001/10000 train_time:710817ms step_avg:88.84ms +[2025-08-22 20:24:45] [Rank 0] step:8001/10000 train_time:710817ms step_avg:88.84ms +[2025-08-22 20:24:47] [Rank 0] step:8021/10000 train_time:712602ms step_avg:88.84ms +[2025-08-22 20:24:47] [Rank 0] step:8021/10000 train_time:712602ms step_avg:88.84ms +[2025-08-22 20:24:49] [Rank 0] step:8041/10000 train_time:714474ms step_avg:88.85ms +[2025-08-22 20:24:49] [Rank 0] step:8041/10000 train_time:714474ms step_avg:88.85ms +[2025-08-22 20:24:51] [Rank 0] step:8061/10000 train_time:716340ms step_avg:88.86ms +[2025-08-22 20:24:51] [Rank 0] step:8061/10000 train_time:716340ms step_avg:88.86ms +[2025-08-22 20:24:53] [Rank 0] step:8081/10000 train_time:718197ms step_avg:88.87ms +[2025-08-22 20:24:53] [Rank 0] step:8081/10000 train_time:718197ms step_avg:88.87ms +[2025-08-22 20:24:55] [Rank 0] step:8101/10000 train_time:720068ms step_avg:88.89ms +[2025-08-22 20:24:55] [Rank 0] step:8101/10000 train_time:720068ms step_avg:88.89ms +[2025-08-22 20:24:57] [Rank 0] step:8121/10000 train_time:721934ms step_avg:88.90ms +[2025-08-22 20:24:57] [Rank 0] step:8121/10000 train_time:721934ms step_avg:88.90ms +[2025-08-22 20:24:59] [Rank 0] 
step:8141/10000 train_time:724372ms step_avg:88.98ms +[2025-08-22 20:24:59] [Rank 0] step:8141/10000 train_time:724372ms step_avg:88.98ms +[2025-08-22 20:25:01] [Rank 0] step:8161/10000 train_time:726252ms step_avg:88.99ms +[2025-08-22 20:25:01] [Rank 0] step:8161/10000 train_time:726252ms step_avg:88.99ms +[2025-08-22 20:25:03] [Rank 0] step:8181/10000 train_time:728158ms step_avg:89.01ms +[2025-08-22 20:25:03] [Rank 0] step:8181/10000 train_time:728158ms step_avg:89.01ms +[2025-08-22 20:25:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:25:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:25:18] [Rank 0] PRINT: step:8200/10000 val_loss:3.6806 svd_entropy: attn_qk:H=0.7776,top10E=0.23,eRank=201.5,q75/q25=80.71 attn_vo:H=0.8097,top10E=0.07,eRank=341.9,q75/q25=inf mlp_w1:H=0.8787,top10E=0.14,eRank=350.1,q75/q25=7.99 mlp_w2:H=0.9332,top10E=0.10,eRank=494.2,q75/q25=4.42 vo_prod:H=0.6622,top10E=0.13,eRank=180.8,q75/q25=inf train_time:730171ms step_avg:89.05ms +[2025-08-22 20:25:18] [Rank 0] PRINT: step:8200/10000 val_loss:3.6806 svd_entropy: attn_qk:H=0.7776,top10E=0.23,eRank=201.5,q75/q25=80.71 attn_vo:H=0.8097,top10E=0.07,eRank=341.9,q75/q25=inf mlp_w1:H=0.8787,top10E=0.14,eRank=350.1,q75/q25=7.99 mlp_w2:H=0.9332,top10E=0.10,eRank=494.2,q75/q25=4.42 vo_prod:H=0.6622,top10E=0.13,eRank=180.8,q75/q25=inf train_time:730171ms step_avg:89.05ms +[2025-08-22 20:25:18] [Rank 0] step:8201/10000 train_time:730187ms step_avg:89.04ms +[2025-08-22 20:25:18] [Rank 0] step:8201/10000 train_time:730187ms step_avg:89.04ms +[2025-08-22 20:25:20] [Rank 0] step:8221/10000 train_time:732004ms step_avg:89.04ms +[2025-08-22 20:25:20] [Rank 0] step:8221/10000 train_time:732004ms step_avg:89.04ms +[2025-08-22 20:25:22] [Rank 0] step:8241/10000 train_time:733901ms step_avg:89.05ms +[2025-08-22 
20:25:22] [Rank 0] step:8241/10000 train_time:733901ms step_avg:89.05ms +[2025-08-22 20:25:24] [Rank 0] step:8261/10000 train_time:735794ms step_avg:89.07ms +[2025-08-22 20:25:24] [Rank 0] step:8261/10000 train_time:735794ms step_avg:89.07ms +[2025-08-22 20:25:26] [Rank 0] step:8281/10000 train_time:737683ms step_avg:89.08ms +[2025-08-22 20:25:26] [Rank 0] step:8281/10000 train_time:737683ms step_avg:89.08ms +[2025-08-22 20:25:28] [Rank 0] step:8301/10000 train_time:739573ms step_avg:89.09ms +[2025-08-22 20:25:28] [Rank 0] step:8301/10000 train_time:739573ms step_avg:89.09ms +[2025-08-22 20:25:30] [Rank 0] step:8321/10000 train_time:741456ms step_avg:89.11ms +[2025-08-22 20:25:30] [Rank 0] step:8321/10000 train_time:741456ms step_avg:89.11ms +[2025-08-22 20:25:32] [Rank 0] step:8341/10000 train_time:743351ms step_avg:89.12ms +[2025-08-22 20:25:32] [Rank 0] step:8341/10000 train_time:743351ms step_avg:89.12ms +[2025-08-22 20:25:34] [Rank 0] step:8361/10000 train_time:745246ms step_avg:89.13ms +[2025-08-22 20:25:34] [Rank 0] step:8361/10000 train_time:745246ms step_avg:89.13ms +[2025-08-22 20:25:36] [Rank 0] step:8381/10000 train_time:747135ms step_avg:89.15ms +[2025-08-22 20:25:36] [Rank 0] step:8381/10000 train_time:747135ms step_avg:89.15ms +[2025-08-22 20:25:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:25:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:25:51] [Rank 0] PRINT: step:8400/10000 val_loss:3.6658 svd_entropy: attn_qk:H=0.7780,top10E=0.23,eRank=201.9,q75/q25=80.30 attn_vo:H=0.8100,top10E=0.07,eRank=342.5,q75/q25=inf mlp_w1:H=0.8793,top10E=0.14,eRank=351.4,q75/q25=7.94 mlp_w2:H=0.9335,top10E=0.10,eRank=495.0,q75/q25=4.40 vo_prod:H=0.6627,top10E=0.13,eRank=181.5,q75/q25=inf train_time:749119ms step_avg:89.18ms +[2025-08-22 20:25:51] [Rank 0] PRINT: step:8400/10000 val_loss:3.6658 svd_entropy: attn_qk:H=0.7780,top10E=0.23,eRank=201.9,q75/q25=80.30 attn_vo:H=0.8100,top10E=0.07,eRank=342.5,q75/q25=inf mlp_w1:H=0.8793,top10E=0.14,eRank=351.4,q75/q25=7.94 mlp_w2:H=0.9335,top10E=0.10,eRank=495.0,q75/q25=4.40 vo_prod:H=0.6627,top10E=0.13,eRank=181.5,q75/q25=inf train_time:749119ms step_avg:89.18ms +[2025-08-22 20:25:51] [Rank 0] step:8401/10000 train_time:749134ms step_avg:89.17ms +[2025-08-22 20:25:51] [Rank 0] step:8401/10000 train_time:749134ms step_avg:89.17ms +[2025-08-22 20:25:53] [Rank 0] step:8421/10000 train_time:750923ms step_avg:89.17ms +[2025-08-22 20:25:53] [Rank 0] step:8421/10000 train_time:750923ms step_avg:89.17ms +[2025-08-22 20:25:55] [Rank 0] step:8441/10000 train_time:752807ms step_avg:89.18ms +[2025-08-22 20:25:55] [Rank 0] step:8441/10000 train_time:752807ms step_avg:89.18ms +[2025-08-22 20:25:57] [Rank 0] step:8461/10000 train_time:754693ms step_avg:89.20ms +[2025-08-22 20:25:57] [Rank 0] step:8461/10000 train_time:754693ms step_avg:89.20ms +[2025-08-22 20:25:59] [Rank 0] step:8481/10000 train_time:756585ms step_avg:89.21ms +[2025-08-22 20:25:59] [Rank 0] step:8481/10000 train_time:756585ms step_avg:89.21ms +[2025-08-22 20:26:01] [Rank 0] step:8501/10000 train_time:758497ms step_avg:89.22ms +[2025-08-22 20:26:01] [Rank 0] step:8501/10000 train_time:758497ms step_avg:89.22ms +[2025-08-22 20:26:02] [Rank 0] step:8521/10000 train_time:760390ms step_avg:89.24ms +[2025-08-22 20:26:02] [Rank 0] step:8521/10000 train_time:760390ms step_avg:89.24ms +[2025-08-22 20:26:04] [Rank 0] 
step:8541/10000 train_time:762293ms step_avg:89.25ms +[2025-08-22 20:26:04] [Rank 0] step:8541/10000 train_time:762293ms step_avg:89.25ms +[2025-08-22 20:26:06] [Rank 0] step:8561/10000 train_time:764186ms step_avg:89.26ms +[2025-08-22 20:26:06] [Rank 0] step:8561/10000 train_time:764186ms step_avg:89.26ms +[2025-08-22 20:26:08] [Rank 0] step:8581/10000 train_time:766084ms step_avg:89.28ms +[2025-08-22 20:26:08] [Rank 0] step:8581/10000 train_time:766084ms step_avg:89.28ms +[2025-08-22 20:26:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:26:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:26:24] [Rank 0] PRINT: step:8600/10000 val_loss:3.6541 svd_entropy: attn_qk:H=0.7784,top10E=0.23,eRank=202.3,q75/q25=80.50 attn_vo:H=0.8102,top10E=0.07,eRank=343.1,q75/q25=inf mlp_w1:H=0.8798,top10E=0.14,eRank=352.6,q75/q25=7.90 mlp_w2:H=0.9337,top10E=0.10,eRank=495.7,q75/q25=4.39 vo_prod:H=0.6631,top10E=0.13,eRank=182.1,q75/q25=inf train_time:768062ms step_avg:89.31ms +[2025-08-22 20:26:24] [Rank 0] PRINT: step:8600/10000 val_loss:3.6541 svd_entropy: attn_qk:H=0.7784,top10E=0.23,eRank=202.3,q75/q25=80.50 attn_vo:H=0.8102,top10E=0.07,eRank=343.1,q75/q25=inf mlp_w1:H=0.8798,top10E=0.14,eRank=352.6,q75/q25=7.90 mlp_w2:H=0.9337,top10E=0.10,eRank=495.7,q75/q25=4.39 vo_prod:H=0.6631,top10E=0.13,eRank=182.1,q75/q25=inf train_time:768062ms step_avg:89.31ms +[2025-08-22 20:26:24] [Rank 0] step:8601/10000 train_time:768077ms step_avg:89.30ms +[2025-08-22 20:26:24] [Rank 0] step:8601/10000 train_time:768077ms step_avg:89.30ms +[2025-08-22 20:26:26] [Rank 0] step:8621/10000 train_time:769882ms step_avg:89.30ms +[2025-08-22 20:26:26] [Rank 0] step:8621/10000 train_time:769882ms step_avg:89.30ms +[2025-08-22 20:26:28] [Rank 0] step:8641/10000 train_time:771769ms step_avg:89.31ms +[2025-08-22 
20:26:28] [Rank 0] step:8641/10000 train_time:771769ms step_avg:89.31ms +[2025-08-22 20:26:29] [Rank 0] step:8661/10000 train_time:773657ms step_avg:89.33ms +[2025-08-22 20:26:29] [Rank 0] step:8661/10000 train_time:773657ms step_avg:89.33ms +[2025-08-22 20:26:31] [Rank 0] step:8681/10000 train_time:775545ms step_avg:89.34ms +[2025-08-22 20:26:31] [Rank 0] step:8681/10000 train_time:775545ms step_avg:89.34ms +[2025-08-22 20:26:33] [Rank 0] step:8701/10000 train_time:777430ms step_avg:89.35ms +[2025-08-22 20:26:33] [Rank 0] step:8701/10000 train_time:777430ms step_avg:89.35ms +[2025-08-22 20:26:35] [Rank 0] step:8721/10000 train_time:779324ms step_avg:89.36ms +[2025-08-22 20:26:35] [Rank 0] step:8721/10000 train_time:779324ms step_avg:89.36ms +[2025-08-22 20:26:37] [Rank 0] step:8741/10000 train_time:781208ms step_avg:89.37ms +[2025-08-22 20:26:37] [Rank 0] step:8741/10000 train_time:781208ms step_avg:89.37ms +[2025-08-22 20:26:39] [Rank 0] step:8761/10000 train_time:783099ms step_avg:89.38ms +[2025-08-22 20:26:39] [Rank 0] step:8761/10000 train_time:783099ms step_avg:89.38ms +[2025-08-22 20:26:41] [Rank 0] step:8781/10000 train_time:784994ms step_avg:89.40ms +[2025-08-22 20:26:41] [Rank 0] step:8781/10000 train_time:784994ms step_avg:89.40ms +[2025-08-22 20:26:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:26:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:26:56] [Rank 0] PRINT: step:8800/10000 val_loss:3.6418 svd_entropy: attn_qk:H=0.7788,top10E=0.23,eRank=202.6,q75/q25=80.60 attn_vo:H=0.8105,top10E=0.07,eRank=343.6,q75/q25=inf mlp_w1:H=0.8802,top10E=0.14,eRank=353.5,q75/q25=7.87 mlp_w2:H=0.9339,top10E=0.10,eRank=496.4,q75/q25=4.37 vo_prod:H=0.6635,top10E=0.13,eRank=182.7,q75/q25=inf train_time:786985ms step_avg:89.43ms +[2025-08-22 20:26:56] [Rank 0] PRINT: step:8800/10000 val_loss:3.6418 svd_entropy: attn_qk:H=0.7788,top10E=0.23,eRank=202.6,q75/q25=80.60 attn_vo:H=0.8105,top10E=0.07,eRank=343.6,q75/q25=inf mlp_w1:H=0.8802,top10E=0.14,eRank=353.5,q75/q25=7.87 mlp_w2:H=0.9339,top10E=0.10,eRank=496.4,q75/q25=4.37 vo_prod:H=0.6635,top10E=0.13,eRank=182.7,q75/q25=inf train_time:786985ms step_avg:89.43ms +[2025-08-22 20:26:56] [Rank 0] step:8801/10000 train_time:786999ms step_avg:89.42ms +[2025-08-22 20:26:56] [Rank 0] step:8801/10000 train_time:786999ms step_avg:89.42ms +[2025-08-22 20:26:58] [Rank 0] step:8821/10000 train_time:788810ms step_avg:89.42ms +[2025-08-22 20:26:58] [Rank 0] step:8821/10000 train_time:788810ms step_avg:89.42ms +[2025-08-22 20:27:00] [Rank 0] step:8841/10000 train_time:790719ms step_avg:89.44ms +[2025-08-22 20:27:00] [Rank 0] step:8841/10000 train_time:790719ms step_avg:89.44ms +[2025-08-22 20:27:02] [Rank 0] step:8861/10000 train_time:792607ms step_avg:89.45ms +[2025-08-22 20:27:02] [Rank 0] step:8861/10000 train_time:792607ms step_avg:89.45ms +[2025-08-22 20:27:04] [Rank 0] step:8881/10000 train_time:794499ms step_avg:89.46ms +[2025-08-22 20:27:04] [Rank 0] step:8881/10000 train_time:794499ms step_avg:89.46ms +[2025-08-22 20:27:06] [Rank 0] step:8901/10000 train_time:796394ms step_avg:89.47ms +[2025-08-22 20:27:06] [Rank 0] step:8901/10000 train_time:796394ms step_avg:89.47ms +[2025-08-22 20:27:08] [Rank 0] step:8921/10000 train_time:798298ms step_avg:89.49ms +[2025-08-22 20:27:08] [Rank 0] step:8921/10000 train_time:798298ms step_avg:89.49ms +[2025-08-22 20:27:10] [Rank 0] 
step:8941/10000 train_time:800203ms step_avg:89.50ms +[2025-08-22 20:27:10] [Rank 0] step:8941/10000 train_time:800203ms step_avg:89.50ms +[2025-08-22 20:27:12] [Rank 0] step:8961/10000 train_time:802098ms step_avg:89.51ms +[2025-08-22 20:27:12] [Rank 0] step:8961/10000 train_time:802098ms step_avg:89.51ms +[2025-08-22 20:27:13] [Rank 0] step:8981/10000 train_time:803991ms step_avg:89.52ms +[2025-08-22 20:27:13] [Rank 0] step:8981/10000 train_time:803991ms step_avg:89.52ms +[2025-08-22 20:27:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:27:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:27:29] [Rank 0] PRINT: step:9000/10000 val_loss:3.6307 svd_entropy: attn_qk:H=0.7791,top10E=0.23,eRank=203.0,q75/q25=80.70 attn_vo:H=0.8106,top10E=0.07,eRank=344.0,q75/q25=inf mlp_w1:H=0.8806,top10E=0.14,eRank=354.4,q75/q25=7.84 mlp_w2:H=0.9340,top10E=0.10,eRank=497.0,q75/q25=4.36 vo_prod:H=0.6639,top10E=0.13,eRank=183.1,q75/q25=inf train_time:805979ms step_avg:89.55ms +[2025-08-22 20:27:29] [Rank 0] PRINT: step:9000/10000 val_loss:3.6307 svd_entropy: attn_qk:H=0.7791,top10E=0.23,eRank=203.0,q75/q25=80.70 attn_vo:H=0.8106,top10E=0.07,eRank=344.0,q75/q25=inf mlp_w1:H=0.8806,top10E=0.14,eRank=354.4,q75/q25=7.84 mlp_w2:H=0.9340,top10E=0.10,eRank=497.0,q75/q25=4.36 vo_prod:H=0.6639,top10E=0.13,eRank=183.1,q75/q25=inf train_time:805979ms step_avg:89.55ms +[2025-08-22 20:27:29] [Rank 0] step:9001/10000 train_time:805994ms step_avg:89.54ms +[2025-08-22 20:27:29] [Rank 0] step:9001/10000 train_time:805994ms step_avg:89.54ms +[2025-08-22 20:27:31] [Rank 0] step:9021/10000 train_time:807801ms step_avg:89.55ms +[2025-08-22 20:27:31] [Rank 0] step:9021/10000 train_time:807801ms step_avg:89.55ms +[2025-08-22 20:27:33] [Rank 0] step:9041/10000 train_time:809691ms step_avg:89.56ms +[2025-08-22 
20:27:33] [Rank 0] step:9041/10000 train_time:809691ms step_avg:89.56ms +[2025-08-22 20:27:35] [Rank 0] step:9061/10000 train_time:811588ms step_avg:89.57ms +[2025-08-22 20:27:35] [Rank 0] step:9061/10000 train_time:811588ms step_avg:89.57ms +[2025-08-22 20:27:37] [Rank 0] step:9081/10000 train_time:813490ms step_avg:89.58ms +[2025-08-22 20:27:37] [Rank 0] step:9081/10000 train_time:813490ms step_avg:89.58ms +[2025-08-22 20:27:39] [Rank 0] step:9101/10000 train_time:815400ms step_avg:89.59ms +[2025-08-22 20:27:39] [Rank 0] step:9101/10000 train_time:815400ms step_avg:89.59ms +[2025-08-22 20:27:40] [Rank 0] step:9121/10000 train_time:817297ms step_avg:89.61ms +[2025-08-22 20:27:40] [Rank 0] step:9121/10000 train_time:817297ms step_avg:89.61ms +[2025-08-22 20:27:42] [Rank 0] step:9141/10000 train_time:819180ms step_avg:89.62ms +[2025-08-22 20:27:42] [Rank 0] step:9141/10000 train_time:819180ms step_avg:89.62ms +[2025-08-22 20:27:44] [Rank 0] step:9161/10000 train_time:821067ms step_avg:89.63ms +[2025-08-22 20:27:44] [Rank 0] step:9161/10000 train_time:821067ms step_avg:89.63ms +[2025-08-22 20:27:46] [Rank 0] step:9181/10000 train_time:822994ms step_avg:89.64ms +[2025-08-22 20:27:46] [Rank 0] step:9181/10000 train_time:822994ms step_avg:89.64ms +[2025-08-22 20:27:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:27:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:28:02] [Rank 0] PRINT: step:9200/10000 val_loss:3.6203 svd_entropy: attn_qk:H=0.7794,top10E=0.23,eRank=203.2,q75/q25=80.46 attn_vo:H=0.8108,top10E=0.07,eRank=344.4,q75/q25=inf mlp_w1:H=0.8809,top10E=0.14,eRank=355.1,q75/q25=7.81 mlp_w2:H=0.9342,top10E=0.10,eRank=497.5,q75/q25=4.35 vo_prod:H=0.6642,top10E=0.13,eRank=183.6,q75/q25=inf train_time:824975ms step_avg:89.67ms +[2025-08-22 20:28:02] [Rank 0] PRINT: step:9200/10000 val_loss:3.6203 svd_entropy: attn_qk:H=0.7794,top10E=0.23,eRank=203.2,q75/q25=80.46 attn_vo:H=0.8108,top10E=0.07,eRank=344.4,q75/q25=inf mlp_w1:H=0.8809,top10E=0.14,eRank=355.1,q75/q25=7.81 mlp_w2:H=0.9342,top10E=0.10,eRank=497.5,q75/q25=4.35 vo_prod:H=0.6642,top10E=0.13,eRank=183.6,q75/q25=inf train_time:824975ms step_avg:89.67ms +[2025-08-22 20:28:02] [Rank 0] step:9201/10000 train_time:824989ms step_avg:89.66ms +[2025-08-22 20:28:02] [Rank 0] step:9201/10000 train_time:824989ms step_avg:89.66ms +[2025-08-22 20:28:04] [Rank 0] step:9221/10000 train_time:826812ms step_avg:89.67ms +[2025-08-22 20:28:04] [Rank 0] step:9221/10000 train_time:826812ms step_avg:89.67ms +[2025-08-22 20:28:06] [Rank 0] step:9241/10000 train_time:828714ms step_avg:89.68ms +[2025-08-22 20:28:06] [Rank 0] step:9241/10000 train_time:828714ms step_avg:89.68ms +[2025-08-22 20:28:07] [Rank 0] step:9261/10000 train_time:830615ms step_avg:89.69ms +[2025-08-22 20:28:07] [Rank 0] step:9261/10000 train_time:830615ms step_avg:89.69ms +[2025-08-22 20:28:09] [Rank 0] step:9281/10000 train_time:832499ms step_avg:89.70ms +[2025-08-22 20:28:09] [Rank 0] step:9281/10000 train_time:832499ms step_avg:89.70ms +[2025-08-22 20:28:11] [Rank 0] step:9301/10000 train_time:834385ms step_avg:89.71ms +[2025-08-22 20:28:11] [Rank 0] step:9301/10000 train_time:834385ms step_avg:89.71ms +[2025-08-22 20:28:13] [Rank 0] step:9321/10000 train_time:836283ms step_avg:89.72ms +[2025-08-22 20:28:13] [Rank 0] step:9321/10000 train_time:836283ms step_avg:89.72ms +[2025-08-22 20:28:15] [Rank 0] 
step:9341/10000 train_time:838177ms step_avg:89.73ms +[2025-08-22 20:28:15] [Rank 0] step:9341/10000 train_time:838177ms step_avg:89.73ms +[2025-08-22 20:28:17] [Rank 0] step:9361/10000 train_time:840076ms step_avg:89.74ms +[2025-08-22 20:28:17] [Rank 0] step:9361/10000 train_time:840076ms step_avg:89.74ms +[2025-08-22 20:28:19] [Rank 0] step:9381/10000 train_time:841985ms step_avg:89.75ms +[2025-08-22 20:28:19] [Rank 0] step:9381/10000 train_time:841985ms step_avg:89.75ms +[2025-08-22 20:28:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:28:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:28:34] [Rank 0] PRINT: step:9400/10000 val_loss:3.6104 svd_entropy: attn_qk:H=0.7796,top10E=0.23,eRank=203.4,q75/q25=80.24 attn_vo:H=0.8109,top10E=0.07,eRank=344.7,q75/q25=inf mlp_w1:H=0.8812,top10E=0.14,eRank=355.8,q75/q25=7.79 mlp_w2:H=0.9343,top10E=0.10,eRank=497.9,q75/q25=4.34 vo_prod:H=0.6644,top10E=0.13,eRank=183.9,q75/q25=inf train_time:843980ms step_avg:89.79ms +[2025-08-22 20:28:34] [Rank 0] PRINT: step:9400/10000 val_loss:3.6104 svd_entropy: attn_qk:H=0.7796,top10E=0.23,eRank=203.4,q75/q25=80.24 attn_vo:H=0.8109,top10E=0.07,eRank=344.7,q75/q25=inf mlp_w1:H=0.8812,top10E=0.14,eRank=355.8,q75/q25=7.79 mlp_w2:H=0.9343,top10E=0.10,eRank=497.9,q75/q25=4.34 vo_prod:H=0.6644,top10E=0.13,eRank=183.9,q75/q25=inf train_time:843980ms step_avg:89.79ms +[2025-08-22 20:28:34] [Rank 0] step:9401/10000 train_time:843996ms step_avg:89.78ms +[2025-08-22 20:28:34] [Rank 0] step:9401/10000 train_time:843996ms step_avg:89.78ms +[2025-08-22 20:28:36] [Rank 0] step:9421/10000 train_time:845801ms step_avg:89.78ms +[2025-08-22 20:28:36] [Rank 0] step:9421/10000 train_time:845801ms step_avg:89.78ms +[2025-08-22 20:28:38] [Rank 0] step:9441/10000 train_time:847693ms step_avg:89.79ms +[2025-08-22 
20:28:38] [Rank 0] step:9441/10000 train_time:847693ms step_avg:89.79ms +[2025-08-22 20:28:40] [Rank 0] step:9461/10000 train_time:849590ms step_avg:89.80ms +[2025-08-22 20:28:40] [Rank 0] step:9461/10000 train_time:849590ms step_avg:89.80ms +[2025-08-22 20:28:42] [Rank 0] step:9481/10000 train_time:851485ms step_avg:89.81ms +[2025-08-22 20:28:42] [Rank 0] step:9481/10000 train_time:851485ms step_avg:89.81ms +[2025-08-22 20:28:44] [Rank 0] step:9501/10000 train_time:853394ms step_avg:89.82ms +[2025-08-22 20:28:44] [Rank 0] step:9501/10000 train_time:853394ms step_avg:89.82ms +[2025-08-22 20:28:46] [Rank 0] step:9521/10000 train_time:855279ms step_avg:89.83ms +[2025-08-22 20:28:46] [Rank 0] step:9521/10000 train_time:855279ms step_avg:89.83ms +[2025-08-22 20:28:48] [Rank 0] step:9541/10000 train_time:857171ms step_avg:89.84ms +[2025-08-22 20:28:48] [Rank 0] step:9541/10000 train_time:857171ms step_avg:89.84ms +[2025-08-22 20:28:50] [Rank 0] step:9561/10000 train_time:859060ms step_avg:89.85ms +[2025-08-22 20:28:50] [Rank 0] step:9561/10000 train_time:859060ms step_avg:89.85ms +[2025-08-22 20:28:52] [Rank 0] step:9581/10000 train_time:860956ms step_avg:89.86ms +[2025-08-22 20:28:52] [Rank 0] step:9581/10000 train_time:860956ms step_avg:89.86ms +[2025-08-22 20:28:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:28:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:29:07] [Rank 0] PRINT: step:9600/10000 val_loss:3.6025 svd_entropy: attn_qk:H=0.7798,top10E=0.23,eRank=203.6,q75/q25=80.47 attn_vo:H=0.8110,top10E=0.07,eRank=344.9,q75/q25=inf mlp_w1:H=0.8814,top10E=0.14,eRank=356.3,q75/q25=7.77 mlp_w2:H=0.9344,top10E=0.10,eRank=498.3,q75/q25=4.33 vo_prod:H=0.6646,top10E=0.13,eRank=184.2,q75/q25=inf train_time:862959ms step_avg:89.89ms +[2025-08-22 20:29:07] [Rank 0] PRINT: step:9600/10000 val_loss:3.6025 svd_entropy: attn_qk:H=0.7798,top10E=0.23,eRank=203.6,q75/q25=80.47 attn_vo:H=0.8110,top10E=0.07,eRank=344.9,q75/q25=inf mlp_w1:H=0.8814,top10E=0.14,eRank=356.3,q75/q25=7.77 mlp_w2:H=0.9344,top10E=0.10,eRank=498.3,q75/q25=4.33 vo_prod:H=0.6646,top10E=0.13,eRank=184.2,q75/q25=inf train_time:862959ms step_avg:89.89ms +[2025-08-22 20:29:07] [Rank 0] step:9601/10000 train_time:862973ms step_avg:89.88ms +[2025-08-22 20:29:07] [Rank 0] step:9601/10000 train_time:862973ms step_avg:89.88ms +[2025-08-22 20:29:09] [Rank 0] step:9621/10000 train_time:864787ms step_avg:89.89ms +[2025-08-22 20:29:09] [Rank 0] step:9621/10000 train_time:864787ms step_avg:89.89ms +[2025-08-22 20:29:11] [Rank 0] step:9641/10000 train_time:866684ms step_avg:89.90ms +[2025-08-22 20:29:11] [Rank 0] step:9641/10000 train_time:866684ms step_avg:89.90ms +[2025-08-22 20:29:13] [Rank 0] step:9661/10000 train_time:868610ms step_avg:89.91ms +[2025-08-22 20:29:13] [Rank 0] step:9661/10000 train_time:868610ms step_avg:89.91ms +[2025-08-22 20:29:15] [Rank 0] step:9681/10000 train_time:870528ms step_avg:89.92ms +[2025-08-22 20:29:15] [Rank 0] step:9681/10000 train_time:870528ms step_avg:89.92ms +[2025-08-22 20:29:17] [Rank 0] step:9701/10000 train_time:872463ms step_avg:89.94ms +[2025-08-22 20:29:17] [Rank 0] step:9701/10000 train_time:872463ms step_avg:89.94ms +[2025-08-22 20:29:19] [Rank 0] step:9721/10000 train_time:874378ms step_avg:89.95ms +[2025-08-22 20:29:19] [Rank 0] step:9721/10000 train_time:874378ms step_avg:89.95ms +[2025-08-22 20:29:21] [Rank 0] 
step:9741/10000 train_time:876325ms step_avg:89.96ms +[2025-08-22 20:29:21] [Rank 0] step:9741/10000 train_time:876325ms step_avg:89.96ms +[2025-08-22 20:29:23] [Rank 0] step:9761/10000 train_time:878253ms step_avg:89.98ms +[2025-08-22 20:29:23] [Rank 0] step:9761/10000 train_time:878253ms step_avg:89.98ms +[2025-08-22 20:29:24] [Rank 0] step:9781/10000 train_time:880187ms step_avg:89.99ms +[2025-08-22 20:29:24] [Rank 0] step:9781/10000 train_time:880187ms step_avg:89.99ms +[2025-08-22 20:29:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:29:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:29:40] [Rank 0] PRINT: step:9800/10000 val_loss:3.5929 svd_entropy: attn_qk:H=0.7799,top10E=0.23,eRank=203.7,q75/q25=80.46 attn_vo:H=0.8111,top10E=0.07,eRank=345.1,q75/q25=inf mlp_w1:H=0.8816,top10E=0.14,eRank=356.7,q75/q25=7.76 mlp_w2:H=0.9345,top10E=0.10,eRank=498.5,q75/q25=4.32 vo_prod:H=0.6648,top10E=0.13,eRank=184.4,q75/q25=inf train_time:882228ms step_avg:90.02ms +[2025-08-22 20:29:40] [Rank 0] PRINT: step:9800/10000 val_loss:3.5929 svd_entropy: attn_qk:H=0.7799,top10E=0.23,eRank=203.7,q75/q25=80.46 attn_vo:H=0.8111,top10E=0.07,eRank=345.1,q75/q25=inf mlp_w1:H=0.8816,top10E=0.14,eRank=356.7,q75/q25=7.76 mlp_w2:H=0.9345,top10E=0.10,eRank=498.5,q75/q25=4.32 vo_prod:H=0.6648,top10E=0.13,eRank=184.4,q75/q25=inf train_time:882228ms step_avg:90.02ms +[2025-08-22 20:29:40] [Rank 0] step:9801/10000 train_time:882243ms step_avg:90.02ms +[2025-08-22 20:29:40] [Rank 0] step:9801/10000 train_time:882243ms step_avg:90.02ms +[2025-08-22 20:29:42] [Rank 0] step:9821/10000 train_time:884062ms step_avg:90.02ms +[2025-08-22 20:29:42] [Rank 0] step:9821/10000 train_time:884062ms step_avg:90.02ms +[2025-08-22 20:29:44] [Rank 0] step:9841/10000 train_time:885988ms step_avg:90.03ms +[2025-08-22 
20:29:44] [Rank 0] step:9841/10000 train_time:885988ms step_avg:90.03ms +[2025-08-22 20:29:46] [Rank 0] step:9861/10000 train_time:887896ms step_avg:90.04ms +[2025-08-22 20:29:46] [Rank 0] step:9861/10000 train_time:887896ms step_avg:90.04ms +[2025-08-22 20:29:48] [Rank 0] step:9881/10000 train_time:889808ms step_avg:90.05ms +[2025-08-22 20:29:48] [Rank 0] step:9881/10000 train_time:889808ms step_avg:90.05ms +[2025-08-22 20:29:50] [Rank 0] step:9901/10000 train_time:891733ms step_avg:90.06ms +[2025-08-22 20:29:50] [Rank 0] step:9901/10000 train_time:891733ms step_avg:90.06ms +[2025-08-22 20:29:52] [Rank 0] step:9921/10000 train_time:893653ms step_avg:90.08ms +[2025-08-22 20:29:52] [Rank 0] step:9921/10000 train_time:893653ms step_avg:90.08ms +[2025-08-22 20:29:54] [Rank 0] step:9941/10000 train_time:895576ms step_avg:90.09ms +[2025-08-22 20:29:54] [Rank 0] step:9941/10000 train_time:895576ms step_avg:90.09ms +[2025-08-22 20:29:55] [Rank 0] step:9961/10000 train_time:897491ms step_avg:90.10ms +[2025-08-22 20:29:55] [Rank 0] step:9961/10000 train_time:897491ms step_avg:90.10ms +[2025-08-22 20:29:57] [Rank 0] step:9981/10000 train_time:899414ms step_avg:90.11ms +[2025-08-22 20:29:57] [Rank 0] step:9981/10000 train_time:899414ms step_avg:90.11ms +[2025-08-22 20:29:59] [Rank 0] step:10000/10000 train_time:901243ms step_avg:90.12ms +[2025-08-22 20:29:59] [Rank 0] step:10000/10000 train_time:901243ms step_avg:90.12ms +[2025-08-22 20:29:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:29:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:30:13] [Rank 0] PRINT: step:10000/10000 val_loss:3.5851 svd_entropy: attn_qk:H=0.7800,top10E=0.23,eRank=203.8,q75/q25=80.42 attn_vo:H=0.8112,top10E=0.07,eRank=345.3,q75/q25=inf mlp_w1:H=0.8817,top10E=0.14,eRank=357.0,q75/q25=7.75 mlp_w2:H=0.9346,top10E=0.10,eRank=498.8,q75/q25=4.32 vo_prod:H=0.6649,top10E=0.13,eRank=184.6,q75/q25=inf train_time:901444ms step_avg:90.14ms +[2025-08-22 20:30:13] [Rank 0] PRINT: step:10000/10000 val_loss:3.5851 svd_entropy: attn_qk:H=0.7800,top10E=0.23,eRank=203.8,q75/q25=80.42 attn_vo:H=0.8112,top10E=0.07,eRank=345.3,q75/q25=inf mlp_w1:H=0.8817,top10E=0.14,eRank=357.0,q75/q25=7.75 mlp_w2:H=0.9346,top10E=0.10,eRank=498.8,q75/q25=4.32 vo_prod:H=0.6649,top10E=0.13,eRank=184.6,q75/q25=inf train_time:901444ms step_avg:90.14ms +[2025-08-22 20:30:13] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 20:30:13 2025 --- +[2025-08-22 20:30:13] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 20:30:13 2025 --- +[2025-08-22 20:30:13] [Rank 0] PRINT: Peak memory allocated: 11530 MiB reserved: 15536 MiB +[2025-08-22 20:30:13] [Rank 0] PRINT: Peak memory allocated: 11530 MiB reserved: 15536 MiB diff --git a/logs_svd_gated/mode_3_param_gated_seed_41/config.json b/logs_svd_gated/mode_3_param_gated_seed_41/config.json new file mode 100644 index 0000000000000000000000000000000000000000..bedb2738db55eee65dcbef6ec29a1516458fe899 --- /dev/null +++ b/logs_svd_gated/mode_3_param_gated_seed_41/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 41, + "optimizer_mode": 3, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + "cooldown_frac": 
0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "a1959fa4-df6e-486e-aadc-a40216bf6532", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_3_param_gated_seed_41/training_log_a1959fa4-df6e-486e-aadc-a40216bf6532.txt b/logs_svd_gated/mode_3_param_gated_seed_41/training_log_a1959fa4-df6e-486e-aadc-a40216bf6532.txt new file mode 100644 index 0000000000000000000000000000000000000000..4709f8469166f398c9fd770c8aadc690e1bf8418 --- /dev/null +++ b/logs_svd_gated/mode_3_param_gated_seed_41/training_log_a1959fa4-df6e-486e-aadc-a40216bf6532.txt @@ -0,0 +1,2926 @@ +[2025-08-22 10:02:31] [Rank 0] PRINT: --- Script Start: Fri Aug 22 10:02:31 2025 --- +[2025-08-22 10:02:31] [Rank 0] PRINT: --- Script Start: Fri Aug 22 10:02:31 2025 --- +[2025-08-22 10:02:31] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=3, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 10:02:31] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=3, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 10:02:31] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 10:02:31] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 10:02:32] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 10:02:32] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 10:02:32] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_3_param_gated_seed_41 +[2025-08-22 10:02:32] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_3_param_gated_seed_41 +[2025-08-22 10:02:32] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import 
argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k 
= min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 10:02:32] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 10:02:32] [Rank 0] PRINT: Constructing model... +[2025-08-22 10:02:32] [Rank 0] PRINT: Constructing model... +[2025-08-22 10:02:33] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 10:02:33] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 10:02:33] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 10:02:33] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 10:02:33] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 10:02:33] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 10:02:34] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 3 +[2025-08-22 10:02:34] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 3 +[2025-08-22 10:02:34] [Rank 0] PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: 0.05). +[2025-08-22 10:02:34] [Rank 0] PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: 0.05). +[2025-08-22 10:02:34] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 10:02:34] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 10:02:34] [Rank 0] PRINT: Muon optimizer is active with 44 parameters. +[2025-08-22 10:02:34] [Rank 0] PRINT: Muon optimizer is active with 44 parameters. +[2025-08-22 10:02:34] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 10:02:34] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 10:02:34] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 10:02:34] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 10:02:34] [Rank 0] PRINT: Starting warmup... +[2025-08-22 10:02:34] [Rank 0] PRINT: Starting warmup... +[2025-08-22 10:03:39] [Rank 0] PRINT: Warmup complete. +[2025-08-22 10:03:39] [Rank 0] PRINT: Warmup complete. +[2025-08-22 10:03:39] [Rank 0] PRINT: Starting training... +[2025-08-22 10:03:39] [Rank 0] PRINT: Starting training... 
+[2025-08-22 10:03:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:03:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:03:57] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 10:03:57] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 10:03:59] [Rank 0] step:21/10000 train_time:1752ms step_avg:83.44ms +[2025-08-22 10:03:59] [Rank 0] step:21/10000 train_time:1752ms step_avg:83.44ms +[2025-08-22 10:04:01] [Rank 0] step:41/10000 train_time:3456ms step_avg:84.29ms +[2025-08-22 10:04:01] [Rank 0] step:41/10000 train_time:3456ms step_avg:84.29ms +[2025-08-22 10:04:02] [Rank 0] step:61/10000 train_time:5164ms step_avg:84.66ms +[2025-08-22 10:04:02] [Rank 0] step:61/10000 train_time:5164ms step_avg:84.66ms +[2025-08-22 10:04:04] [Rank 0] step:81/10000 train_time:6875ms step_avg:84.88ms +[2025-08-22 10:04:04] [Rank 0] step:81/10000 train_time:6875ms step_avg:84.88ms +[2025-08-22 10:04:06] [Rank 0] step:101/10000 train_time:8588ms step_avg:85.03ms +[2025-08-22 10:04:06] [Rank 0] step:101/10000 train_time:8588ms step_avg:85.03ms +[2025-08-22 10:04:07] [Rank 0] step:121/10000 train_time:10300ms step_avg:85.13ms +[2025-08-22 10:04:07] [Rank 0] step:121/10000 
train_time:10300ms step_avg:85.13ms +[2025-08-22 10:04:09] [Rank 0] step:141/10000 train_time:12014ms step_avg:85.20ms +[2025-08-22 10:04:09] [Rank 0] step:141/10000 train_time:12014ms step_avg:85.20ms +[2025-08-22 10:04:11] [Rank 0] step:161/10000 train_time:13728ms step_avg:85.27ms +[2025-08-22 10:04:11] [Rank 0] step:161/10000 train_time:13728ms step_avg:85.27ms +[2025-08-22 10:04:13] [Rank 0] step:181/10000 train_time:15443ms step_avg:85.32ms +[2025-08-22 10:04:13] [Rank 0] step:181/10000 train_time:15443ms step_avg:85.32ms +[2025-08-22 10:04:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:04:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:04:28] [Rank 0] PRINT: step:200/10000 val_loss:6.4190 svd_entropy: attn_qk:H=0.8190,top10E=0.21,eRank=238.6,q75/q25=11.57 attn_vo:H=0.7549,top10E=0.26,eRank=167.7,q75/q25=18.94 mlp_w1:H=0.3689,top10E=0.85,eRank=15.7,q75/q25=5.30 mlp_w2:H=0.3859,top10E=0.82,eRank=15.8,q75/q25=7.36 vo_prod:H=0.5286,top10E=0.61,eRank=44.6,q75/q25=153.04 train_time:17159ms step_avg:85.79ms +[2025-08-22 10:04:28] [Rank 0] PRINT: step:200/10000 val_loss:6.4190 svd_entropy: attn_qk:H=0.8190,top10E=0.21,eRank=238.6,q75/q25=11.57 attn_vo:H=0.7549,top10E=0.26,eRank=167.7,q75/q25=18.94 mlp_w1:H=0.3689,top10E=0.85,eRank=15.7,q75/q25=5.30 mlp_w2:H=0.3859,top10E=0.82,eRank=15.8,q75/q25=7.36 vo_prod:H=0.5286,top10E=0.61,eRank=44.6,q75/q25=153.04 train_time:17159ms step_avg:85.79ms +[2025-08-22 10:04:28] [Rank 0] step:201/10000 train_time:17180ms step_avg:85.47ms +[2025-08-22 10:04:28] [Rank 0] step:201/10000 train_time:17180ms step_avg:85.47ms +[2025-08-22 10:04:30] [Rank 0] step:221/10000 train_time:18911ms step_avg:85.57ms +[2025-08-22 10:04:30] [Rank 0] step:221/10000 train_time:18911ms step_avg:85.57ms +[2025-08-22 10:04:31] [Rank 0] step:241/10000 
train_time:20621ms step_avg:85.56ms +[2025-08-22 10:04:31] [Rank 0] step:241/10000 train_time:20621ms step_avg:85.56ms +[2025-08-22 10:04:33] [Rank 0] step:261/10000 train_time:22331ms step_avg:85.56ms +[2025-08-22 10:04:33] [Rank 0] step:261/10000 train_time:22331ms step_avg:85.56ms +[2025-08-22 10:04:35] [Rank 0] step:281/10000 train_time:24043ms step_avg:85.56ms +[2025-08-22 10:04:35] [Rank 0] step:281/10000 train_time:24043ms step_avg:85.56ms +[2025-08-22 10:04:36] [Rank 0] step:301/10000 train_time:25757ms step_avg:85.57ms +[2025-08-22 10:04:36] [Rank 0] step:301/10000 train_time:25757ms step_avg:85.57ms +[2025-08-22 10:04:38] [Rank 0] step:321/10000 train_time:27469ms step_avg:85.57ms +[2025-08-22 10:04:38] [Rank 0] step:321/10000 train_time:27469ms step_avg:85.57ms +[2025-08-22 10:04:40] [Rank 0] step:341/10000 train_time:29181ms step_avg:85.57ms +[2025-08-22 10:04:40] [Rank 0] step:341/10000 train_time:29181ms step_avg:85.57ms +[2025-08-22 10:04:42] [Rank 0] step:361/10000 train_time:30894ms step_avg:85.58ms +[2025-08-22 10:04:42] [Rank 0] step:361/10000 train_time:30894ms step_avg:85.58ms +[2025-08-22 10:04:43] [Rank 0] step:381/10000 train_time:32607ms step_avg:85.58ms +[2025-08-22 10:04:43] [Rank 0] step:381/10000 train_time:32607ms step_avg:85.58ms +[2025-08-22 10:04:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:04:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:04:58] [Rank 0] PRINT: step:400/10000 val_loss:5.7865 svd_entropy: attn_qk:H=0.8074,top10E=0.17,eRank=221.5,q75/q25=22.35 attn_vo:H=0.7033,top10E=0.27,eRank=125.8,q75/q25=21.56 mlp_w1:H=0.5621,top10E=0.58,eRank=52.8,q75/q25=7.93 mlp_w2:H=0.5657,top10E=0.57,eRank=51.6,q75/q25=8.81 vo_prod:H=0.5518,top10E=0.53,eRank=51.7,q75/q25=208.60 train_time:34321ms step_avg:85.80ms +[2025-08-22 10:04:58] [Rank 0] PRINT: step:400/10000 val_loss:5.7865 svd_entropy: attn_qk:H=0.8074,top10E=0.17,eRank=221.5,q75/q25=22.35 attn_vo:H=0.7033,top10E=0.27,eRank=125.8,q75/q25=21.56 mlp_w1:H=0.5621,top10E=0.58,eRank=52.8,q75/q25=7.93 mlp_w2:H=0.5657,top10E=0.57,eRank=51.6,q75/q25=8.81 vo_prod:H=0.5518,top10E=0.53,eRank=51.7,q75/q25=208.60 train_time:34321ms step_avg:85.80ms +[2025-08-22 10:04:58] [Rank 0] step:401/10000 train_time:34342ms step_avg:85.64ms +[2025-08-22 10:04:58] [Rank 0] step:401/10000 train_time:34342ms step_avg:85.64ms +[2025-08-22 10:05:00] [Rank 0] step:421/10000 train_time:36051ms step_avg:85.63ms +[2025-08-22 10:05:00] [Rank 0] step:421/10000 train_time:36051ms step_avg:85.63ms +[2025-08-22 10:05:02] [Rank 0] step:441/10000 train_time:37758ms step_avg:85.62ms +[2025-08-22 10:05:02] [Rank 0] step:441/10000 train_time:37758ms step_avg:85.62ms +[2025-08-22 10:05:04] [Rank 0] step:461/10000 train_time:39467ms step_avg:85.61ms +[2025-08-22 10:05:04] [Rank 0] step:461/10000 train_time:39467ms step_avg:85.61ms +[2025-08-22 10:05:05] [Rank 0] step:481/10000 train_time:41179ms step_avg:85.61ms +[2025-08-22 10:05:05] [Rank 0] step:481/10000 train_time:41179ms step_avg:85.61ms +[2025-08-22 10:05:07] [Rank 0] step:501/10000 train_time:42887ms step_avg:85.60ms +[2025-08-22 10:05:07] [Rank 0] step:501/10000 train_time:42887ms step_avg:85.60ms +[2025-08-22 10:05:09] [Rank 0] step:521/10000 train_time:44595ms step_avg:85.59ms +[2025-08-22 10:05:09] [Rank 0] step:521/10000 train_time:44595ms step_avg:85.59ms +[2025-08-22 10:05:10] [Rank 0] step:541/10000 
train_time:46302ms step_avg:85.59ms +[2025-08-22 10:05:10] [Rank 0] step:541/10000 train_time:46302ms step_avg:85.59ms +[2025-08-22 10:05:12] [Rank 0] step:561/10000 train_time:48012ms step_avg:85.58ms +[2025-08-22 10:05:12] [Rank 0] step:561/10000 train_time:48012ms step_avg:85.58ms +[2025-08-22 10:05:14] [Rank 0] step:581/10000 train_time:49722ms step_avg:85.58ms +[2025-08-22 10:05:14] [Rank 0] step:581/10000 train_time:49722ms step_avg:85.58ms +[2025-08-22 10:05:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:05:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:05:29] [Rank 0] PRINT: step:600/10000 val_loss:5.4513 svd_entropy: attn_qk:H=0.8129,top10E=0.16,eRank=228.0,q75/q25=43.39 attn_vo:H=0.7017,top10E=0.25,eRank=123.4,q75/q25=26.98 mlp_w1:H=0.6700,top10E=0.40,eRank=95.0,q75/q25=6.51 mlp_w2:H=0.7166,top10E=0.35,eRank=125.7,q75/q25=8.15 vo_prod:H=0.5777,top10E=0.47,eRank=58.4,q75/q25=395.06 train_time:51435ms step_avg:85.72ms +[2025-08-22 10:05:29] [Rank 0] PRINT: step:600/10000 val_loss:5.4513 svd_entropy: attn_qk:H=0.8129,top10E=0.16,eRank=228.0,q75/q25=43.39 attn_vo:H=0.7017,top10E=0.25,eRank=123.4,q75/q25=26.98 mlp_w1:H=0.6700,top10E=0.40,eRank=95.0,q75/q25=6.51 mlp_w2:H=0.7166,top10E=0.35,eRank=125.7,q75/q25=8.15 vo_prod:H=0.5777,top10E=0.47,eRank=58.4,q75/q25=395.06 train_time:51435ms step_avg:85.72ms +[2025-08-22 10:05:29] [Rank 0] step:601/10000 train_time:51456ms step_avg:85.62ms +[2025-08-22 10:05:29] [Rank 0] step:601/10000 train_time:51456ms step_avg:85.62ms +[2025-08-22 10:05:31] [Rank 0] step:621/10000 train_time:53163ms step_avg:85.61ms +[2025-08-22 10:05:31] [Rank 0] step:621/10000 train_time:53163ms step_avg:85.61ms +[2025-08-22 10:05:32] [Rank 0] step:641/10000 train_time:54868ms step_avg:85.60ms +[2025-08-22 10:05:32] [Rank 0] step:641/10000 
train_time:54868ms step_avg:85.60ms +[2025-08-22 10:05:34] [Rank 0] step:661/10000 train_time:56574ms step_avg:85.59ms +[2025-08-22 10:05:34] [Rank 0] step:661/10000 train_time:56574ms step_avg:85.59ms +[2025-08-22 10:05:36] [Rank 0] step:681/10000 train_time:58282ms step_avg:85.58ms +[2025-08-22 10:05:36] [Rank 0] step:681/10000 train_time:58282ms step_avg:85.58ms +[2025-08-22 10:05:38] [Rank 0] step:701/10000 train_time:59988ms step_avg:85.58ms +[2025-08-22 10:05:38] [Rank 0] step:701/10000 train_time:59988ms step_avg:85.58ms +[2025-08-22 10:05:39] [Rank 0] step:721/10000 train_time:61695ms step_avg:85.57ms +[2025-08-22 10:05:39] [Rank 0] step:721/10000 train_time:61695ms step_avg:85.57ms +[2025-08-22 10:05:41] [Rank 0] step:741/10000 train_time:63401ms step_avg:85.56ms +[2025-08-22 10:05:41] [Rank 0] step:741/10000 train_time:63401ms step_avg:85.56ms +[2025-08-22 10:05:43] [Rank 0] step:761/10000 train_time:65119ms step_avg:85.57ms +[2025-08-22 10:05:43] [Rank 0] step:761/10000 train_time:65119ms step_avg:85.57ms +[2025-08-22 10:05:44] [Rank 0] step:781/10000 train_time:66839ms step_avg:85.58ms +[2025-08-22 10:05:44] [Rank 0] step:781/10000 train_time:66839ms step_avg:85.58ms +[2025-08-22 10:05:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:05:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:06:00] [Rank 0] PRINT: step:800/10000 val_loss:5.1903 svd_entropy: attn_qk:H=0.8221,top10E=0.15,eRank=240.6,q75/q25=64.03 attn_vo:H=0.7165,top10E=0.23,eRank=133.1,q75/q25=45.25 mlp_w1:H=0.7099,top10E=0.33,eRank=120.9,q75/q25=6.99 mlp_w2:H=0.7806,top10E=0.25,eRank=182.9,q75/q25=8.83 vo_prod:H=0.6015,top10E=0.42,eRank=65.6,q75/q25=1539.99 train_time:68561ms step_avg:85.70ms +[2025-08-22 10:06:00] [Rank 0] PRINT: step:800/10000 val_loss:5.1903 svd_entropy: attn_qk:H=0.8221,top10E=0.15,eRank=240.6,q75/q25=64.03 attn_vo:H=0.7165,top10E=0.23,eRank=133.1,q75/q25=45.25 mlp_w1:H=0.7099,top10E=0.33,eRank=120.9,q75/q25=6.99 mlp_w2:H=0.7806,top10E=0.25,eRank=182.9,q75/q25=8.83 vo_prod:H=0.6015,top10E=0.42,eRank=65.6,q75/q25=1539.99 train_time:68561ms step_avg:85.70ms +[2025-08-22 10:06:00] [Rank 0] step:801/10000 train_time:68582ms step_avg:85.62ms +[2025-08-22 10:06:00] [Rank 0] step:801/10000 train_time:68582ms step_avg:85.62ms +[2025-08-22 10:06:02] [Rank 0] step:821/10000 train_time:70384ms step_avg:85.73ms +[2025-08-22 10:06:02] [Rank 0] step:821/10000 train_time:70384ms step_avg:85.73ms +[2025-08-22 10:06:04] [Rank 0] step:841/10000 train_time:72105ms step_avg:85.74ms +[2025-08-22 10:06:04] [Rank 0] step:841/10000 train_time:72105ms step_avg:85.74ms +[2025-08-22 10:06:05] [Rank 0] step:861/10000 train_time:73824ms step_avg:85.74ms +[2025-08-22 10:06:05] [Rank 0] step:861/10000 train_time:73824ms step_avg:85.74ms +[2025-08-22 10:06:07] [Rank 0] step:881/10000 train_time:75545ms step_avg:85.75ms +[2025-08-22 10:06:07] [Rank 0] step:881/10000 train_time:75545ms step_avg:85.75ms +[2025-08-22 10:06:09] [Rank 0] step:901/10000 train_time:77266ms step_avg:85.76ms +[2025-08-22 10:06:09] [Rank 0] step:901/10000 train_time:77266ms step_avg:85.76ms +[2025-08-22 10:06:10] [Rank 0] step:921/10000 train_time:78987ms step_avg:85.76ms +[2025-08-22 10:06:10] [Rank 0] step:921/10000 train_time:78987ms step_avg:85.76ms +[2025-08-22 10:06:12] [Rank 0] step:941/10000 
train_time:80711ms step_avg:85.77ms +[2025-08-22 10:06:12] [Rank 0] step:941/10000 train_time:80711ms step_avg:85.77ms +[2025-08-22 10:06:14] [Rank 0] step:961/10000 train_time:82434ms step_avg:85.78ms +[2025-08-22 10:06:14] [Rank 0] step:961/10000 train_time:82434ms step_avg:85.78ms +[2025-08-22 10:06:16] [Rank 0] step:981/10000 train_time:84158ms step_avg:85.79ms +[2025-08-22 10:06:16] [Rank 0] step:981/10000 train_time:84158ms step_avg:85.79ms +[2025-08-22 10:06:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:06:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:06:31] [Rank 0] PRINT: step:1000/10000 val_loss:5.0295 svd_entropy: attn_qk:H=0.8349,top10E=0.13,eRank=260.0,q75/q25=75.82 attn_vo:H=0.7394,top10E=0.20,eRank=150.1,q75/q25=84.60 mlp_w1:H=0.7310,top10E=0.30,eRank=138.4,q75/q25=7.62 mlp_w2:H=0.8102,top10E=0.21,eRank=220.9,q75/q25=9.38 vo_prod:H=0.6262,top10E=0.37,eRank=74.3,q75/q25=6301.12 train_time:85885ms step_avg:85.88ms +[2025-08-22 10:06:31] [Rank 0] PRINT: step:1000/10000 val_loss:5.0295 svd_entropy: attn_qk:H=0.8349,top10E=0.13,eRank=260.0,q75/q25=75.82 attn_vo:H=0.7394,top10E=0.20,eRank=150.1,q75/q25=84.60 mlp_w1:H=0.7310,top10E=0.30,eRank=138.4,q75/q25=7.62 mlp_w2:H=0.8102,top10E=0.21,eRank=220.9,q75/q25=9.38 vo_prod:H=0.6262,top10E=0.37,eRank=74.3,q75/q25=6301.12 train_time:85885ms step_avg:85.88ms +[2025-08-22 10:06:31] [Rank 0] step:1001/10000 train_time:85904ms step_avg:85.82ms +[2025-08-22 10:06:31] [Rank 0] step:1001/10000 train_time:85904ms step_avg:85.82ms +[2025-08-22 10:06:33] [Rank 0] step:1021/10000 train_time:87622ms step_avg:85.82ms +[2025-08-22 10:06:33] [Rank 0] step:1021/10000 train_time:87622ms step_avg:85.82ms +[2025-08-22 10:06:35] [Rank 0] step:1041/10000 train_time:89341ms step_avg:85.82ms +[2025-08-22 10:06:35] [Rank 0] 
step:1041/10000 train_time:89341ms step_avg:85.82ms +[2025-08-22 10:06:36] [Rank 0] step:1061/10000 train_time:91066ms step_avg:85.83ms +[2025-08-22 10:06:36] [Rank 0] step:1061/10000 train_time:91066ms step_avg:85.83ms +[2025-08-22 10:06:38] [Rank 0] step:1081/10000 train_time:92786ms step_avg:85.83ms +[2025-08-22 10:06:38] [Rank 0] step:1081/10000 train_time:92786ms step_avg:85.83ms +[2025-08-22 10:06:40] [Rank 0] step:1101/10000 train_time:94510ms step_avg:85.84ms +[2025-08-22 10:06:40] [Rank 0] step:1101/10000 train_time:94510ms step_avg:85.84ms +[2025-08-22 10:06:42] [Rank 0] step:1121/10000 train_time:96233ms step_avg:85.85ms +[2025-08-22 10:06:42] [Rank 0] step:1121/10000 train_time:96233ms step_avg:85.85ms +[2025-08-22 10:06:43] [Rank 0] step:1141/10000 train_time:97955ms step_avg:85.85ms +[2025-08-22 10:06:43] [Rank 0] step:1141/10000 train_time:97955ms step_avg:85.85ms +[2025-08-22 10:06:45] [Rank 0] step:1161/10000 train_time:99680ms step_avg:85.86ms +[2025-08-22 10:06:45] [Rank 0] step:1161/10000 train_time:99680ms step_avg:85.86ms +[2025-08-22 10:06:47] [Rank 0] step:1181/10000 train_time:101403ms step_avg:85.86ms +[2025-08-22 10:06:47] [Rank 0] step:1181/10000 train_time:101403ms step_avg:85.86ms +[2025-08-22 10:06:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:06:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:07:02] [Rank 0] PRINT: step:1200/10000 val_loss:4.9093 svd_entropy: attn_qk:H=0.8453,top10E=0.13,eRank=277.4,q75/q25=75.08 attn_vo:H=0.7618,top10E=0.18,eRank=170.0,q75/q25=129.50 mlp_w1:H=0.7471,top10E=0.28,eRank=153.1,q75/q25=8.29 mlp_w2:H=0.8300,top10E=0.19,eRank=251.4,q75/q25=9.76 vo_prod:H=0.6491,top10E=0.34,eRank=84.1,q75/q25=15245.34 train_time:103128ms step_avg:85.94ms +[2025-08-22 10:07:02] [Rank 0] PRINT: step:1200/10000 val_loss:4.9093 svd_entropy: attn_qk:H=0.8453,top10E=0.13,eRank=277.4,q75/q25=75.08 attn_vo:H=0.7618,top10E=0.18,eRank=170.0,q75/q25=129.50 mlp_w1:H=0.7471,top10E=0.28,eRank=153.1,q75/q25=8.29 mlp_w2:H=0.8300,top10E=0.19,eRank=251.4,q75/q25=9.76 vo_prod:H=0.6491,top10E=0.34,eRank=84.1,q75/q25=15245.34 train_time:103128ms step_avg:85.94ms +[2025-08-22 10:07:02] [Rank 0] step:1201/10000 train_time:103147ms step_avg:85.88ms +[2025-08-22 10:07:02] [Rank 0] step:1201/10000 train_time:103147ms step_avg:85.88ms +[2025-08-22 10:07:04] [Rank 0] step:1221/10000 train_time:104921ms step_avg:85.93ms +[2025-08-22 10:07:04] [Rank 0] step:1221/10000 train_time:104921ms step_avg:85.93ms +[2025-08-22 10:07:06] [Rank 0] step:1241/10000 train_time:106638ms step_avg:85.93ms +[2025-08-22 10:07:06] [Rank 0] step:1241/10000 train_time:106638ms step_avg:85.93ms +[2025-08-22 10:07:08] [Rank 0] step:1261/10000 train_time:108357ms step_avg:85.93ms +[2025-08-22 10:07:08] [Rank 0] step:1261/10000 train_time:108357ms step_avg:85.93ms +[2025-08-22 10:07:09] [Rank 0] step:1281/10000 train_time:110075ms step_avg:85.93ms +[2025-08-22 10:07:09] [Rank 0] step:1281/10000 train_time:110075ms step_avg:85.93ms +[2025-08-22 10:07:11] [Rank 0] step:1301/10000 train_time:111794ms step_avg:85.93ms +[2025-08-22 10:07:11] [Rank 0] step:1301/10000 train_time:111794ms step_avg:85.93ms +[2025-08-22 10:07:13] [Rank 0] step:1321/10000 train_time:113512ms step_avg:85.93ms +[2025-08-22 10:07:13] [Rank 0] step:1321/10000 train_time:113512ms step_avg:85.93ms +[2025-08-22 
10:07:14] [Rank 0] step:1341/10000 train_time:115233ms step_avg:85.93ms +[2025-08-22 10:07:14] [Rank 0] step:1341/10000 train_time:115233ms step_avg:85.93ms +[2025-08-22 10:07:16] [Rank 0] step:1361/10000 train_time:116957ms step_avg:85.93ms +[2025-08-22 10:07:16] [Rank 0] step:1361/10000 train_time:116957ms step_avg:85.93ms +[2025-08-22 10:07:18] [Rank 0] step:1381/10000 train_time:118678ms step_avg:85.94ms +[2025-08-22 10:07:18] [Rank 0] step:1381/10000 train_time:118678ms step_avg:85.94ms +[2025-08-22 10:07:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:07:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:07:33] [Rank 0] PRINT: step:1400/10000 val_loss:4.8310 svd_entropy: attn_qk:H=0.8538,top10E=0.12,eRank=292.7,q75/q25=66.04 attn_vo:H=0.7811,top10E=0.16,eRank=189.8,q75/q25=158.50 mlp_w1:H=0.7600,top10E=0.27,eRank=166.0,q75/q25=8.93 mlp_w2:H=0.8452,top10E=0.17,eRank=277.7,q75/q25=9.85 vo_prod:H=0.6693,top10E=0.31,eRank=94.3,q75/q25=23414.76 train_time:120402ms step_avg:86.00ms +[2025-08-22 10:07:33] [Rank 0] PRINT: step:1400/10000 val_loss:4.8310 svd_entropy: attn_qk:H=0.8538,top10E=0.12,eRank=292.7,q75/q25=66.04 attn_vo:H=0.7811,top10E=0.16,eRank=189.8,q75/q25=158.50 mlp_w1:H=0.7600,top10E=0.27,eRank=166.0,q75/q25=8.93 mlp_w2:H=0.8452,top10E=0.17,eRank=277.7,q75/q25=9.85 vo_prod:H=0.6693,top10E=0.31,eRank=94.3,q75/q25=23414.76 train_time:120402ms step_avg:86.00ms +[2025-08-22 10:07:34] [Rank 0] step:1401/10000 train_time:120423ms step_avg:85.95ms +[2025-08-22 10:07:34] [Rank 0] step:1401/10000 train_time:120423ms step_avg:85.95ms +[2025-08-22 10:07:35] [Rank 0] step:1421/10000 train_time:122150ms step_avg:85.96ms +[2025-08-22 10:07:35] [Rank 0] step:1421/10000 train_time:122150ms step_avg:85.96ms +[2025-08-22 10:07:37] [Rank 0] step:1441/10000 train_time:123872ms 
step_avg:85.96ms +[2025-08-22 10:07:37] [Rank 0] step:1441/10000 train_time:123872ms step_avg:85.96ms +[2025-08-22 10:07:39] [Rank 0] step:1461/10000 train_time:125593ms step_avg:85.96ms +[2025-08-22 10:07:39] [Rank 0] step:1461/10000 train_time:125593ms step_avg:85.96ms +[2025-08-22 10:07:40] [Rank 0] step:1481/10000 train_time:127315ms step_avg:85.97ms +[2025-08-22 10:07:40] [Rank 0] step:1481/10000 train_time:127315ms step_avg:85.97ms +[2025-08-22 10:07:42] [Rank 0] step:1501/10000 train_time:129046ms step_avg:85.97ms +[2025-08-22 10:07:42] [Rank 0] step:1501/10000 train_time:129046ms step_avg:85.97ms +[2025-08-22 10:07:44] [Rank 0] step:1521/10000 train_time:130781ms step_avg:85.98ms +[2025-08-22 10:07:44] [Rank 0] step:1521/10000 train_time:130781ms step_avg:85.98ms +[2025-08-22 10:07:46] [Rank 0] step:1541/10000 train_time:132517ms step_avg:85.99ms +[2025-08-22 10:07:46] [Rank 0] step:1541/10000 train_time:132517ms step_avg:85.99ms +[2025-08-22 10:07:47] [Rank 0] step:1561/10000 train_time:134253ms step_avg:86.00ms +[2025-08-22 10:07:47] [Rank 0] step:1561/10000 train_time:134253ms step_avg:86.00ms +[2025-08-22 10:07:49] [Rank 0] step:1581/10000 train_time:135990ms step_avg:86.01ms +[2025-08-22 10:07:49] [Rank 0] step:1581/10000 train_time:135990ms step_avg:86.01ms +[2025-08-22 10:07:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:07:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:08:05] [Rank 0] PRINT: step:1600/10000 val_loss:4.7388 svd_entropy: attn_qk:H=0.8605,top10E=0.11,eRank=305.6,q75/q25=55.93 attn_vo:H=0.7972,top10E=0.15,eRank=208.8,q75/q25=168.23 mlp_w1:H=0.7713,top10E=0.26,eRank=178.1,q75/q25=9.46 mlp_w2:H=0.8571,top10E=0.16,eRank=300.5,q75/q25=9.76 vo_prod:H=0.6873,top10E=0.29,eRank=104.7,q75/q25=28122.58 train_time:137729ms step_avg:86.08ms +[2025-08-22 10:08:05] [Rank 0] PRINT: step:1600/10000 val_loss:4.7388 svd_entropy: attn_qk:H=0.8605,top10E=0.11,eRank=305.6,q75/q25=55.93 attn_vo:H=0.7972,top10E=0.15,eRank=208.8,q75/q25=168.23 mlp_w1:H=0.7713,top10E=0.26,eRank=178.1,q75/q25=9.46 mlp_w2:H=0.8571,top10E=0.16,eRank=300.5,q75/q25=9.76 vo_prod:H=0.6873,top10E=0.29,eRank=104.7,q75/q25=28122.58 train_time:137729ms step_avg:86.08ms +[2025-08-22 10:08:05] [Rank 0] step:1601/10000 train_time:137750ms step_avg:86.04ms +[2025-08-22 10:08:05] [Rank 0] step:1601/10000 train_time:137750ms step_avg:86.04ms +[2025-08-22 10:08:07] [Rank 0] step:1621/10000 train_time:139490ms step_avg:86.05ms +[2025-08-22 10:08:07] [Rank 0] step:1621/10000 train_time:139490ms step_avg:86.05ms +[2025-08-22 10:08:08] [Rank 0] step:1641/10000 train_time:141275ms step_avg:86.09ms +[2025-08-22 10:08:08] [Rank 0] step:1641/10000 train_time:141275ms step_avg:86.09ms +[2025-08-22 10:08:10] [Rank 0] step:1661/10000 train_time:143006ms step_avg:86.10ms +[2025-08-22 10:08:10] [Rank 0] step:1661/10000 train_time:143006ms step_avg:86.10ms +[2025-08-22 10:08:12] [Rank 0] step:1681/10000 train_time:144738ms step_avg:86.10ms +[2025-08-22 10:08:12] [Rank 0] step:1681/10000 train_time:144738ms step_avg:86.10ms +[2025-08-22 10:08:13] [Rank 0] step:1701/10000 train_time:146472ms step_avg:86.11ms +[2025-08-22 10:08:13] [Rank 0] step:1701/10000 train_time:146472ms step_avg:86.11ms +[2025-08-22 10:08:15] [Rank 0] step:1721/10000 train_time:148205ms step_avg:86.12ms +[2025-08-22 10:08:15] [Rank 0] step:1721/10000 train_time:148205ms step_avg:86.12ms +[2025-08-22 
10:08:17] [Rank 0] step:1741/10000 train_time:149937ms step_avg:86.12ms +[2025-08-22 10:08:17] [Rank 0] step:1741/10000 train_time:149937ms step_avg:86.12ms +[2025-08-22 10:08:19] [Rank 0] step:1761/10000 train_time:151672ms step_avg:86.13ms +[2025-08-22 10:08:19] [Rank 0] step:1761/10000 train_time:151672ms step_avg:86.13ms +[2025-08-22 10:08:20] [Rank 0] step:1781/10000 train_time:153405ms step_avg:86.13ms +[2025-08-22 10:08:20] [Rank 0] step:1781/10000 train_time:153405ms step_avg:86.13ms +[2025-08-22 10:08:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:08:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:08:36] [Rank 0] PRINT: step:1800/10000 val_loss:4.6371 svd_entropy: attn_qk:H=0.8661,top10E=0.11,eRank=316.9,q75/q25=46.77 attn_vo:H=0.8108,top10E=0.14,eRank=226.6,q75/q25=161.02 mlp_w1:H=0.7810,top10E=0.25,eRank=189.1,q75/q25=9.92 mlp_w2:H=0.8670,top10E=0.15,eRank=320.9,q75/q25=9.49 vo_prod:H=0.7033,top10E=0.28,eRank=115.5,q75/q25=26651.76 train_time:155144ms step_avg:86.19ms +[2025-08-22 10:08:36] [Rank 0] PRINT: step:1800/10000 val_loss:4.6371 svd_entropy: attn_qk:H=0.8661,top10E=0.11,eRank=316.9,q75/q25=46.77 attn_vo:H=0.8108,top10E=0.14,eRank=226.6,q75/q25=161.02 mlp_w1:H=0.7810,top10E=0.25,eRank=189.1,q75/q25=9.92 mlp_w2:H=0.8670,top10E=0.15,eRank=320.9,q75/q25=9.49 vo_prod:H=0.7033,top10E=0.28,eRank=115.5,q75/q25=26651.76 train_time:155144ms step_avg:86.19ms +[2025-08-22 10:08:36] [Rank 0] step:1801/10000 train_time:155164ms step_avg:86.15ms +[2025-08-22 10:08:36] [Rank 0] step:1801/10000 train_time:155164ms step_avg:86.15ms +[2025-08-22 10:08:38] [Rank 0] step:1821/10000 train_time:156898ms step_avg:86.16ms +[2025-08-22 10:08:38] [Rank 0] step:1821/10000 train_time:156898ms step_avg:86.16ms +[2025-08-22 10:08:40] [Rank 0] step:1841/10000 train_time:158625ms 
step_avg:86.16ms +[2025-08-22 10:08:40] [Rank 0] step:1841/10000 train_time:158625ms step_avg:86.16ms +[2025-08-22 10:08:41] [Rank 0] step:1861/10000 train_time:160355ms step_avg:86.17ms +[2025-08-22 10:08:41] [Rank 0] step:1861/10000 train_time:160355ms step_avg:86.17ms +[2025-08-22 10:08:43] [Rank 0] step:1881/10000 train_time:162085ms step_avg:86.17ms +[2025-08-22 10:08:43] [Rank 0] step:1881/10000 train_time:162085ms step_avg:86.17ms +[2025-08-22 10:08:45] [Rank 0] step:1901/10000 train_time:163816ms step_avg:86.17ms +[2025-08-22 10:08:45] [Rank 0] step:1901/10000 train_time:163816ms step_avg:86.17ms +[2025-08-22 10:08:46] [Rank 0] step:1921/10000 train_time:165548ms step_avg:86.18ms +[2025-08-22 10:08:46] [Rank 0] step:1921/10000 train_time:165548ms step_avg:86.18ms +[2025-08-22 10:08:48] [Rank 0] step:1941/10000 train_time:167278ms step_avg:86.18ms +[2025-08-22 10:08:48] [Rank 0] step:1941/10000 train_time:167278ms step_avg:86.18ms +[2025-08-22 10:08:50] [Rank 0] step:1961/10000 train_time:169009ms step_avg:86.19ms +[2025-08-22 10:08:50] [Rank 0] step:1961/10000 train_time:169009ms step_avg:86.19ms +[2025-08-22 10:08:52] [Rank 0] step:1981/10000 train_time:170742ms step_avg:86.19ms +[2025-08-22 10:08:52] [Rank 0] step:1981/10000 train_time:170742ms step_avg:86.19ms +[2025-08-22 10:08:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:08:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:09:07] [Rank 0] PRINT: step:2000/10000 val_loss:4.5631 svd_entropy: attn_qk:H=0.8710,top10E=0.11,eRank=327.0,q75/q25=39.68 attn_vo:H=0.8223,top10E=0.13,eRank=243.2,q75/q25=147.54 mlp_w1:H=0.7897,top10E=0.24,eRank=199.6,q75/q25=10.25 mlp_w2:H=0.8754,top10E=0.14,eRank=339.2,q75/q25=9.12 vo_prod:H=0.7175,top10E=0.26,eRank=126.1,q75/q25=22927.49 train_time:172482ms step_avg:86.24ms +[2025-08-22 10:09:07] [Rank 0] PRINT: step:2000/10000 val_loss:4.5631 svd_entropy: attn_qk:H=0.8710,top10E=0.11,eRank=327.0,q75/q25=39.68 attn_vo:H=0.8223,top10E=0.13,eRank=243.2,q75/q25=147.54 mlp_w1:H=0.7897,top10E=0.24,eRank=199.6,q75/q25=10.25 mlp_w2:H=0.8754,top10E=0.14,eRank=339.2,q75/q25=9.12 vo_prod:H=0.7175,top10E=0.26,eRank=126.1,q75/q25=22927.49 train_time:172482ms step_avg:86.24ms +[2025-08-22 10:09:07] [Rank 0] step:2001/10000 train_time:172504ms step_avg:86.21ms +[2025-08-22 10:09:07] [Rank 0] step:2001/10000 train_time:172504ms step_avg:86.21ms +[2025-08-22 10:09:09] [Rank 0] step:2021/10000 train_time:174313ms step_avg:86.25ms +[2025-08-22 10:09:09] [Rank 0] step:2021/10000 train_time:174313ms step_avg:86.25ms +[2025-08-22 10:09:12] [Rank 0] step:2041/10000 train_time:176817ms step_avg:86.63ms +[2025-08-22 10:09:12] [Rank 0] step:2041/10000 train_time:176817ms step_avg:86.63ms +[2025-08-22 10:09:13] [Rank 0] step:2061/10000 train_time:178549ms step_avg:86.63ms +[2025-08-22 10:09:13] [Rank 0] step:2061/10000 train_time:178549ms step_avg:86.63ms +[2025-08-22 10:09:15] [Rank 0] step:2081/10000 train_time:180282ms step_avg:86.63ms +[2025-08-22 10:09:15] [Rank 0] step:2081/10000 train_time:180282ms step_avg:86.63ms +[2025-08-22 10:09:17] [Rank 0] step:2101/10000 train_time:182018ms step_avg:86.63ms +[2025-08-22 10:09:17] [Rank 0] step:2101/10000 train_time:182018ms step_avg:86.63ms +[2025-08-22 10:09:19] [Rank 0] step:2121/10000 train_time:183754ms step_avg:86.64ms +[2025-08-22 10:09:19] [Rank 0] step:2121/10000 train_time:183754ms step_avg:86.64ms +[2025-08-22 
10:09:20] [Rank 0] step:2141/10000 train_time:185489ms step_avg:86.64ms +[2025-08-22 10:09:20] [Rank 0] step:2141/10000 train_time:185489ms step_avg:86.64ms +[2025-08-22 10:09:22] [Rank 0] step:2161/10000 train_time:187228ms step_avg:86.64ms +[2025-08-22 10:09:22] [Rank 0] step:2161/10000 train_time:187228ms step_avg:86.64ms +[2025-08-22 10:09:24] [Rank 0] step:2181/10000 train_time:188966ms step_avg:86.64ms +[2025-08-22 10:09:24] [Rank 0] step:2181/10000 train_time:188966ms step_avg:86.64ms +[2025-08-22 10:09:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:09:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:09:39] [Rank 0] PRINT: step:2200/10000 val_loss:4.4520 svd_entropy: attn_qk:H=0.8749,top10E=0.10,eRank=335.5,q75/q25=34.78 attn_vo:H=0.8315,top10E=0.13,eRank=257.6,q75/q25=130.81 mlp_w1:H=0.7974,top10E=0.23,eRank=209.4,q75/q25=10.46 mlp_w2:H=0.8824,top10E=0.13,eRank=355.5,q75/q25=8.78 vo_prod:H=0.7293,top10E=0.25,eRank=135.9,q75/q25=18490.16 train_time:190705ms step_avg:86.68ms +[2025-08-22 10:09:39] [Rank 0] PRINT: step:2200/10000 val_loss:4.4520 svd_entropy: attn_qk:H=0.8749,top10E=0.10,eRank=335.5,q75/q25=34.78 attn_vo:H=0.8315,top10E=0.13,eRank=257.6,q75/q25=130.81 mlp_w1:H=0.7974,top10E=0.23,eRank=209.4,q75/q25=10.46 mlp_w2:H=0.8824,top10E=0.13,eRank=355.5,q75/q25=8.78 vo_prod:H=0.7293,top10E=0.25,eRank=135.9,q75/q25=18490.16 train_time:190705ms step_avg:86.68ms +[2025-08-22 10:09:40] [Rank 0] step:2201/10000 train_time:190727ms step_avg:86.65ms +[2025-08-22 10:09:40] [Rank 0] step:2201/10000 train_time:190727ms step_avg:86.65ms +[2025-08-22 10:09:41] [Rank 0] step:2221/10000 train_time:192449ms step_avg:86.65ms +[2025-08-22 10:09:41] [Rank 0] step:2221/10000 train_time:192449ms step_avg:86.65ms +[2025-08-22 10:09:43] [Rank 0] step:2241/10000 train_time:194217ms 
step_avg:86.67ms +[2025-08-22 10:09:43] [Rank 0] step:2241/10000 train_time:194217ms step_avg:86.67ms +[2025-08-22 10:09:45] [Rank 0] step:2261/10000 train_time:195994ms step_avg:86.68ms +[2025-08-22 10:09:45] [Rank 0] step:2261/10000 train_time:195994ms step_avg:86.68ms +[2025-08-22 10:09:47] [Rank 0] step:2281/10000 train_time:197772ms step_avg:86.70ms +[2025-08-22 10:09:47] [Rank 0] step:2281/10000 train_time:197772ms step_avg:86.70ms +[2025-08-22 10:09:48] [Rank 0] step:2301/10000 train_time:199549ms step_avg:86.72ms +[2025-08-22 10:09:48] [Rank 0] step:2301/10000 train_time:199549ms step_avg:86.72ms +[2025-08-22 10:09:50] [Rank 0] step:2321/10000 train_time:201326ms step_avg:86.74ms +[2025-08-22 10:09:50] [Rank 0] step:2321/10000 train_time:201326ms step_avg:86.74ms +[2025-08-22 10:09:52] [Rank 0] step:2341/10000 train_time:203101ms step_avg:86.76ms +[2025-08-22 10:09:52] [Rank 0] step:2341/10000 train_time:203101ms step_avg:86.76ms +[2025-08-22 10:09:54] [Rank 0] step:2361/10000 train_time:204879ms step_avg:86.78ms +[2025-08-22 10:09:54] [Rank 0] step:2361/10000 train_time:204879ms step_avg:86.78ms +[2025-08-22 10:09:55] [Rank 0] step:2381/10000 train_time:206655ms step_avg:86.79ms +[2025-08-22 10:09:55] [Rank 0] step:2381/10000 train_time:206655ms step_avg:86.79ms +[2025-08-22 10:09:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:09:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:10:11] [Rank 0] PRINT: step:2400/10000 val_loss:4.3535 svd_entropy: attn_qk:H=0.8778,top10E=0.10,eRank=341.9,q75/q25=30.88 attn_vo:H=0.8388,top10E=0.12,eRank=269.6,q75/q25=114.00 mlp_w1:H=0.8045,top10E=0.22,eRank=218.9,q75/q25=10.64 mlp_w2:H=0.8884,top10E=0.13,eRank=369.9,q75/q25=8.38 vo_prod:H=0.7383,top10E=0.24,eRank=144.0,q75/q25=14012.90 train_time:208434ms step_avg:86.85ms +[2025-08-22 10:10:11] [Rank 0] PRINT: step:2400/10000 val_loss:4.3535 svd_entropy: attn_qk:H=0.8778,top10E=0.10,eRank=341.9,q75/q25=30.88 attn_vo:H=0.8388,top10E=0.12,eRank=269.6,q75/q25=114.00 mlp_w1:H=0.8045,top10E=0.22,eRank=218.9,q75/q25=10.64 mlp_w2:H=0.8884,top10E=0.13,eRank=369.9,q75/q25=8.38 vo_prod:H=0.7383,top10E=0.24,eRank=144.0,q75/q25=14012.90 train_time:208434ms step_avg:86.85ms +[2025-08-22 10:10:11] [Rank 0] step:2401/10000 train_time:208456ms step_avg:86.82ms +[2025-08-22 10:10:11] [Rank 0] step:2401/10000 train_time:208456ms step_avg:86.82ms +[2025-08-22 10:10:13] [Rank 0] step:2421/10000 train_time:210237ms step_avg:86.84ms +[2025-08-22 10:10:13] [Rank 0] step:2421/10000 train_time:210237ms step_avg:86.84ms +[2025-08-22 10:10:15] [Rank 0] step:2441/10000 train_time:212088ms step_avg:86.89ms +[2025-08-22 10:10:15] [Rank 0] step:2441/10000 train_time:212088ms step_avg:86.89ms +[2025-08-22 10:10:16] [Rank 0] step:2461/10000 train_time:213867ms step_avg:86.90ms +[2025-08-22 10:10:16] [Rank 0] step:2461/10000 train_time:213867ms step_avg:86.90ms +[2025-08-22 10:10:18] [Rank 0] step:2481/10000 train_time:215640ms step_avg:86.92ms +[2025-08-22 10:10:18] [Rank 0] step:2481/10000 train_time:215640ms step_avg:86.92ms +[2025-08-22 10:10:20] [Rank 0] step:2501/10000 train_time:217416ms step_avg:86.93ms +[2025-08-22 10:10:20] [Rank 0] step:2501/10000 train_time:217416ms step_avg:86.93ms +[2025-08-22 10:10:22] [Rank 0] step:2521/10000 train_time:219193ms step_avg:86.95ms +[2025-08-22 10:10:22] [Rank 0] step:2521/10000 train_time:219193ms step_avg:86.95ms +[2025-08-22 
10:10:24] [Rank 0] step:2541/10000 train_time:220969ms step_avg:86.96ms +[2025-08-22 10:10:24] [Rank 0] step:2541/10000 train_time:220969ms step_avg:86.96ms +[2025-08-22 10:10:25] [Rank 0] step:2561/10000 train_time:222747ms step_avg:86.98ms +[2025-08-22 10:10:25] [Rank 0] step:2561/10000 train_time:222747ms step_avg:86.98ms +[2025-08-22 10:10:27] [Rank 0] step:2581/10000 train_time:224524ms step_avg:86.99ms +[2025-08-22 10:10:27] [Rank 0] step:2581/10000 train_time:224524ms step_avg:86.99ms +[2025-08-22 10:10:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:10:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:10:43] [Rank 0] PRINT: step:2600/10000 val_loss:4.2881 svd_entropy: attn_qk:H=0.8806,top10E=0.10,eRank=348.2,q75/q25=27.76 attn_vo:H=0.8447,top10E=0.12,eRank=280.0,q75/q25=99.82 mlp_w1:H=0.8107,top10E=0.21,eRank=227.6,q75/q25=10.74 mlp_w2:H=0.8935,top10E=0.12,eRank=382.5,q75/q25=8.07 vo_prod:H=0.7462,top10E=0.23,eRank=151.3,q75/q25=10666.18 train_time:226304ms step_avg:87.04ms +[2025-08-22 10:10:43] [Rank 0] PRINT: step:2600/10000 val_loss:4.2881 svd_entropy: attn_qk:H=0.8806,top10E=0.10,eRank=348.2,q75/q25=27.76 attn_vo:H=0.8447,top10E=0.12,eRank=280.0,q75/q25=99.82 mlp_w1:H=0.8107,top10E=0.21,eRank=227.6,q75/q25=10.74 mlp_w2:H=0.8935,top10E=0.12,eRank=382.5,q75/q25=8.07 vo_prod:H=0.7462,top10E=0.23,eRank=151.3,q75/q25=10666.18 train_time:226304ms step_avg:87.04ms +[2025-08-22 10:10:43] [Rank 0] step:2601/10000 train_time:226326ms step_avg:87.02ms +[2025-08-22 10:10:43] [Rank 0] step:2601/10000 train_time:226326ms step_avg:87.02ms +[2025-08-22 10:10:45] [Rank 0] step:2621/10000 train_time:228090ms step_avg:87.02ms +[2025-08-22 10:10:45] [Rank 0] step:2621/10000 train_time:228090ms step_avg:87.02ms +[2025-08-22 10:10:46] [Rank 0] step:2641/10000 train_time:229865ms 
step_avg:87.04ms +[2025-08-22 10:10:46] [Rank 0] step:2641/10000 train_time:229865ms step_avg:87.04ms +[2025-08-22 10:10:48] [Rank 0] step:2661/10000 train_time:231639ms step_avg:87.05ms +[2025-08-22 10:10:48] [Rank 0] step:2661/10000 train_time:231639ms step_avg:87.05ms +[2025-08-22 10:10:50] [Rank 0] step:2681/10000 train_time:233417ms step_avg:87.06ms +[2025-08-22 10:10:50] [Rank 0] step:2681/10000 train_time:233417ms step_avg:87.06ms +[2025-08-22 10:10:52] [Rank 0] step:2701/10000 train_time:235196ms step_avg:87.08ms +[2025-08-22 10:10:52] [Rank 0] step:2701/10000 train_time:235196ms step_avg:87.08ms +[2025-08-22 10:10:54] [Rank 0] step:2721/10000 train_time:236976ms step_avg:87.09ms +[2025-08-22 10:10:54] [Rank 0] step:2721/10000 train_time:236976ms step_avg:87.09ms +[2025-08-22 10:10:55] [Rank 0] step:2741/10000 train_time:238758ms step_avg:87.11ms +[2025-08-22 10:10:55] [Rank 0] step:2741/10000 train_time:238758ms step_avg:87.11ms +[2025-08-22 10:10:57] [Rank 0] step:2761/10000 train_time:240538ms step_avg:87.12ms +[2025-08-22 10:10:57] [Rank 0] step:2761/10000 train_time:240538ms step_avg:87.12ms +[2025-08-22 10:10:59] [Rank 0] step:2781/10000 train_time:242326ms step_avg:87.14ms +[2025-08-22 10:10:59] [Rank 0] step:2781/10000 train_time:242326ms step_avg:87.14ms +[2025-08-22 10:11:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:11:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:11:15] [Rank 0] PRINT: step:2800/10000 val_loss:4.2453 svd_entropy: attn_qk:H=0.8830,top10E=0.10,eRank=353.8,q75/q25=25.38 attn_vo:H=0.8498,top10E=0.11,eRank=289.2,q75/q25=88.18 mlp_w1:H=0.8164,top10E=0.21,eRank=235.8,q75/q25=10.79 mlp_w2:H=0.8978,top10E=0.12,eRank=393.6,q75/q25=7.78 vo_prod:H=0.7533,top10E=0.22,eRank=158.1,q75/q25=8219.05 train_time:244110ms step_avg:87.18ms +[2025-08-22 10:11:15] [Rank 0] PRINT: step:2800/10000 val_loss:4.2453 svd_entropy: attn_qk:H=0.8830,top10E=0.10,eRank=353.8,q75/q25=25.38 attn_vo:H=0.8498,top10E=0.11,eRank=289.2,q75/q25=88.18 mlp_w1:H=0.8164,top10E=0.21,eRank=235.8,q75/q25=10.79 mlp_w2:H=0.8978,top10E=0.12,eRank=393.6,q75/q25=7.78 vo_prod:H=0.7533,top10E=0.22,eRank=158.1,q75/q25=8219.05 train_time:244110ms step_avg:87.18ms +[2025-08-22 10:11:15] [Rank 0] step:2801/10000 train_time:244131ms step_avg:87.16ms +[2025-08-22 10:11:15] [Rank 0] step:2801/10000 train_time:244131ms step_avg:87.16ms +[2025-08-22 10:11:16] [Rank 0] step:2821/10000 train_time:245997ms step_avg:87.20ms +[2025-08-22 10:11:16] [Rank 0] step:2821/10000 train_time:245997ms step_avg:87.20ms +[2025-08-22 10:11:18] [Rank 0] step:2841/10000 train_time:247775ms step_avg:87.21ms +[2025-08-22 10:11:18] [Rank 0] step:2841/10000 train_time:247775ms step_avg:87.21ms +[2025-08-22 10:11:20] [Rank 0] step:2861/10000 train_time:249550ms step_avg:87.22ms +[2025-08-22 10:11:20] [Rank 0] step:2861/10000 train_time:249550ms step_avg:87.22ms +[2025-08-22 10:11:22] [Rank 0] step:2881/10000 train_time:251325ms step_avg:87.24ms +[2025-08-22 10:11:22] [Rank 0] step:2881/10000 train_time:251325ms step_avg:87.24ms +[2025-08-22 10:11:24] [Rank 0] step:2901/10000 train_time:253103ms step_avg:87.25ms +[2025-08-22 10:11:24] [Rank 0] step:2901/10000 train_time:253103ms step_avg:87.25ms +[2025-08-22 10:11:25] [Rank 0] step:2921/10000 train_time:254878ms step_avg:87.26ms +[2025-08-22 10:11:25] [Rank 0] step:2921/10000 train_time:254878ms step_avg:87.26ms +[2025-08-22 
10:11:27] [Rank 0] step:2941/10000 train_time:256656ms step_avg:87.27ms +[2025-08-22 10:11:27] [Rank 0] step:2941/10000 train_time:256656ms step_avg:87.27ms +[2025-08-22 10:11:29] [Rank 0] step:2961/10000 train_time:258437ms step_avg:87.28ms +[2025-08-22 10:11:29] [Rank 0] step:2961/10000 train_time:258437ms step_avg:87.28ms +[2025-08-22 10:11:31] [Rank 0] step:2981/10000 train_time:260221ms step_avg:87.29ms +[2025-08-22 10:11:31] [Rank 0] step:2981/10000 train_time:260221ms step_avg:87.29ms +[2025-08-22 10:11:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:11:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:11:46] [Rank 0] PRINT: step:3000/10000 val_loss:4.1922 svd_entropy: attn_qk:H=0.8851,top10E=0.10,eRank=358.7,q75/q25=23.68 attn_vo:H=0.8542,top10E=0.11,eRank=297.2,q75/q25=78.92 mlp_w1:H=0.8216,top10E=0.20,eRank=243.5,q75/q25=10.81 mlp_w2:H=0.9015,top10E=0.12,eRank=403.4,q75/q25=7.50 vo_prod:H=0.7594,top10E=0.21,eRank=164.3,q75/q25=6405.65 train_time:262009ms step_avg:87.34ms +[2025-08-22 10:11:46] [Rank 0] PRINT: step:3000/10000 val_loss:4.1922 svd_entropy: attn_qk:H=0.8851,top10E=0.10,eRank=358.7,q75/q25=23.68 attn_vo:H=0.8542,top10E=0.11,eRank=297.2,q75/q25=78.92 mlp_w1:H=0.8216,top10E=0.20,eRank=243.5,q75/q25=10.81 mlp_w2:H=0.9015,top10E=0.12,eRank=403.4,q75/q25=7.50 vo_prod:H=0.7594,top10E=0.21,eRank=164.3,q75/q25=6405.65 train_time:262009ms step_avg:87.34ms +[2025-08-22 10:11:46] [Rank 0] step:3001/10000 train_time:262029ms step_avg:87.31ms +[2025-08-22 10:11:46] [Rank 0] step:3001/10000 train_time:262029ms step_avg:87.31ms +[2025-08-22 10:11:48] [Rank 0] step:3021/10000 train_time:263809ms step_avg:87.32ms +[2025-08-22 10:11:48] [Rank 0] step:3021/10000 train_time:263809ms step_avg:87.32ms +[2025-08-22 10:11:50] [Rank 0] step:3041/10000 train_time:265588ms 
step_avg:87.34ms +[2025-08-22 10:11:50] [Rank 0] step:3041/10000 train_time:265588ms step_avg:87.34ms +[2025-08-22 10:11:52] [Rank 0] step:3061/10000 train_time:267369ms step_avg:87.35ms +[2025-08-22 10:11:52] [Rank 0] step:3061/10000 train_time:267369ms step_avg:87.35ms +[2025-08-22 10:11:53] [Rank 0] step:3081/10000 train_time:269151ms step_avg:87.36ms +[2025-08-22 10:11:53] [Rank 0] step:3081/10000 train_time:269151ms step_avg:87.36ms +[2025-08-22 10:11:55] [Rank 0] step:3101/10000 train_time:270933ms step_avg:87.37ms +[2025-08-22 10:11:55] [Rank 0] step:3101/10000 train_time:270933ms step_avg:87.37ms +[2025-08-22 10:11:57] [Rank 0] step:3121/10000 train_time:272716ms step_avg:87.38ms +[2025-08-22 10:11:57] [Rank 0] step:3121/10000 train_time:272716ms step_avg:87.38ms +[2025-08-22 10:11:59] [Rank 0] step:3141/10000 train_time:274500ms step_avg:87.39ms +[2025-08-22 10:11:59] [Rank 0] step:3141/10000 train_time:274500ms step_avg:87.39ms +[2025-08-22 10:12:00] [Rank 0] step:3161/10000 train_time:276285ms step_avg:87.40ms +[2025-08-22 10:12:00] [Rank 0] step:3161/10000 train_time:276285ms step_avg:87.40ms +[2025-08-22 10:12:02] [Rank 0] step:3181/10000 train_time:278071ms step_avg:87.42ms +[2025-08-22 10:12:02] [Rank 0] step:3181/10000 train_time:278071ms step_avg:87.42ms +[2025-08-22 10:12:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:12:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:12:18] [Rank 0] PRINT: step:3200/10000 val_loss:4.1573 svd_entropy: attn_qk:H=0.8869,top10E=0.10,eRank=363.0,q75/q25=22.11 attn_vo:H=0.8579,top10E=0.11,eRank=304.2,q75/q25=70.39 mlp_w1:H=0.8263,top10E=0.20,eRank=250.8,q75/q25=10.84 mlp_w2:H=0.9046,top10E=0.11,eRank=411.9,q75/q25=7.31 vo_prod:H=0.7648,top10E=0.21,eRank=169.8,q75/q25=5165.49 train_time:279860ms step_avg:87.46ms +[2025-08-22 10:12:18] [Rank 0] PRINT: step:3200/10000 val_loss:4.1573 svd_entropy: attn_qk:H=0.8869,top10E=0.10,eRank=363.0,q75/q25=22.11 attn_vo:H=0.8579,top10E=0.11,eRank=304.2,q75/q25=70.39 mlp_w1:H=0.8263,top10E=0.20,eRank=250.8,q75/q25=10.84 mlp_w2:H=0.9046,top10E=0.11,eRank=411.9,q75/q25=7.31 vo_prod:H=0.7648,top10E=0.21,eRank=169.8,q75/q25=5165.49 train_time:279860ms step_avg:87.46ms +[2025-08-22 10:12:18] [Rank 0] step:3201/10000 train_time:279881ms step_avg:87.44ms +[2025-08-22 10:12:18] [Rank 0] step:3201/10000 train_time:279881ms step_avg:87.44ms +[2025-08-22 10:12:20] [Rank 0] step:3221/10000 train_time:281740ms step_avg:87.47ms +[2025-08-22 10:12:20] [Rank 0] step:3221/10000 train_time:281740ms step_avg:87.47ms +[2025-08-22 10:12:21] [Rank 0] step:3241/10000 train_time:283530ms step_avg:87.48ms +[2025-08-22 10:12:21] [Rank 0] step:3241/10000 train_time:283530ms step_avg:87.48ms +[2025-08-22 10:12:23] [Rank 0] step:3261/10000 train_time:285314ms step_avg:87.49ms +[2025-08-22 10:12:23] [Rank 0] step:3261/10000 train_time:285314ms step_avg:87.49ms +[2025-08-22 10:12:25] [Rank 0] step:3281/10000 train_time:287100ms step_avg:87.50ms +[2025-08-22 10:12:25] [Rank 0] step:3281/10000 train_time:287100ms step_avg:87.50ms +[2025-08-22 10:12:27] [Rank 0] step:3301/10000 train_time:288887ms step_avg:87.52ms +[2025-08-22 10:12:27] [Rank 0] step:3301/10000 train_time:288887ms step_avg:87.52ms +[2025-08-22 10:12:28] [Rank 0] step:3321/10000 train_time:290675ms step_avg:87.53ms +[2025-08-22 10:12:28] [Rank 0] step:3321/10000 train_time:290675ms step_avg:87.53ms +[2025-08-22 
10:12:30] [Rank 0] step:3341/10000 train_time:292464ms step_avg:87.54ms +[2025-08-22 10:12:30] [Rank 0] step:3341/10000 train_time:292464ms step_avg:87.54ms +[2025-08-22 10:12:32] [Rank 0] step:3361/10000 train_time:294254ms step_avg:87.55ms +[2025-08-22 10:12:32] [Rank 0] step:3361/10000 train_time:294254ms step_avg:87.55ms +[2025-08-22 10:12:34] [Rank 0] step:3381/10000 train_time:296043ms step_avg:87.56ms +[2025-08-22 10:12:34] [Rank 0] step:3381/10000 train_time:296043ms step_avg:87.56ms +[2025-08-22 10:12:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:12:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:12:49] [Rank 0] PRINT: step:3400/10000 val_loss:4.1168 svd_entropy: attn_qk:H=0.8887,top10E=0.09,eRank=367.1,q75/q25=20.87 attn_vo:H=0.8613,top10E=0.10,eRank=310.9,q75/q25=63.83 mlp_w1:H=0.8307,top10E=0.19,eRank=257.8,q75/q25=10.84 mlp_w2:H=0.9074,top10E=0.11,eRank=419.4,q75/q25=7.10 vo_prod:H=0.7698,top10E=0.20,eRank=175.1,q75/q25=4048.12 train_time:297838ms step_avg:87.60ms +[2025-08-22 10:12:49] [Rank 0] PRINT: step:3400/10000 val_loss:4.1168 svd_entropy: attn_qk:H=0.8887,top10E=0.09,eRank=367.1,q75/q25=20.87 attn_vo:H=0.8613,top10E=0.10,eRank=310.9,q75/q25=63.83 mlp_w1:H=0.8307,top10E=0.19,eRank=257.8,q75/q25=10.84 mlp_w2:H=0.9074,top10E=0.11,eRank=419.4,q75/q25=7.10 vo_prod:H=0.7698,top10E=0.20,eRank=175.1,q75/q25=4048.12 train_time:297838ms step_avg:87.60ms +[2025-08-22 10:12:49] [Rank 0] step:3401/10000 train_time:297858ms step_avg:87.58ms +[2025-08-22 10:12:49] [Rank 0] step:3401/10000 train_time:297858ms step_avg:87.58ms +[2025-08-22 10:12:51] [Rank 0] step:3421/10000 train_time:299634ms step_avg:87.59ms +[2025-08-22 10:12:51] [Rank 0] step:3421/10000 train_time:299634ms step_avg:87.59ms +[2025-08-22 10:12:53] [Rank 0] step:3441/10000 train_time:301417ms 
step_avg:87.60ms +[2025-08-22 10:12:53] [Rank 0] step:3441/10000 train_time:301417ms step_avg:87.60ms +[2025-08-22 10:12:55] [Rank 0] step:3461/10000 train_time:303198ms step_avg:87.60ms +[2025-08-22 10:12:55] [Rank 0] step:3461/10000 train_time:303198ms step_avg:87.60ms +[2025-08-22 10:12:56] [Rank 0] step:3481/10000 train_time:304980ms step_avg:87.61ms +[2025-08-22 10:12:56] [Rank 0] step:3481/10000 train_time:304980ms step_avg:87.61ms +[2025-08-22 10:12:58] [Rank 0] step:3501/10000 train_time:306765ms step_avg:87.62ms +[2025-08-22 10:12:58] [Rank 0] step:3501/10000 train_time:306765ms step_avg:87.62ms +[2025-08-22 10:13:00] [Rank 0] step:3521/10000 train_time:308551ms step_avg:87.63ms +[2025-08-22 10:13:00] [Rank 0] step:3521/10000 train_time:308551ms step_avg:87.63ms +[2025-08-22 10:13:02] [Rank 0] step:3541/10000 train_time:310337ms step_avg:87.64ms +[2025-08-22 10:13:02] [Rank 0] step:3541/10000 train_time:310337ms step_avg:87.64ms +[2025-08-22 10:13:04] [Rank 0] step:3561/10000 train_time:312123ms step_avg:87.65ms +[2025-08-22 10:13:04] [Rank 0] step:3561/10000 train_time:312123ms step_avg:87.65ms +[2025-08-22 10:13:05] [Rank 0] step:3581/10000 train_time:313909ms step_avg:87.66ms +[2025-08-22 10:13:05] [Rank 0] step:3581/10000 train_time:313909ms step_avg:87.66ms +[2025-08-22 10:13:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:13:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:13:21] [Rank 0] PRINT: step:3600/10000 val_loss:4.1027 svd_entropy: attn_qk:H=0.8901,top10E=0.09,eRank=370.7,q75/q25=20.00 attn_vo:H=0.8642,top10E=0.10,eRank=316.6,q75/q25=58.18 mlp_w1:H=0.8346,top10E=0.19,eRank=264.1,q75/q25=10.82 mlp_w2:H=0.9097,top10E=0.11,eRank=426.0,q75/q25=6.94 vo_prod:H=0.7742,top10E=0.20,eRank=179.8,q75/q25=3352.41 train_time:315698ms step_avg:87.69ms +[2025-08-22 10:13:21] [Rank 0] PRINT: step:3600/10000 val_loss:4.1027 svd_entropy: attn_qk:H=0.8901,top10E=0.09,eRank=370.7,q75/q25=20.00 attn_vo:H=0.8642,top10E=0.10,eRank=316.6,q75/q25=58.18 mlp_w1:H=0.8346,top10E=0.19,eRank=264.1,q75/q25=10.82 mlp_w2:H=0.9097,top10E=0.11,eRank=426.0,q75/q25=6.94 vo_prod:H=0.7742,top10E=0.20,eRank=179.8,q75/q25=3352.41 train_time:315698ms step_avg:87.69ms +[2025-08-22 10:13:21] [Rank 0] step:3601/10000 train_time:315717ms step_avg:87.67ms +[2025-08-22 10:13:21] [Rank 0] step:3601/10000 train_time:315717ms step_avg:87.67ms +[2025-08-22 10:13:23] [Rank 0] step:3621/10000 train_time:317572ms step_avg:87.70ms +[2025-08-22 10:13:23] [Rank 0] step:3621/10000 train_time:317572ms step_avg:87.70ms +[2025-08-22 10:13:25] [Rank 0] step:3641/10000 train_time:319351ms step_avg:87.71ms +[2025-08-22 10:13:25] [Rank 0] step:3641/10000 train_time:319351ms step_avg:87.71ms +[2025-08-22 10:13:26] [Rank 0] step:3661/10000 train_time:321134ms step_avg:87.72ms +[2025-08-22 10:13:26] [Rank 0] step:3661/10000 train_time:321134ms step_avg:87.72ms +[2025-08-22 10:13:28] [Rank 0] step:3681/10000 train_time:322916ms step_avg:87.73ms +[2025-08-22 10:13:28] [Rank 0] step:3681/10000 train_time:322916ms step_avg:87.73ms +[2025-08-22 10:13:30] [Rank 0] step:3701/10000 train_time:324698ms step_avg:87.73ms +[2025-08-22 10:13:30] [Rank 0] step:3701/10000 train_time:324698ms step_avg:87.73ms +[2025-08-22 10:13:32] [Rank 0] step:3721/10000 train_time:326510ms step_avg:87.75ms +[2025-08-22 10:13:32] [Rank 0] step:3721/10000 train_time:326510ms step_avg:87.75ms +[2025-08-22 
10:13:34] [Rank 0] step:3741/10000 train_time:328330ms step_avg:87.77ms +[2025-08-22 10:13:34] [Rank 0] step:3741/10000 train_time:328330ms step_avg:87.77ms +[2025-08-22 10:13:35] [Rank 0] step:3761/10000 train_time:330152ms step_avg:87.78ms +[2025-08-22 10:13:35] [Rank 0] step:3761/10000 train_time:330152ms step_avg:87.78ms +[2025-08-22 10:13:37] [Rank 0] step:3781/10000 train_time:331977ms step_avg:87.80ms +[2025-08-22 10:13:37] [Rank 0] step:3781/10000 train_time:331977ms step_avg:87.80ms +[2025-08-22 10:13:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:13:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:13:53] [Rank 0] PRINT: step:3800/10000 val_loss:4.0558 svd_entropy: attn_qk:H=0.8914,top10E=0.09,eRank=373.7,q75/q25=19.17 attn_vo:H=0.8667,top10E=0.10,eRank=321.8,q75/q25=54.31 mlp_w1:H=0.8383,top10E=0.19,eRank=270.3,q75/q25=10.78 mlp_w2:H=0.9117,top10E=0.11,eRank=431.6,q75/q25=6.82 vo_prod:H=0.7780,top10E=0.19,eRank=184.1,q75/q25=2802.48 train_time:333804ms step_avg:87.84ms +[2025-08-22 10:13:53] [Rank 0] PRINT: step:3800/10000 val_loss:4.0558 svd_entropy: attn_qk:H=0.8914,top10E=0.09,eRank=373.7,q75/q25=19.17 attn_vo:H=0.8667,top10E=0.10,eRank=321.8,q75/q25=54.31 mlp_w1:H=0.8383,top10E=0.19,eRank=270.3,q75/q25=10.78 mlp_w2:H=0.9117,top10E=0.11,eRank=431.6,q75/q25=6.82 vo_prod:H=0.7780,top10E=0.19,eRank=184.1,q75/q25=2802.48 train_time:333804ms step_avg:87.84ms +[2025-08-22 10:13:53] [Rank 0] step:3801/10000 train_time:333824ms step_avg:87.83ms +[2025-08-22 10:13:53] [Rank 0] step:3801/10000 train_time:333824ms step_avg:87.83ms +[2025-08-22 10:13:55] [Rank 0] step:3821/10000 train_time:335634ms step_avg:87.84ms +[2025-08-22 10:13:55] [Rank 0] step:3821/10000 train_time:335634ms step_avg:87.84ms +[2025-08-22 10:13:56] [Rank 0] step:3841/10000 train_time:337456ms 
step_avg:87.86ms +[2025-08-22 10:13:56] [Rank 0] step:3841/10000 train_time:337456ms step_avg:87.86ms +[2025-08-22 10:13:58] [Rank 0] step:3861/10000 train_time:339278ms step_avg:87.87ms +[2025-08-22 10:13:58] [Rank 0] step:3861/10000 train_time:339278ms step_avg:87.87ms +[2025-08-22 10:14:00] [Rank 0] step:3881/10000 train_time:341099ms step_avg:87.89ms +[2025-08-22 10:14:00] [Rank 0] step:3881/10000 train_time:341099ms step_avg:87.89ms +[2025-08-22 10:14:02] [Rank 0] step:3901/10000 train_time:342922ms step_avg:87.91ms +[2025-08-22 10:14:02] [Rank 0] step:3901/10000 train_time:342922ms step_avg:87.91ms +[2025-08-22 10:14:04] [Rank 0] step:3921/10000 train_time:344747ms step_avg:87.92ms +[2025-08-22 10:14:04] [Rank 0] step:3921/10000 train_time:344747ms step_avg:87.92ms +[2025-08-22 10:14:05] [Rank 0] step:3941/10000 train_time:346572ms step_avg:87.94ms +[2025-08-22 10:14:05] [Rank 0] step:3941/10000 train_time:346572ms step_avg:87.94ms +[2025-08-22 10:14:07] [Rank 0] step:3961/10000 train_time:348395ms step_avg:87.96ms +[2025-08-22 10:14:07] [Rank 0] step:3961/10000 train_time:348395ms step_avg:87.96ms +[2025-08-22 10:14:09] [Rank 0] step:3981/10000 train_time:350219ms step_avg:87.97ms +[2025-08-22 10:14:09] [Rank 0] step:3981/10000 train_time:350219ms step_avg:87.97ms +[2025-08-22 10:14:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:14:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:14:25] [Rank 0] PRINT: step:4000/10000 val_loss:4.0200 svd_entropy: attn_qk:H=0.8924,top10E=0.09,eRank=376.2,q75/q25=18.56 attn_vo:H=0.8690,top10E=0.10,eRank=326.6,q75/q25=50.77 mlp_w1:H=0.8417,top10E=0.18,eRank=276.1,q75/q25=10.77 mlp_w2:H=0.9135,top10E=0.11,eRank=436.9,q75/q25=6.68 vo_prod:H=0.7817,top10E=0.19,eRank=188.3,q75/q25=2391.43 train_time:352046ms step_avg:88.01ms +[2025-08-22 10:14:25] [Rank 0] PRINT: step:4000/10000 val_loss:4.0200 svd_entropy: attn_qk:H=0.8924,top10E=0.09,eRank=376.2,q75/q25=18.56 attn_vo:H=0.8690,top10E=0.10,eRank=326.6,q75/q25=50.77 mlp_w1:H=0.8417,top10E=0.18,eRank=276.1,q75/q25=10.77 mlp_w2:H=0.9135,top10E=0.11,eRank=436.9,q75/q25=6.68 vo_prod:H=0.7817,top10E=0.19,eRank=188.3,q75/q25=2391.43 train_time:352046ms step_avg:88.01ms +[2025-08-22 10:14:25] [Rank 0] step:4001/10000 train_time:352066ms step_avg:87.99ms +[2025-08-22 10:14:25] [Rank 0] step:4001/10000 train_time:352066ms step_avg:87.99ms +[2025-08-22 10:14:27] [Rank 0] step:4021/10000 train_time:353983ms step_avg:88.03ms +[2025-08-22 10:14:27] [Rank 0] step:4021/10000 train_time:353983ms step_avg:88.03ms +[2025-08-22 10:14:28] [Rank 0] step:4041/10000 train_time:355801ms step_avg:88.05ms +[2025-08-22 10:14:28] [Rank 0] step:4041/10000 train_time:355801ms step_avg:88.05ms +[2025-08-22 10:14:30] [Rank 0] step:4061/10000 train_time:357620ms step_avg:88.06ms +[2025-08-22 10:14:30] [Rank 0] step:4061/10000 train_time:357620ms step_avg:88.06ms +[2025-08-22 10:14:33] [Rank 0] step:4081/10000 train_time:360117ms step_avg:88.24ms +[2025-08-22 10:14:33] [Rank 0] step:4081/10000 train_time:360117ms step_avg:88.24ms +[2025-08-22 10:14:35] [Rank 0] step:4101/10000 train_time:361937ms step_avg:88.26ms +[2025-08-22 10:14:35] [Rank 0] step:4101/10000 train_time:361937ms step_avg:88.26ms +[2025-08-22 10:14:36] [Rank 0] step:4121/10000 train_time:363756ms step_avg:88.27ms +[2025-08-22 10:14:36] [Rank 0] step:4121/10000 train_time:363756ms step_avg:88.27ms +[2025-08-22 
10:14:38] [Rank 0] step:4141/10000 train_time:365577ms step_avg:88.28ms +[2025-08-22 10:14:38] [Rank 0] step:4141/10000 train_time:365577ms step_avg:88.28ms +[2025-08-22 10:14:40] [Rank 0] step:4161/10000 train_time:367395ms step_avg:88.29ms +[2025-08-22 10:14:40] [Rank 0] step:4161/10000 train_time:367395ms step_avg:88.29ms +[2025-08-22 10:14:42] [Rank 0] step:4181/10000 train_time:369217ms step_avg:88.31ms +[2025-08-22 10:14:42] [Rank 0] step:4181/10000 train_time:369217ms step_avg:88.31ms +[2025-08-22 10:14:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:14:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:14:57] [Rank 0] PRINT: step:4200/10000 val_loss:4.0091 svd_entropy: attn_qk:H=0.8933,top10E=0.09,eRank=378.6,q75/q25=18.11 attn_vo:H=0.8711,top10E=0.10,eRank=330.9,q75/q25=47.60 mlp_w1:H=0.8448,top10E=0.18,eRank=281.4,q75/q25=10.71 mlp_w2:H=0.9151,top10E=0.11,eRank=441.4,q75/q25=6.61 vo_prod:H=0.7851,top10E=0.18,eRank=192.1,q75/q25=2074.62 train_time:371041ms step_avg:88.34ms +[2025-08-22 10:14:57] [Rank 0] PRINT: step:4200/10000 val_loss:4.0091 svd_entropy: attn_qk:H=0.8933,top10E=0.09,eRank=378.6,q75/q25=18.11 attn_vo:H=0.8711,top10E=0.10,eRank=330.9,q75/q25=47.60 mlp_w1:H=0.8448,top10E=0.18,eRank=281.4,q75/q25=10.71 mlp_w2:H=0.9151,top10E=0.11,eRank=441.4,q75/q25=6.61 vo_prod:H=0.7851,top10E=0.18,eRank=192.1,q75/q25=2074.62 train_time:371041ms step_avg:88.34ms +[2025-08-22 10:14:57] [Rank 0] step:4201/10000 train_time:371060ms step_avg:88.33ms +[2025-08-22 10:14:57] [Rank 0] step:4201/10000 train_time:371060ms step_avg:88.33ms +[2025-08-22 10:14:59] [Rank 0] step:4221/10000 train_time:372884ms step_avg:88.34ms +[2025-08-22 10:14:59] [Rank 0] step:4221/10000 train_time:372884ms step_avg:88.34ms +[2025-08-22 10:15:01] [Rank 0] step:4241/10000 train_time:374705ms 
step_avg:88.35ms +[2025-08-22 10:15:01] [Rank 0] step:4241/10000 train_time:374705ms step_avg:88.35ms +[2025-08-22 10:15:03] [Rank 0] step:4261/10000 train_time:376523ms step_avg:88.36ms +[2025-08-22 10:15:03] [Rank 0] step:4261/10000 train_time:376523ms step_avg:88.36ms +[2025-08-22 10:15:05] [Rank 0] step:4281/10000 train_time:378344ms step_avg:88.38ms +[2025-08-22 10:15:05] [Rank 0] step:4281/10000 train_time:378344ms step_avg:88.38ms +[2025-08-22 10:15:06] [Rank 0] step:4301/10000 train_time:380163ms step_avg:88.39ms +[2025-08-22 10:15:06] [Rank 0] step:4301/10000 train_time:380163ms step_avg:88.39ms +[2025-08-22 10:15:08] [Rank 0] step:4321/10000 train_time:381985ms step_avg:88.40ms +[2025-08-22 10:15:08] [Rank 0] step:4321/10000 train_time:381985ms step_avg:88.40ms +[2025-08-22 10:15:10] [Rank 0] step:4341/10000 train_time:383804ms step_avg:88.41ms +[2025-08-22 10:15:10] [Rank 0] step:4341/10000 train_time:383804ms step_avg:88.41ms +[2025-08-22 10:15:12] [Rank 0] step:4361/10000 train_time:385626ms step_avg:88.43ms +[2025-08-22 10:15:12] [Rank 0] step:4361/10000 train_time:385626ms step_avg:88.43ms +[2025-08-22 10:15:14] [Rank 0] step:4381/10000 train_time:387448ms step_avg:88.44ms +[2025-08-22 10:15:14] [Rank 0] step:4381/10000 train_time:387448ms step_avg:88.44ms +[2025-08-22 10:15:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:15:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:15:29] [Rank 0] PRINT: step:4400/10000 val_loss:3.9820 svd_entropy: attn_qk:H=0.8942,top10E=0.09,eRank=380.7,q75/q25=17.59 attn_vo:H=0.8730,top10E=0.09,eRank=334.8,q75/q25=45.00 mlp_w1:H=0.8476,top10E=0.18,eRank=286.5,q75/q25=10.69 mlp_w2:H=0.9165,top10E=0.10,eRank=445.6,q75/q25=6.53 vo_prod:H=0.7879,top10E=0.18,eRank=195.5,q75/q25=1813.58 train_time:389273ms step_avg:88.47ms +[2025-08-22 10:15:29] [Rank 0] PRINT: step:4400/10000 val_loss:3.9820 svd_entropy: attn_qk:H=0.8942,top10E=0.09,eRank=380.7,q75/q25=17.59 attn_vo:H=0.8730,top10E=0.09,eRank=334.8,q75/q25=45.00 mlp_w1:H=0.8476,top10E=0.18,eRank=286.5,q75/q25=10.69 mlp_w2:H=0.9165,top10E=0.10,eRank=445.6,q75/q25=6.53 vo_prod:H=0.7879,top10E=0.18,eRank=195.5,q75/q25=1813.58 train_time:389273ms step_avg:88.47ms +[2025-08-22 10:15:29] [Rank 0] step:4401/10000 train_time:389292ms step_avg:88.46ms +[2025-08-22 10:15:29] [Rank 0] step:4401/10000 train_time:389292ms step_avg:88.46ms +[2025-08-22 10:15:31] [Rank 0] step:4421/10000 train_time:391114ms step_avg:88.47ms +[2025-08-22 10:15:31] [Rank 0] step:4421/10000 train_time:391114ms step_avg:88.47ms +[2025-08-22 10:15:33] [Rank 0] step:4441/10000 train_time:392934ms step_avg:88.48ms +[2025-08-22 10:15:33] [Rank 0] step:4441/10000 train_time:392934ms step_avg:88.48ms +[2025-08-22 10:15:35] [Rank 0] step:4461/10000 train_time:394761ms step_avg:88.49ms +[2025-08-22 10:15:35] [Rank 0] step:4461/10000 train_time:394761ms step_avg:88.49ms +[2025-08-22 10:15:37] [Rank 0] step:4481/10000 train_time:396591ms step_avg:88.50ms +[2025-08-22 10:15:37] [Rank 0] step:4481/10000 train_time:396591ms step_avg:88.50ms +[2025-08-22 10:15:38] [Rank 0] step:4501/10000 train_time:398418ms step_avg:88.52ms +[2025-08-22 10:15:38] [Rank 0] step:4501/10000 train_time:398418ms step_avg:88.52ms +[2025-08-22 10:15:40] [Rank 0] step:4521/10000 train_time:400248ms step_avg:88.53ms +[2025-08-22 10:15:40] [Rank 0] step:4521/10000 train_time:400248ms step_avg:88.53ms +[2025-08-22 
10:15:42] [Rank 0] step:4541/10000 train_time:402079ms step_avg:88.54ms +[2025-08-22 10:15:42] [Rank 0] step:4541/10000 train_time:402079ms step_avg:88.54ms +[2025-08-22 10:15:44] [Rank 0] step:4561/10000 train_time:403912ms step_avg:88.56ms +[2025-08-22 10:15:44] [Rank 0] step:4561/10000 train_time:403912ms step_avg:88.56ms +[2025-08-22 10:15:46] [Rank 0] step:4581/10000 train_time:405747ms step_avg:88.57ms +[2025-08-22 10:15:46] [Rank 0] step:4581/10000 train_time:405747ms step_avg:88.57ms +[2025-08-22 10:15:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:15:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:16:01] [Rank 0] PRINT: step:4600/10000 val_loss:3.9539 svd_entropy: attn_qk:H=0.8949,top10E=0.09,eRank=382.7,q75/q25=17.18 attn_vo:H=0.8748,top10E=0.09,eRank=338.6,q75/q25=42.84 mlp_w1:H=0.8503,top10E=0.17,eRank=291.3,q75/q25=10.65 mlp_w2:H=0.9178,top10E=0.10,eRank=449.4,q75/q25=6.49 vo_prod:H=0.7908,top10E=0.18,eRank=198.9,q75/q25=1572.98 train_time:407582ms step_avg:88.60ms +[2025-08-22 10:16:01] [Rank 0] PRINT: step:4600/10000 val_loss:3.9539 svd_entropy: attn_qk:H=0.8949,top10E=0.09,eRank=382.7,q75/q25=17.18 attn_vo:H=0.8748,top10E=0.09,eRank=338.6,q75/q25=42.84 mlp_w1:H=0.8503,top10E=0.17,eRank=291.3,q75/q25=10.65 mlp_w2:H=0.9178,top10E=0.10,eRank=449.4,q75/q25=6.49 vo_prod:H=0.7908,top10E=0.18,eRank=198.9,q75/q25=1572.98 train_time:407582ms step_avg:88.60ms +[2025-08-22 10:16:01] [Rank 0] step:4601/10000 train_time:407601ms step_avg:88.59ms +[2025-08-22 10:16:01] [Rank 0] step:4601/10000 train_time:407601ms step_avg:88.59ms +[2025-08-22 10:16:03] [Rank 0] step:4621/10000 train_time:409434ms step_avg:88.60ms +[2025-08-22 10:16:03] [Rank 0] step:4621/10000 train_time:409434ms step_avg:88.60ms +[2025-08-22 10:16:05] [Rank 0] step:4641/10000 train_time:411261ms 
step_avg:88.61ms +[2025-08-22 10:16:05] [Rank 0] step:4641/10000 train_time:411261ms step_avg:88.61ms +[2025-08-22 10:16:07] [Rank 0] step:4661/10000 train_time:413085ms step_avg:88.63ms +[2025-08-22 10:16:07] [Rank 0] step:4661/10000 train_time:413085ms step_avg:88.63ms +[2025-08-22 10:16:09] [Rank 0] step:4681/10000 train_time:414909ms step_avg:88.64ms +[2025-08-22 10:16:09] [Rank 0] step:4681/10000 train_time:414909ms step_avg:88.64ms +[2025-08-22 10:16:11] [Rank 0] step:4701/10000 train_time:416735ms step_avg:88.65ms +[2025-08-22 10:16:11] [Rank 0] step:4701/10000 train_time:416735ms step_avg:88.65ms +[2025-08-22 10:16:12] [Rank 0] step:4721/10000 train_time:418562ms step_avg:88.66ms +[2025-08-22 10:16:12] [Rank 0] step:4721/10000 train_time:418562ms step_avg:88.66ms +[2025-08-22 10:16:14] [Rank 0] step:4741/10000 train_time:420389ms step_avg:88.67ms +[2025-08-22 10:16:14] [Rank 0] step:4741/10000 train_time:420389ms step_avg:88.67ms +[2025-08-22 10:16:16] [Rank 0] step:4761/10000 train_time:422218ms step_avg:88.68ms +[2025-08-22 10:16:16] [Rank 0] step:4761/10000 train_time:422218ms step_avg:88.68ms +[2025-08-22 10:16:18] [Rank 0] step:4781/10000 train_time:424043ms step_avg:88.69ms +[2025-08-22 10:16:18] [Rank 0] step:4781/10000 train_time:424043ms step_avg:88.69ms +[2025-08-22 10:16:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:16:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:16:34] [Rank 0] PRINT: step:4800/10000 val_loss:3.9407 svd_entropy: attn_qk:H=0.8957,top10E=0.09,eRank=384.5,q75/q25=16.80 attn_vo:H=0.8764,top10E=0.09,eRank=342.1,q75/q25=40.45 mlp_w1:H=0.8528,top10E=0.17,eRank=296.0,q75/q25=10.60 mlp_w2:H=0.9189,top10E=0.10,eRank=452.7,q75/q25=6.41 vo_prod:H=0.7934,top10E=0.18,eRank=202.0,q75/q25=1381.37 train_time:425875ms step_avg:88.72ms +[2025-08-22 10:16:34] [Rank 0] PRINT: step:4800/10000 val_loss:3.9407 svd_entropy: attn_qk:H=0.8957,top10E=0.09,eRank=384.5,q75/q25=16.80 attn_vo:H=0.8764,top10E=0.09,eRank=342.1,q75/q25=40.45 mlp_w1:H=0.8528,top10E=0.17,eRank=296.0,q75/q25=10.60 mlp_w2:H=0.9189,top10E=0.10,eRank=452.7,q75/q25=6.41 vo_prod:H=0.7934,top10E=0.18,eRank=202.0,q75/q25=1381.37 train_time:425875ms step_avg:88.72ms +[2025-08-22 10:16:34] [Rank 0] step:4801/10000 train_time:425895ms step_avg:88.71ms +[2025-08-22 10:16:34] [Rank 0] step:4801/10000 train_time:425895ms step_avg:88.71ms +[2025-08-22 10:16:35] [Rank 0] step:4821/10000 train_time:427727ms step_avg:88.72ms +[2025-08-22 10:16:35] [Rank 0] step:4821/10000 train_time:427727ms step_avg:88.72ms +[2025-08-22 10:16:37] [Rank 0] step:4841/10000 train_time:429551ms step_avg:88.73ms +[2025-08-22 10:16:37] [Rank 0] step:4841/10000 train_time:429551ms step_avg:88.73ms +[2025-08-22 10:16:39] [Rank 0] step:4861/10000 train_time:431377ms step_avg:88.74ms +[2025-08-22 10:16:39] [Rank 0] step:4861/10000 train_time:431377ms step_avg:88.74ms +[2025-08-22 10:16:41] [Rank 0] step:4881/10000 train_time:433201ms step_avg:88.75ms +[2025-08-22 10:16:41] [Rank 0] step:4881/10000 train_time:433201ms step_avg:88.75ms +[2025-08-22 10:16:43] [Rank 0] step:4901/10000 train_time:435024ms step_avg:88.76ms +[2025-08-22 10:16:43] [Rank 0] step:4901/10000 train_time:435024ms step_avg:88.76ms +[2025-08-22 10:16:45] [Rank 0] step:4921/10000 train_time:436851ms step_avg:88.77ms +[2025-08-22 10:16:45] [Rank 0] step:4921/10000 train_time:436851ms step_avg:88.77ms +[2025-08-22 
10:16:46] [Rank 0] step:4941/10000 train_time:438681ms step_avg:88.78ms +[2025-08-22 10:16:46] [Rank 0] step:4941/10000 train_time:438681ms step_avg:88.78ms +[2025-08-22 10:16:48] [Rank 0] step:4961/10000 train_time:440507ms step_avg:88.79ms +[2025-08-22 10:16:48] [Rank 0] step:4961/10000 train_time:440507ms step_avg:88.79ms +[2025-08-22 10:16:50] [Rank 0] step:4981/10000 train_time:442337ms step_avg:88.80ms +[2025-08-22 10:16:50] [Rank 0] step:4981/10000 train_time:442337ms step_avg:88.80ms +[2025-08-22 10:16:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:16:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:17:06] [Rank 0] PRINT: step:5000/10000 val_loss:3.9212 svd_entropy: attn_qk:H=0.8963,top10E=0.09,eRank=386.2,q75/q25=16.49 attn_vo:H=0.8779,top10E=0.09,eRank=345.3,q75/q25=38.86 mlp_w1:H=0.8550,top10E=0.17,eRank=300.2,q75/q25=10.58 mlp_w2:H=0.9199,top10E=0.10,eRank=455.7,q75/q25=6.36 vo_prod:H=0.7958,top10E=0.17,eRank=204.8,q75/q25=1226.74 train_time:444169ms step_avg:88.83ms +[2025-08-22 10:17:06] [Rank 0] PRINT: step:5000/10000 val_loss:3.9212 svd_entropy: attn_qk:H=0.8963,top10E=0.09,eRank=386.2,q75/q25=16.49 attn_vo:H=0.8779,top10E=0.09,eRank=345.3,q75/q25=38.86 mlp_w1:H=0.8550,top10E=0.17,eRank=300.2,q75/q25=10.58 mlp_w2:H=0.9199,top10E=0.10,eRank=455.7,q75/q25=6.36 vo_prod:H=0.7958,top10E=0.17,eRank=204.8,q75/q25=1226.74 train_time:444169ms step_avg:88.83ms +[2025-08-22 10:17:06] [Rank 0] step:5001/10000 train_time:444188ms step_avg:88.82ms +[2025-08-22 10:17:06] [Rank 0] step:5001/10000 train_time:444188ms step_avg:88.82ms +[2025-08-22 10:17:08] [Rank 0] step:5021/10000 train_time:446016ms step_avg:88.83ms +[2025-08-22 10:17:08] [Rank 0] step:5021/10000 train_time:446016ms step_avg:88.83ms +[2025-08-22 10:17:10] [Rank 0] step:5041/10000 train_time:447845ms 
step_avg:88.84ms +[2025-08-22 10:17:10] [Rank 0] step:5041/10000 train_time:447845ms step_avg:88.84ms +[2025-08-22 10:17:11] [Rank 0] step:5061/10000 train_time:449669ms step_avg:88.85ms +[2025-08-22 10:17:11] [Rank 0] step:5061/10000 train_time:449669ms step_avg:88.85ms +[2025-08-22 10:17:13] [Rank 0] step:5081/10000 train_time:451496ms step_avg:88.86ms +[2025-08-22 10:17:13] [Rank 0] step:5081/10000 train_time:451496ms step_avg:88.86ms +[2025-08-22 10:17:15] [Rank 0] step:5101/10000 train_time:453322ms step_avg:88.87ms +[2025-08-22 10:17:15] [Rank 0] step:5101/10000 train_time:453322ms step_avg:88.87ms +[2025-08-22 10:17:17] [Rank 0] step:5121/10000 train_time:455153ms step_avg:88.88ms +[2025-08-22 10:17:17] [Rank 0] step:5121/10000 train_time:455153ms step_avg:88.88ms +[2025-08-22 10:17:19] [Rank 0] step:5141/10000 train_time:456985ms step_avg:88.89ms +[2025-08-22 10:17:19] [Rank 0] step:5141/10000 train_time:456985ms step_avg:88.89ms +[2025-08-22 10:17:21] [Rank 0] step:5161/10000 train_time:458814ms step_avg:88.90ms +[2025-08-22 10:17:21] [Rank 0] step:5161/10000 train_time:458814ms step_avg:88.90ms +[2025-08-22 10:17:22] [Rank 0] step:5181/10000 train_time:460645ms step_avg:88.91ms +[2025-08-22 10:17:22] [Rank 0] step:5181/10000 train_time:460645ms step_avg:88.91ms +[2025-08-22 10:17:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:17:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:17:38] [Rank 0] PRINT: step:5200/10000 val_loss:3.9053 svd_entropy: attn_qk:H=0.8969,top10E=0.09,eRank=387.7,q75/q25=16.19 attn_vo:H=0.8792,top10E=0.09,eRank=348.3,q75/q25=37.33 mlp_w1:H=0.8572,top10E=0.17,eRank=304.3,q75/q25=10.53 mlp_w2:H=0.9208,top10E=0.10,eRank=458.4,q75/q25=6.32 vo_prod:H=0.7981,top10E=0.17,eRank=207.7,q75/q25=1123.01 train_time:462503ms step_avg:88.94ms +[2025-08-22 10:17:38] [Rank 0] PRINT: step:5200/10000 val_loss:3.9053 svd_entropy: attn_qk:H=0.8969,top10E=0.09,eRank=387.7,q75/q25=16.19 attn_vo:H=0.8792,top10E=0.09,eRank=348.3,q75/q25=37.33 mlp_w1:H=0.8572,top10E=0.17,eRank=304.3,q75/q25=10.53 mlp_w2:H=0.9208,top10E=0.10,eRank=458.4,q75/q25=6.32 vo_prod:H=0.7981,top10E=0.17,eRank=207.7,q75/q25=1123.01 train_time:462503ms step_avg:88.94ms +[2025-08-22 10:17:38] [Rank 0] step:5201/10000 train_time:462522ms step_avg:88.93ms +[2025-08-22 10:17:38] [Rank 0] step:5201/10000 train_time:462522ms step_avg:88.93ms +[2025-08-22 10:17:40] [Rank 0] step:5221/10000 train_time:464368ms step_avg:88.94ms +[2025-08-22 10:17:40] [Rank 0] step:5221/10000 train_time:464368ms step_avg:88.94ms +[2025-08-22 10:17:42] [Rank 0] step:5241/10000 train_time:466224ms step_avg:88.96ms +[2025-08-22 10:17:42] [Rank 0] step:5241/10000 train_time:466224ms step_avg:88.96ms +[2025-08-22 10:17:44] [Rank 0] step:5261/10000 train_time:468080ms step_avg:88.97ms +[2025-08-22 10:17:44] [Rank 0] step:5261/10000 train_time:468080ms step_avg:88.97ms +[2025-08-22 10:17:46] [Rank 0] step:5281/10000 train_time:469938ms step_avg:88.99ms +[2025-08-22 10:17:46] [Rank 0] step:5281/10000 train_time:469938ms step_avg:88.99ms +[2025-08-22 10:17:48] [Rank 0] step:5301/10000 train_time:471807ms step_avg:89.00ms +[2025-08-22 10:17:48] [Rank 0] step:5301/10000 train_time:471807ms step_avg:89.00ms +[2025-08-22 10:17:49] [Rank 0] step:5321/10000 train_time:473669ms step_avg:89.02ms +[2025-08-22 10:17:49] [Rank 0] step:5321/10000 train_time:473669ms step_avg:89.02ms +[2025-08-22 
10:17:51] [Rank 0] step:5341/10000 train_time:475530ms step_avg:89.03ms +[2025-08-22 10:17:51] [Rank 0] step:5341/10000 train_time:475530ms step_avg:89.03ms +[2025-08-22 10:17:53] [Rank 0] step:5361/10000 train_time:477394ms step_avg:89.05ms +[2025-08-22 10:17:53] [Rank 0] step:5361/10000 train_time:477394ms step_avg:89.05ms +[2025-08-22 10:17:55] [Rank 0] step:5381/10000 train_time:479257ms step_avg:89.06ms +[2025-08-22 10:17:55] [Rank 0] step:5381/10000 train_time:479257ms step_avg:89.06ms +[2025-08-22 10:17:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:17:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:18:11] [Rank 0] PRINT: step:5400/10000 val_loss:3.8923 svd_entropy: attn_qk:H=0.8974,top10E=0.09,eRank=388.9,q75/q25=15.92 attn_vo:H=0.8804,top10E=0.09,eRank=351.0,q75/q25=35.78 mlp_w1:H=0.8592,top10E=0.16,eRank=308.3,q75/q25=10.49 mlp_w2:H=0.9216,top10E=0.10,eRank=460.8,q75/q25=6.29 vo_prod:H=0.8001,top10E=0.17,eRank=210.2,q75/q25=1005.70 train_time:481120ms step_avg:89.10ms +[2025-08-22 10:18:11] [Rank 0] PRINT: step:5400/10000 val_loss:3.8923 svd_entropy: attn_qk:H=0.8974,top10E=0.09,eRank=388.9,q75/q25=15.92 attn_vo:H=0.8804,top10E=0.09,eRank=351.0,q75/q25=35.78 mlp_w1:H=0.8592,top10E=0.16,eRank=308.3,q75/q25=10.49 mlp_w2:H=0.9216,top10E=0.10,eRank=460.8,q75/q25=6.29 vo_prod:H=0.8001,top10E=0.17,eRank=210.2,q75/q25=1005.70 train_time:481120ms step_avg:89.10ms +[2025-08-22 10:18:11] [Rank 0] step:5401/10000 train_time:481139ms step_avg:89.08ms +[2025-08-22 10:18:11] [Rank 0] step:5401/10000 train_time:481139ms step_avg:89.08ms +[2025-08-22 10:18:13] [Rank 0] step:5421/10000 train_time:482996ms step_avg:89.10ms +[2025-08-22 10:18:13] [Rank 0] step:5421/10000 train_time:482996ms step_avg:89.10ms +[2025-08-22 10:18:15] [Rank 0] step:5441/10000 train_time:484849ms 
step_avg:89.11ms +[2025-08-22 10:18:15] [Rank 0] step:5441/10000 train_time:484849ms step_avg:89.11ms +[2025-08-22 10:18:16] [Rank 0] step:5461/10000 train_time:486709ms step_avg:89.12ms +[2025-08-22 10:18:16] [Rank 0] step:5461/10000 train_time:486709ms step_avg:89.12ms +[2025-08-22 10:18:18] [Rank 0] step:5481/10000 train_time:488568ms step_avg:89.14ms +[2025-08-22 10:18:18] [Rank 0] step:5481/10000 train_time:488568ms step_avg:89.14ms +[2025-08-22 10:18:20] [Rank 0] step:5501/10000 train_time:490433ms step_avg:89.15ms +[2025-08-22 10:18:20] [Rank 0] step:5501/10000 train_time:490433ms step_avg:89.15ms +[2025-08-22 10:18:22] [Rank 0] step:5521/10000 train_time:492297ms step_avg:89.17ms +[2025-08-22 10:18:22] [Rank 0] step:5521/10000 train_time:492297ms step_avg:89.17ms +[2025-08-22 10:18:24] [Rank 0] step:5541/10000 train_time:494156ms step_avg:89.18ms +[2025-08-22 10:18:24] [Rank 0] step:5541/10000 train_time:494156ms step_avg:89.18ms +[2025-08-22 10:18:26] [Rank 0] step:5561/10000 train_time:496017ms step_avg:89.20ms +[2025-08-22 10:18:26] [Rank 0] step:5561/10000 train_time:496017ms step_avg:89.20ms +[2025-08-22 10:18:28] [Rank 0] step:5581/10000 train_time:497877ms step_avg:89.21ms +[2025-08-22 10:18:28] [Rank 0] step:5581/10000 train_time:497877ms step_avg:89.21ms +[2025-08-22 10:18:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:18:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:18:43] [Rank 0] PRINT: step:5600/10000 val_loss:3.8747 svd_entropy: attn_qk:H=0.8978,top10E=0.09,eRank=390.1,q75/q25=15.75 attn_vo:H=0.8815,top10E=0.09,eRank=353.4,q75/q25=34.61 mlp_w1:H=0.8611,top10E=0.16,eRank=311.9,q75/q25=10.42 mlp_w2:H=0.9223,top10E=0.10,eRank=462.9,q75/q25=6.26 vo_prod:H=0.8019,top10E=0.17,eRank=212.5,q75/q25=930.89 train_time:499743ms step_avg:89.24ms +[2025-08-22 10:18:43] [Rank 0] PRINT: step:5600/10000 val_loss:3.8747 svd_entropy: attn_qk:H=0.8978,top10E=0.09,eRank=390.1,q75/q25=15.75 attn_vo:H=0.8815,top10E=0.09,eRank=353.4,q75/q25=34.61 mlp_w1:H=0.8611,top10E=0.16,eRank=311.9,q75/q25=10.42 mlp_w2:H=0.9223,top10E=0.10,eRank=462.9,q75/q25=6.26 vo_prod:H=0.8019,top10E=0.17,eRank=212.5,q75/q25=930.89 train_time:499743ms step_avg:89.24ms +[2025-08-22 10:18:43] [Rank 0] step:5601/10000 train_time:499763ms step_avg:89.23ms +[2025-08-22 10:18:43] [Rank 0] step:5601/10000 train_time:499763ms step_avg:89.23ms +[2025-08-22 10:18:45] [Rank 0] step:5621/10000 train_time:501622ms step_avg:89.24ms +[2025-08-22 10:18:45] [Rank 0] step:5621/10000 train_time:501622ms step_avg:89.24ms +[2025-08-22 10:18:47] [Rank 0] step:5641/10000 train_time:503476ms step_avg:89.25ms +[2025-08-22 10:18:47] [Rank 0] step:5641/10000 train_time:503476ms step_avg:89.25ms +[2025-08-22 10:18:49] [Rank 0] step:5661/10000 train_time:505330ms step_avg:89.27ms +[2025-08-22 10:18:49] [Rank 0] step:5661/10000 train_time:505330ms step_avg:89.27ms +[2025-08-22 10:18:51] [Rank 0] step:5681/10000 train_time:507189ms step_avg:89.28ms +[2025-08-22 10:18:51] [Rank 0] step:5681/10000 train_time:507189ms step_avg:89.28ms +[2025-08-22 10:18:53] [Rank 0] step:5701/10000 train_time:509046ms step_avg:89.29ms +[2025-08-22 10:18:53] [Rank 0] step:5701/10000 train_time:509046ms step_avg:89.29ms +[2025-08-22 10:18:55] [Rank 0] step:5721/10000 train_time:510908ms step_avg:89.30ms +[2025-08-22 10:18:55] [Rank 0] step:5721/10000 train_time:510908ms step_avg:89.30ms +[2025-08-22 
10:18:56] [Rank 0] step:5741/10000 train_time:512765ms step_avg:89.32ms +[2025-08-22 10:18:56] [Rank 0] step:5741/10000 train_time:512765ms step_avg:89.32ms +[2025-08-22 10:18:58] [Rank 0] step:5761/10000 train_time:514626ms step_avg:89.33ms +[2025-08-22 10:18:58] [Rank 0] step:5761/10000 train_time:514626ms step_avg:89.33ms +[2025-08-22 10:19:00] [Rank 0] step:5781/10000 train_time:516487ms step_avg:89.34ms +[2025-08-22 10:19:00] [Rank 0] step:5781/10000 train_time:516487ms step_avg:89.34ms +[2025-08-22 10:19:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:19:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:19:16] [Rank 0] PRINT: step:5800/10000 val_loss:3.8669 svd_entropy: attn_qk:H=0.8982,top10E=0.09,eRank=391.2,q75/q25=15.55 attn_vo:H=0.8826,top10E=0.09,eRank=355.8,q75/q25=33.46 mlp_w1:H=0.8628,top10E=0.16,eRank=315.4,q75/q25=10.38 mlp_w2:H=0.9230,top10E=0.10,eRank=465.0,q75/q25=6.24 vo_prod:H=0.8037,top10E=0.17,eRank=214.6,q75/q25=828.31 train_time:518354ms step_avg:89.37ms +[2025-08-22 10:19:16] [Rank 0] PRINT: step:5800/10000 val_loss:3.8669 svd_entropy: attn_qk:H=0.8982,top10E=0.09,eRank=391.2,q75/q25=15.55 attn_vo:H=0.8826,top10E=0.09,eRank=355.8,q75/q25=33.46 mlp_w1:H=0.8628,top10E=0.16,eRank=315.4,q75/q25=10.38 mlp_w2:H=0.9230,top10E=0.10,eRank=465.0,q75/q25=6.24 vo_prod:H=0.8037,top10E=0.17,eRank=214.6,q75/q25=828.31 train_time:518354ms step_avg:89.37ms +[2025-08-22 10:19:16] [Rank 0] step:5801/10000 train_time:518374ms step_avg:89.36ms +[2025-08-22 10:19:16] [Rank 0] step:5801/10000 train_time:518374ms step_avg:89.36ms +[2025-08-22 10:19:18] [Rank 0] step:5821/10000 train_time:520236ms step_avg:89.37ms +[2025-08-22 10:19:18] [Rank 0] step:5821/10000 train_time:520236ms step_avg:89.37ms +[2025-08-22 10:19:20] [Rank 0] step:5841/10000 train_time:522091ms 
step_avg:89.38ms +[2025-08-22 10:19:20] [Rank 0] step:5841/10000 train_time:522091ms step_avg:89.38ms +[2025-08-22 10:19:22] [Rank 0] step:5861/10000 train_time:523956ms step_avg:89.40ms +[2025-08-22 10:19:22] [Rank 0] step:5861/10000 train_time:523956ms step_avg:89.40ms +[2025-08-22 10:19:23] [Rank 0] step:5881/10000 train_time:525818ms step_avg:89.41ms +[2025-08-22 10:19:23] [Rank 0] step:5881/10000 train_time:525818ms step_avg:89.41ms +[2025-08-22 10:19:25] [Rank 0] step:5901/10000 train_time:527681ms step_avg:89.42ms +[2025-08-22 10:19:25] [Rank 0] step:5901/10000 train_time:527681ms step_avg:89.42ms +[2025-08-22 10:19:27] [Rank 0] step:5921/10000 train_time:529545ms step_avg:89.44ms +[2025-08-22 10:19:27] [Rank 0] step:5921/10000 train_time:529545ms step_avg:89.44ms +[2025-08-22 10:19:29] [Rank 0] step:5941/10000 train_time:531415ms step_avg:89.45ms +[2025-08-22 10:19:29] [Rank 0] step:5941/10000 train_time:531415ms step_avg:89.45ms +[2025-08-22 10:19:31] [Rank 0] step:5961/10000 train_time:533280ms step_avg:89.46ms +[2025-08-22 10:19:31] [Rank 0] step:5961/10000 train_time:533280ms step_avg:89.46ms +[2025-08-22 10:19:33] [Rank 0] step:5981/10000 train_time:535146ms step_avg:89.47ms +[2025-08-22 10:19:33] [Rank 0] step:5981/10000 train_time:535146ms step_avg:89.47ms +[2025-08-22 10:19:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:19:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:19:48] [Rank 0] PRINT: step:6000/10000 val_loss:3.8418 svd_entropy: attn_qk:H=0.8987,top10E=0.09,eRank=392.3,q75/q25=15.34 attn_vo:H=0.8836,top10E=0.09,eRank=358.1,q75/q25=32.37 mlp_w1:H=0.8645,top10E=0.16,eRank=318.8,q75/q25=10.35 mlp_w2:H=0.9236,top10E=0.10,eRank=466.8,q75/q25=6.23 vo_prod:H=0.8054,top10E=0.16,eRank=216.8,q75/q25=773.97 train_time:537011ms step_avg:89.50ms +[2025-08-22 10:19:48] [Rank 0] PRINT: step:6000/10000 val_loss:3.8418 svd_entropy: attn_qk:H=0.8987,top10E=0.09,eRank=392.3,q75/q25=15.34 attn_vo:H=0.8836,top10E=0.09,eRank=358.1,q75/q25=32.37 mlp_w1:H=0.8645,top10E=0.16,eRank=318.8,q75/q25=10.35 mlp_w2:H=0.9236,top10E=0.10,eRank=466.8,q75/q25=6.23 vo_prod:H=0.8054,top10E=0.16,eRank=216.8,q75/q25=773.97 train_time:537011ms step_avg:89.50ms +[2025-08-22 10:19:48] [Rank 0] step:6001/10000 train_time:537030ms step_avg:89.49ms +[2025-08-22 10:19:48] [Rank 0] step:6001/10000 train_time:537030ms step_avg:89.49ms +[2025-08-22 10:19:50] [Rank 0] step:6021/10000 train_time:538901ms step_avg:89.50ms +[2025-08-22 10:19:50] [Rank 0] step:6021/10000 train_time:538901ms step_avg:89.50ms +[2025-08-22 10:19:52] [Rank 0] step:6041/10000 train_time:540763ms step_avg:89.52ms +[2025-08-22 10:19:52] [Rank 0] step:6041/10000 train_time:540763ms step_avg:89.52ms +[2025-08-22 10:19:54] [Rank 0] step:6061/10000 train_time:542627ms step_avg:89.53ms +[2025-08-22 10:19:54] [Rank 0] step:6061/10000 train_time:542627ms step_avg:89.53ms +[2025-08-22 10:19:56] [Rank 0] step:6081/10000 train_time:544488ms step_avg:89.54ms +[2025-08-22 10:19:56] [Rank 0] step:6081/10000 train_time:544488ms step_avg:89.54ms +[2025-08-22 10:19:58] [Rank 0] step:6101/10000 train_time:546357ms step_avg:89.55ms +[2025-08-22 10:19:58] [Rank 0] step:6101/10000 train_time:546357ms step_avg:89.55ms +[2025-08-22 10:20:00] [Rank 0] step:6121/10000 train_time:548495ms step_avg:89.61ms +[2025-08-22 10:20:00] [Rank 0] step:6121/10000 train_time:548495ms step_avg:89.61ms +[2025-08-22 
10:20:02] [Rank 0] step:6141/10000 train_time:550370ms step_avg:89.62ms +[2025-08-22 10:20:02] [Rank 0] step:6141/10000 train_time:550370ms step_avg:89.62ms +[2025-08-22 10:20:04] [Rank 0] step:6161/10000 train_time:552235ms step_avg:89.63ms +[2025-08-22 10:20:04] [Rank 0] step:6161/10000 train_time:552235ms step_avg:89.63ms +[2025-08-22 10:20:06] [Rank 0] step:6181/10000 train_time:554097ms step_avg:89.65ms +[2025-08-22 10:20:06] [Rank 0] step:6181/10000 train_time:554097ms step_avg:89.65ms +[2025-08-22 10:20:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:20:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:20:21] [Rank 0] PRINT: step:6200/10000 val_loss:3.8259 svd_entropy: attn_qk:H=0.8990,top10E=0.09,eRank=393.2,q75/q25=15.23 attn_vo:H=0.8846,top10E=0.09,eRank=360.2,q75/q25=31.42 mlp_w1:H=0.8660,top10E=0.16,eRank=322.0,q75/q25=10.31 mlp_w2:H=0.9241,top10E=0.10,eRank=468.5,q75/q25=6.22 vo_prod:H=0.8069,top10E=0.16,eRank=218.9,q75/q25=713.29 train_time:555965ms step_avg:89.67ms +[2025-08-22 10:20:21] [Rank 0] PRINT: step:6200/10000 val_loss:3.8259 svd_entropy: attn_qk:H=0.8990,top10E=0.09,eRank=393.2,q75/q25=15.23 attn_vo:H=0.8846,top10E=0.09,eRank=360.2,q75/q25=31.42 mlp_w1:H=0.8660,top10E=0.16,eRank=322.0,q75/q25=10.31 mlp_w2:H=0.9241,top10E=0.10,eRank=468.5,q75/q25=6.22 vo_prod:H=0.8069,top10E=0.16,eRank=218.9,q75/q25=713.29 train_time:555965ms step_avg:89.67ms +[2025-08-22 10:20:21] [Rank 0] step:6201/10000 train_time:555984ms step_avg:89.66ms +[2025-08-22 10:20:21] [Rank 0] step:6201/10000 train_time:555984ms step_avg:89.66ms +[2025-08-22 10:20:23] [Rank 0] step:6221/10000 train_time:557855ms step_avg:89.67ms +[2025-08-22 10:20:23] [Rank 0] step:6221/10000 train_time:557855ms step_avg:89.67ms +[2025-08-22 10:20:25] [Rank 0] step:6241/10000 train_time:559712ms 
step_avg:89.68ms +[2025-08-22 10:20:25] [Rank 0] step:6241/10000 train_time:559712ms step_avg:89.68ms +[2025-08-22 10:20:27] [Rank 0] step:6261/10000 train_time:561572ms step_avg:89.69ms +[2025-08-22 10:20:27] [Rank 0] step:6261/10000 train_time:561572ms step_avg:89.69ms +[2025-08-22 10:20:29] [Rank 0] step:6281/10000 train_time:563436ms step_avg:89.70ms +[2025-08-22 10:20:29] [Rank 0] step:6281/10000 train_time:563436ms step_avg:89.70ms +[2025-08-22 10:20:30] [Rank 0] step:6301/10000 train_time:565300ms step_avg:89.72ms +[2025-08-22 10:20:30] [Rank 0] step:6301/10000 train_time:565300ms step_avg:89.72ms +[2025-08-22 10:20:32] [Rank 0] step:6321/10000 train_time:567164ms step_avg:89.73ms +[2025-08-22 10:20:32] [Rank 0] step:6321/10000 train_time:567164ms step_avg:89.73ms +[2025-08-22 10:20:34] [Rank 0] step:6341/10000 train_time:569027ms step_avg:89.74ms +[2025-08-22 10:20:34] [Rank 0] step:6341/10000 train_time:569027ms step_avg:89.74ms +[2025-08-22 10:20:36] [Rank 0] step:6361/10000 train_time:570900ms step_avg:89.75ms +[2025-08-22 10:20:36] [Rank 0] step:6361/10000 train_time:570900ms step_avg:89.75ms +[2025-08-22 10:20:38] [Rank 0] step:6381/10000 train_time:572767ms step_avg:89.76ms +[2025-08-22 10:20:38] [Rank 0] step:6381/10000 train_time:572767ms step_avg:89.76ms +[2025-08-22 10:20:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:20:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:20:54] [Rank 0] PRINT: step:6400/10000 val_loss:3.8102 svd_entropy: attn_qk:H=0.8993,top10E=0.09,eRank=394.0,q75/q25=15.05 attn_vo:H=0.8854,top10E=0.08,eRank=362.1,q75/q25=30.55 mlp_w1:H=0.8674,top10E=0.16,eRank=324.8,q75/q25=10.26 mlp_w2:H=0.9246,top10E=0.10,eRank=469.8,q75/q25=6.20 vo_prod:H=0.8083,top10E=0.16,eRank=220.7,q75/q25=649.85 train_time:574633ms step_avg:89.79ms +[2025-08-22 10:20:54] [Rank 0] PRINT: step:6400/10000 val_loss:3.8102 svd_entropy: attn_qk:H=0.8993,top10E=0.09,eRank=394.0,q75/q25=15.05 attn_vo:H=0.8854,top10E=0.08,eRank=362.1,q75/q25=30.55 mlp_w1:H=0.8674,top10E=0.16,eRank=324.8,q75/q25=10.26 mlp_w2:H=0.9246,top10E=0.10,eRank=469.8,q75/q25=6.20 vo_prod:H=0.8083,top10E=0.16,eRank=220.7,q75/q25=649.85 train_time:574633ms step_avg:89.79ms +[2025-08-22 10:20:54] [Rank 0] step:6401/10000 train_time:574653ms step_avg:89.78ms +[2025-08-22 10:20:54] [Rank 0] step:6401/10000 train_time:574653ms step_avg:89.78ms +[2025-08-22 10:20:56] [Rank 0] step:6421/10000 train_time:576526ms step_avg:89.79ms +[2025-08-22 10:20:56] [Rank 0] step:6421/10000 train_time:576526ms step_avg:89.79ms +[2025-08-22 10:20:58] [Rank 0] step:6441/10000 train_time:578389ms step_avg:89.80ms +[2025-08-22 10:20:58] [Rank 0] step:6441/10000 train_time:578389ms step_avg:89.80ms +[2025-08-22 10:20:59] [Rank 0] step:6461/10000 train_time:580254ms step_avg:89.81ms +[2025-08-22 10:20:59] [Rank 0] step:6461/10000 train_time:580254ms step_avg:89.81ms +[2025-08-22 10:21:01] [Rank 0] step:6481/10000 train_time:582126ms step_avg:89.82ms +[2025-08-22 10:21:01] [Rank 0] step:6481/10000 train_time:582126ms step_avg:89.82ms +[2025-08-22 10:21:03] [Rank 0] step:6501/10000 train_time:583985ms step_avg:89.83ms +[2025-08-22 10:21:03] [Rank 0] step:6501/10000 train_time:583985ms step_avg:89.83ms +[2025-08-22 10:21:05] [Rank 0] step:6521/10000 train_time:585846ms step_avg:89.84ms +[2025-08-22 10:21:05] [Rank 0] step:6521/10000 train_time:585846ms step_avg:89.84ms +[2025-08-22 
10:21:07] [Rank 0] step:6541/10000 train_time:587712ms step_avg:89.85ms +[2025-08-22 10:21:07] [Rank 0] step:6541/10000 train_time:587712ms step_avg:89.85ms +[2025-08-22 10:21:09] [Rank 0] step:6561/10000 train_time:589579ms step_avg:89.86ms +[2025-08-22 10:21:09] [Rank 0] step:6561/10000 train_time:589579ms step_avg:89.86ms +[2025-08-22 10:21:11] [Rank 0] step:6581/10000 train_time:591442ms step_avg:89.87ms +[2025-08-22 10:21:11] [Rank 0] step:6581/10000 train_time:591442ms step_avg:89.87ms +[2025-08-22 10:21:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:21:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:21:26] [Rank 0] PRINT: step:6600/10000 val_loss:3.7919 svd_entropy: attn_qk:H=0.8996,top10E=0.09,eRank=394.7,q75/q25=14.96 attn_vo:H=0.8861,top10E=0.08,eRank=363.8,q75/q25=29.78 mlp_w1:H=0.8687,top10E=0.15,eRank=327.4,q75/q25=10.24 mlp_w2:H=0.9250,top10E=0.10,eRank=471.2,q75/q25=6.18 vo_prod:H=0.8096,top10E=0.16,eRank=222.4,q75/q25=623.64 train_time:593310ms step_avg:89.90ms +[2025-08-22 10:21:26] [Rank 0] PRINT: step:6600/10000 val_loss:3.7919 svd_entropy: attn_qk:H=0.8996,top10E=0.09,eRank=394.7,q75/q25=14.96 attn_vo:H=0.8861,top10E=0.08,eRank=363.8,q75/q25=29.78 mlp_w1:H=0.8687,top10E=0.15,eRank=327.4,q75/q25=10.24 mlp_w2:H=0.9250,top10E=0.10,eRank=471.2,q75/q25=6.18 vo_prod:H=0.8096,top10E=0.16,eRank=222.4,q75/q25=623.64 train_time:593310ms step_avg:89.90ms +[2025-08-22 10:21:26] [Rank 0] step:6601/10000 train_time:593330ms step_avg:89.88ms +[2025-08-22 10:21:26] [Rank 0] step:6601/10000 train_time:593330ms step_avg:89.88ms +[2025-08-22 10:21:28] [Rank 0] step:6621/10000 train_time:595198ms step_avg:89.90ms +[2025-08-22 10:21:28] [Rank 0] step:6621/10000 train_time:595198ms step_avg:89.90ms +[2025-08-22 10:21:30] [Rank 0] step:6641/10000 train_time:597071ms 
step_avg:89.91ms +[2025-08-22 10:21:30] [Rank 0] step:6641/10000 train_time:597071ms step_avg:89.91ms +[2025-08-22 10:21:32] [Rank 0] step:6661/10000 train_time:598937ms step_avg:89.92ms +[2025-08-22 10:21:32] [Rank 0] step:6661/10000 train_time:598937ms step_avg:89.92ms +[2025-08-22 10:21:34] [Rank 0] step:6681/10000 train_time:600823ms step_avg:89.93ms +[2025-08-22 10:21:34] [Rank 0] step:6681/10000 train_time:600823ms step_avg:89.93ms +[2025-08-22 10:21:36] [Rank 0] step:6701/10000 train_time:602725ms step_avg:89.95ms +[2025-08-22 10:21:36] [Rank 0] step:6701/10000 train_time:602725ms step_avg:89.95ms +[2025-08-22 10:21:38] [Rank 0] step:6721/10000 train_time:604624ms step_avg:89.96ms +[2025-08-22 10:21:38] [Rank 0] step:6721/10000 train_time:604624ms step_avg:89.96ms +[2025-08-22 10:21:40] [Rank 0] step:6741/10000 train_time:606519ms step_avg:89.97ms +[2025-08-22 10:21:40] [Rank 0] step:6741/10000 train_time:606519ms step_avg:89.97ms +[2025-08-22 10:21:41] [Rank 0] step:6761/10000 train_time:608412ms step_avg:89.99ms +[2025-08-22 10:21:41] [Rank 0] step:6761/10000 train_time:608412ms step_avg:89.99ms +[2025-08-22 10:21:43] [Rank 0] step:6781/10000 train_time:610311ms step_avg:90.00ms +[2025-08-22 10:21:43] [Rank 0] step:6781/10000 train_time:610311ms step_avg:90.00ms +[2025-08-22 10:21:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:21:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:21:59] [Rank 0] PRINT: step:6800/10000 val_loss:3.7740 svd_entropy: attn_qk:H=0.8997,top10E=0.09,eRank=395.1,q75/q25=14.84 attn_vo:H=0.8868,top10E=0.08,eRank=365.4,q75/q25=29.13 mlp_w1:H=0.8698,top10E=0.15,eRank=329.8,q75/q25=10.25 mlp_w2:H=0.9253,top10E=0.10,eRank=472.2,q75/q25=6.18 vo_prod:H=0.8107,top10E=0.16,eRank=223.8,q75/q25=586.93 train_time:612216ms step_avg:90.03ms +[2025-08-22 10:21:59] [Rank 0] PRINT: step:6800/10000 val_loss:3.7740 svd_entropy: attn_qk:H=0.8997,top10E=0.09,eRank=395.1,q75/q25=14.84 attn_vo:H=0.8868,top10E=0.08,eRank=365.4,q75/q25=29.13 mlp_w1:H=0.8698,top10E=0.15,eRank=329.8,q75/q25=10.25 mlp_w2:H=0.9253,top10E=0.10,eRank=472.2,q75/q25=6.18 vo_prod:H=0.8107,top10E=0.16,eRank=223.8,q75/q25=586.93 train_time:612216ms step_avg:90.03ms +[2025-08-22 10:21:59] [Rank 0] step:6801/10000 train_time:612236ms step_avg:90.02ms +[2025-08-22 10:21:59] [Rank 0] step:6801/10000 train_time:612236ms step_avg:90.02ms +[2025-08-22 10:22:01] [Rank 0] step:6821/10000 train_time:614122ms step_avg:90.03ms +[2025-08-22 10:22:01] [Rank 0] step:6821/10000 train_time:614122ms step_avg:90.03ms +[2025-08-22 10:22:03] [Rank 0] step:6841/10000 train_time:616014ms step_avg:90.05ms +[2025-08-22 10:22:03] [Rank 0] step:6841/10000 train_time:616014ms step_avg:90.05ms +[2025-08-22 10:22:05] [Rank 0] step:6861/10000 train_time:617902ms step_avg:90.06ms +[2025-08-22 10:22:05] [Rank 0] step:6861/10000 train_time:617902ms step_avg:90.06ms +[2025-08-22 10:22:07] [Rank 0] step:6881/10000 train_time:619797ms step_avg:90.07ms +[2025-08-22 10:22:07] [Rank 0] step:6881/10000 train_time:619797ms step_avg:90.07ms +[2025-08-22 10:22:09] [Rank 0] step:6901/10000 train_time:621687ms step_avg:90.09ms +[2025-08-22 10:22:09] [Rank 0] step:6901/10000 train_time:621687ms step_avg:90.09ms +[2025-08-22 10:22:10] [Rank 0] step:6921/10000 train_time:623573ms step_avg:90.10ms +[2025-08-22 10:22:10] [Rank 0] step:6921/10000 train_time:623573ms step_avg:90.10ms +[2025-08-22 
10:22:12] [Rank 0] step:6941/10000 train_time:625473ms step_avg:90.11ms +[2025-08-22 10:22:12] [Rank 0] step:6941/10000 train_time:625473ms step_avg:90.11ms +[2025-08-22 10:22:14] [Rank 0] step:6961/10000 train_time:627381ms step_avg:90.13ms +[2025-08-22 10:22:14] [Rank 0] step:6961/10000 train_time:627381ms step_avg:90.13ms +[2025-08-22 10:22:16] [Rank 0] step:6981/10000 train_time:629276ms step_avg:90.14ms +[2025-08-22 10:22:16] [Rank 0] step:6981/10000 train_time:629276ms step_avg:90.14ms +[2025-08-22 10:22:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:22:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:22:32] [Rank 0] PRINT: step:7000/10000 val_loss:3.7594 svd_entropy: attn_qk:H=0.8999,top10E=0.09,eRank=395.5,q75/q25=14.74 attn_vo:H=0.8874,top10E=0.08,eRank=366.8,q75/q25=28.63 mlp_w1:H=0.8708,top10E=0.15,eRank=332.0,q75/q25=10.19 mlp_w2:H=0.9256,top10E=0.10,eRank=473.2,q75/q25=6.17 vo_prod:H=0.8118,top10E=0.16,eRank=225.2,q75/q25=551.62 train_time:631177ms step_avg:90.17ms +[2025-08-22 10:22:32] [Rank 0] PRINT: step:7000/10000 val_loss:3.7594 svd_entropy: attn_qk:H=0.8999,top10E=0.09,eRank=395.5,q75/q25=14.74 attn_vo:H=0.8874,top10E=0.08,eRank=366.8,q75/q25=28.63 mlp_w1:H=0.8708,top10E=0.15,eRank=332.0,q75/q25=10.19 mlp_w2:H=0.9256,top10E=0.10,eRank=473.2,q75/q25=6.17 vo_prod:H=0.8118,top10E=0.16,eRank=225.2,q75/q25=551.62 train_time:631177ms step_avg:90.17ms +[2025-08-22 10:22:32] [Rank 0] step:7001/10000 train_time:631196ms step_avg:90.16ms +[2025-08-22 10:22:32] [Rank 0] step:7001/10000 train_time:631196ms step_avg:90.16ms +[2025-08-22 10:22:34] [Rank 0] step:7021/10000 train_time:633092ms step_avg:90.17ms +[2025-08-22 10:22:34] [Rank 0] step:7021/10000 train_time:633092ms step_avg:90.17ms +[2025-08-22 10:22:36] [Rank 0] step:7041/10000 train_time:634985ms 
step_avg:90.18ms +[2025-08-22 10:22:36] [Rank 0] step:7041/10000 train_time:634985ms step_avg:90.18ms +[2025-08-22 10:22:38] [Rank 0] step:7061/10000 train_time:636875ms step_avg:90.20ms +[2025-08-22 10:22:38] [Rank 0] step:7061/10000 train_time:636875ms step_avg:90.20ms +[2025-08-22 10:22:40] [Rank 0] step:7081/10000 train_time:638762ms step_avg:90.21ms +[2025-08-22 10:22:40] [Rank 0] step:7081/10000 train_time:638762ms step_avg:90.21ms +[2025-08-22 10:22:42] [Rank 0] step:7101/10000 train_time:640661ms step_avg:90.22ms +[2025-08-22 10:22:42] [Rank 0] step:7101/10000 train_time:640661ms step_avg:90.22ms +[2025-08-22 10:22:43] [Rank 0] step:7121/10000 train_time:642549ms step_avg:90.23ms +[2025-08-22 10:22:43] [Rank 0] step:7121/10000 train_time:642549ms step_avg:90.23ms +[2025-08-22 10:22:45] [Rank 0] step:7141/10000 train_time:644441ms step_avg:90.25ms +[2025-08-22 10:22:45] [Rank 0] step:7141/10000 train_time:644441ms step_avg:90.25ms +[2025-08-22 10:22:47] [Rank 0] step:7161/10000 train_time:646335ms step_avg:90.26ms +[2025-08-22 10:22:47] [Rank 0] step:7161/10000 train_time:646335ms step_avg:90.26ms +[2025-08-22 10:22:49] [Rank 0] step:7181/10000 train_time:648230ms step_avg:90.27ms +[2025-08-22 10:22:49] [Rank 0] step:7181/10000 train_time:648230ms step_avg:90.27ms +[2025-08-22 10:22:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:22:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:23:05] [Rank 0] PRINT: step:7200/10000 val_loss:3.7469 svd_entropy: attn_qk:H=0.9000,top10E=0.08,eRank=395.9,q75/q25=14.69 attn_vo:H=0.8880,top10E=0.08,eRank=368.1,q75/q25=28.10 mlp_w1:H=0.8718,top10E=0.15,eRank=334.0,q75/q25=10.17 mlp_w2:H=0.9259,top10E=0.10,eRank=474.1,q75/q25=6.17 vo_prod:H=0.8128,top10E=0.16,eRank=226.7,q75/q25=527.58 train_time:650198ms step_avg:90.31ms +[2025-08-22 10:23:05] [Rank 0] PRINT: step:7200/10000 val_loss:3.7469 svd_entropy: attn_qk:H=0.9000,top10E=0.08,eRank=395.9,q75/q25=14.69 attn_vo:H=0.8880,top10E=0.08,eRank=368.1,q75/q25=28.10 mlp_w1:H=0.8718,top10E=0.15,eRank=334.0,q75/q25=10.17 mlp_w2:H=0.9259,top10E=0.10,eRank=474.1,q75/q25=6.17 vo_prod:H=0.8128,top10E=0.16,eRank=226.7,q75/q25=527.58 train_time:650198ms step_avg:90.31ms +[2025-08-22 10:23:05] [Rank 0] step:7201/10000 train_time:650218ms step_avg:90.30ms +[2025-08-22 10:23:05] [Rank 0] step:7201/10000 train_time:650218ms step_avg:90.30ms +[2025-08-22 10:23:07] [Rank 0] step:7221/10000 train_time:652114ms step_avg:90.31ms +[2025-08-22 10:23:07] [Rank 0] step:7221/10000 train_time:652114ms step_avg:90.31ms +[2025-08-22 10:23:09] [Rank 0] step:7241/10000 train_time:654002ms step_avg:90.32ms +[2025-08-22 10:23:09] [Rank 0] step:7241/10000 train_time:654002ms step_avg:90.32ms +[2025-08-22 10:23:11] [Rank 0] step:7261/10000 train_time:655889ms step_avg:90.33ms +[2025-08-22 10:23:11] [Rank 0] step:7261/10000 train_time:655889ms step_avg:90.33ms +[2025-08-22 10:23:12] [Rank 0] step:7281/10000 train_time:657790ms step_avg:90.34ms +[2025-08-22 10:23:12] [Rank 0] step:7281/10000 train_time:657790ms step_avg:90.34ms +[2025-08-22 10:23:14] [Rank 0] step:7301/10000 train_time:659679ms step_avg:90.35ms +[2025-08-22 10:23:14] [Rank 0] step:7301/10000 train_time:659679ms step_avg:90.35ms +[2025-08-22 10:23:16] [Rank 0] step:7321/10000 train_time:661586ms step_avg:90.37ms +[2025-08-22 10:23:16] [Rank 0] step:7321/10000 train_time:661586ms step_avg:90.37ms +[2025-08-22 
10:23:18] [Rank 0] step:7341/10000 train_time:663479ms step_avg:90.38ms +[2025-08-22 10:23:18] [Rank 0] step:7341/10000 train_time:663479ms step_avg:90.38ms +[2025-08-22 10:23:20] [Rank 0] step:7361/10000 train_time:665384ms step_avg:90.39ms +[2025-08-22 10:23:20] [Rank 0] step:7361/10000 train_time:665384ms step_avg:90.39ms +[2025-08-22 10:23:22] [Rank 0] step:7381/10000 train_time:667286ms step_avg:90.41ms +[2025-08-22 10:23:22] [Rank 0] step:7381/10000 train_time:667286ms step_avg:90.41ms +[2025-08-22 10:23:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:23:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:23:37] [Rank 0] PRINT: step:7400/10000 val_loss:3.7224 svd_entropy: attn_qk:H=0.9001,top10E=0.08,eRank=396.2,q75/q25=14.62 attn_vo:H=0.8884,top10E=0.08,eRank=369.2,q75/q25=27.66 mlp_w1:H=0.8726,top10E=0.15,eRank=335.7,q75/q25=10.14 mlp_w2:H=0.9262,top10E=0.10,eRank=474.9,q75/q25=6.17 vo_prod:H=0.8136,top10E=0.16,eRank=227.7,q75/q25=506.90 train_time:669170ms step_avg:90.43ms +[2025-08-22 10:23:37] [Rank 0] PRINT: step:7400/10000 val_loss:3.7224 svd_entropy: attn_qk:H=0.9001,top10E=0.08,eRank=396.2,q75/q25=14.62 attn_vo:H=0.8884,top10E=0.08,eRank=369.2,q75/q25=27.66 mlp_w1:H=0.8726,top10E=0.15,eRank=335.7,q75/q25=10.14 mlp_w2:H=0.9262,top10E=0.10,eRank=474.9,q75/q25=6.17 vo_prod:H=0.8136,top10E=0.16,eRank=227.7,q75/q25=506.90 train_time:669170ms step_avg:90.43ms +[2025-08-22 10:23:38] [Rank 0] step:7401/10000 train_time:669189ms step_avg:90.42ms +[2025-08-22 10:23:38] [Rank 0] step:7401/10000 train_time:669189ms step_avg:90.42ms +[2025-08-22 10:23:39] [Rank 0] step:7421/10000 train_time:671080ms step_avg:90.43ms +[2025-08-22 10:23:39] [Rank 0] step:7421/10000 train_time:671080ms step_avg:90.43ms +[2025-08-22 10:23:41] [Rank 0] step:7441/10000 train_time:672971ms 
step_avg:90.44ms +[2025-08-22 10:23:41] [Rank 0] step:7441/10000 train_time:672971ms step_avg:90.44ms +[2025-08-22 10:23:43] [Rank 0] step:7461/10000 train_time:674868ms step_avg:90.45ms +[2025-08-22 10:23:43] [Rank 0] step:7461/10000 train_time:674868ms step_avg:90.45ms +[2025-08-22 10:23:45] [Rank 0] step:7481/10000 train_time:676775ms step_avg:90.47ms +[2025-08-22 10:23:45] [Rank 0] step:7481/10000 train_time:676775ms step_avg:90.47ms +[2025-08-22 10:23:47] [Rank 0] step:7501/10000 train_time:678677ms step_avg:90.48ms +[2025-08-22 10:23:47] [Rank 0] step:7501/10000 train_time:678677ms step_avg:90.48ms +[2025-08-22 10:23:49] [Rank 0] step:7521/10000 train_time:680583ms step_avg:90.49ms +[2025-08-22 10:23:49] [Rank 0] step:7521/10000 train_time:680583ms step_avg:90.49ms +[2025-08-22 10:23:51] [Rank 0] step:7541/10000 train_time:682495ms step_avg:90.50ms +[2025-08-22 10:23:51] [Rank 0] step:7541/10000 train_time:682495ms step_avg:90.50ms +[2025-08-22 10:23:53] [Rank 0] step:7561/10000 train_time:684389ms step_avg:90.52ms +[2025-08-22 10:23:53] [Rank 0] step:7561/10000 train_time:684389ms step_avg:90.52ms +[2025-08-22 10:23:55] [Rank 0] step:7581/10000 train_time:686358ms step_avg:90.54ms +[2025-08-22 10:23:55] [Rank 0] step:7581/10000 train_time:686358ms step_avg:90.54ms +[2025-08-22 10:23:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:23:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:24:10] [Rank 0] PRINT: step:7600/10000 val_loss:3.7152 svd_entropy: attn_qk:H=0.9002,top10E=0.08,eRank=396.4,q75/q25=14.58 attn_vo:H=0.8889,top10E=0.08,eRank=370.2,q75/q25=27.25 mlp_w1:H=0.8733,top10E=0.15,eRank=337.4,q75/q25=10.12 mlp_w2:H=0.9264,top10E=0.10,eRank=475.7,q75/q25=6.15 vo_prod:H=0.8144,top10E=0.16,eRank=228.8,q75/q25=482.38 train_time:688345ms step_avg:90.57ms +[2025-08-22 10:24:10] [Rank 0] PRINT: step:7600/10000 val_loss:3.7152 svd_entropy: attn_qk:H=0.9002,top10E=0.08,eRank=396.4,q75/q25=14.58 attn_vo:H=0.8889,top10E=0.08,eRank=370.2,q75/q25=27.25 mlp_w1:H=0.8733,top10E=0.15,eRank=337.4,q75/q25=10.12 mlp_w2:H=0.9264,top10E=0.10,eRank=475.7,q75/q25=6.15 vo_prod:H=0.8144,top10E=0.16,eRank=228.8,q75/q25=482.38 train_time:688345ms step_avg:90.57ms +[2025-08-22 10:24:10] [Rank 0] step:7601/10000 train_time:688363ms step_avg:90.56ms +[2025-08-22 10:24:10] [Rank 0] step:7601/10000 train_time:688363ms step_avg:90.56ms +[2025-08-22 10:24:12] [Rank 0] step:7621/10000 train_time:690249ms step_avg:90.57ms +[2025-08-22 10:24:12] [Rank 0] step:7621/10000 train_time:690249ms step_avg:90.57ms +[2025-08-22 10:24:14] [Rank 0] step:7641/10000 train_time:692144ms step_avg:90.58ms +[2025-08-22 10:24:14] [Rank 0] step:7641/10000 train_time:692144ms step_avg:90.58ms +[2025-08-22 10:24:16] [Rank 0] step:7661/10000 train_time:694043ms step_avg:90.59ms +[2025-08-22 10:24:16] [Rank 0] step:7661/10000 train_time:694043ms step_avg:90.59ms +[2025-08-22 10:24:18] [Rank 0] step:7681/10000 train_time:695938ms step_avg:90.61ms +[2025-08-22 10:24:18] [Rank 0] step:7681/10000 train_time:695938ms step_avg:90.61ms +[2025-08-22 10:24:20] [Rank 0] step:7701/10000 train_time:697833ms step_avg:90.62ms +[2025-08-22 10:24:20] [Rank 0] step:7701/10000 train_time:697833ms step_avg:90.62ms +[2025-08-22 10:24:22] [Rank 0] step:7721/10000 train_time:699745ms step_avg:90.63ms +[2025-08-22 10:24:22] [Rank 0] step:7721/10000 train_time:699745ms step_avg:90.63ms +[2025-08-22 
10:24:24] [Rank 0] step:7741/10000 train_time:701644ms step_avg:90.64ms +[2025-08-22 10:24:24] [Rank 0] step:7741/10000 train_time:701644ms step_avg:90.64ms +[2025-08-22 10:24:26] [Rank 0] step:7761/10000 train_time:703549ms step_avg:90.65ms +[2025-08-22 10:24:26] [Rank 0] step:7761/10000 train_time:703549ms step_avg:90.65ms +[2025-08-22 10:24:28] [Rank 0] step:7781/10000 train_time:705448ms step_avg:90.66ms +[2025-08-22 10:24:28] [Rank 0] step:7781/10000 train_time:705448ms step_avg:90.66ms +[2025-08-22 10:24:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:24:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:24:43] [Rank 0] PRINT: step:7800/10000 val_loss:3.7024 svd_entropy: attn_qk:H=0.9003,top10E=0.08,eRank=396.7,q75/q25=14.62 attn_vo:H=0.8893,top10E=0.08,eRank=371.2,q75/q25=26.88 mlp_w1:H=0.8740,top10E=0.15,eRank=338.8,q75/q25=10.09 mlp_w2:H=0.9267,top10E=0.10,eRank=476.4,q75/q25=6.15 vo_prod:H=0.8151,top10E=0.16,eRank=229.8,q75/q25=463.24 train_time:707360ms step_avg:90.69ms +[2025-08-22 10:24:43] [Rank 0] PRINT: step:7800/10000 val_loss:3.7024 svd_entropy: attn_qk:H=0.9003,top10E=0.08,eRank=396.7,q75/q25=14.62 attn_vo:H=0.8893,top10E=0.08,eRank=371.2,q75/q25=26.88 mlp_w1:H=0.8740,top10E=0.15,eRank=338.8,q75/q25=10.09 mlp_w2:H=0.9267,top10E=0.10,eRank=476.4,q75/q25=6.15 vo_prod:H=0.8151,top10E=0.16,eRank=229.8,q75/q25=463.24 train_time:707360ms step_avg:90.69ms +[2025-08-22 10:24:43] [Rank 0] step:7801/10000 train_time:707379ms step_avg:90.68ms +[2025-08-22 10:24:43] [Rank 0] step:7801/10000 train_time:707379ms step_avg:90.68ms +[2025-08-22 10:24:45] [Rank 0] step:7821/10000 train_time:709274ms step_avg:90.69ms +[2025-08-22 10:24:45] [Rank 0] step:7821/10000 train_time:709274ms step_avg:90.69ms +[2025-08-22 10:24:47] [Rank 0] step:7841/10000 train_time:711165ms 
step_avg:90.70ms +[2025-08-22 10:24:47] [Rank 0] step:7841/10000 train_time:711165ms step_avg:90.70ms +[2025-08-22 10:24:49] [Rank 0] step:7861/10000 train_time:713067ms step_avg:90.71ms +[2025-08-22 10:24:49] [Rank 0] step:7861/10000 train_time:713067ms step_avg:90.71ms +[2025-08-22 10:24:51] [Rank 0] step:7881/10000 train_time:714972ms step_avg:90.72ms +[2025-08-22 10:24:51] [Rank 0] step:7881/10000 train_time:714972ms step_avg:90.72ms +[2025-08-22 10:24:53] [Rank 0] step:7901/10000 train_time:716865ms step_avg:90.73ms +[2025-08-22 10:24:53] [Rank 0] step:7901/10000 train_time:716865ms step_avg:90.73ms +[2025-08-22 10:24:55] [Rank 0] step:7921/10000 train_time:718767ms step_avg:90.74ms +[2025-08-22 10:24:55] [Rank 0] step:7921/10000 train_time:718767ms step_avg:90.74ms +[2025-08-22 10:24:57] [Rank 0] step:7941/10000 train_time:720860ms step_avg:90.78ms +[2025-08-22 10:24:57] [Rank 0] step:7941/10000 train_time:720860ms step_avg:90.78ms +[2025-08-22 10:24:59] [Rank 0] step:7961/10000 train_time:722732ms step_avg:90.78ms +[2025-08-22 10:24:59] [Rank 0] step:7961/10000 train_time:722732ms step_avg:90.78ms +[2025-08-22 10:25:00] [Rank 0] step:7981/10000 train_time:724626ms step_avg:90.79ms +[2025-08-22 10:25:00] [Rank 0] step:7981/10000 train_time:724626ms step_avg:90.79ms +[2025-08-22 10:25:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:25:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:25:16] [Rank 0] PRINT: step:8000/10000 val_loss:3.6803 svd_entropy: attn_qk:H=0.9004,top10E=0.08,eRank=396.9,q75/q25=14.55 attn_vo:H=0.8897,top10E=0.08,eRank=372.0,q75/q25=26.59 mlp_w1:H=0.8746,top10E=0.15,eRank=340.1,q75/q25=10.06 mlp_w2:H=0.9269,top10E=0.10,eRank=477.1,q75/q25=6.14 vo_prod:H=0.8158,top10E=0.15,eRank=230.8,q75/q25=450.47 train_time:726535ms step_avg:90.82ms +[2025-08-22 10:25:16] [Rank 0] PRINT: step:8000/10000 val_loss:3.6803 svd_entropy: attn_qk:H=0.9004,top10E=0.08,eRank=396.9,q75/q25=14.55 attn_vo:H=0.8897,top10E=0.08,eRank=372.0,q75/q25=26.59 mlp_w1:H=0.8746,top10E=0.15,eRank=340.1,q75/q25=10.06 mlp_w2:H=0.9269,top10E=0.10,eRank=477.1,q75/q25=6.14 vo_prod:H=0.8158,top10E=0.15,eRank=230.8,q75/q25=450.47 train_time:726535ms step_avg:90.82ms +[2025-08-22 10:25:16] [Rank 0] step:8001/10000 train_time:726553ms step_avg:90.81ms +[2025-08-22 10:25:16] [Rank 0] step:8001/10000 train_time:726553ms step_avg:90.81ms +[2025-08-22 10:25:18] [Rank 0] step:8021/10000 train_time:728450ms step_avg:90.82ms +[2025-08-22 10:25:18] [Rank 0] step:8021/10000 train_time:728450ms step_avg:90.82ms +[2025-08-22 10:25:20] [Rank 0] step:8041/10000 train_time:730356ms step_avg:90.83ms +[2025-08-22 10:25:20] [Rank 0] step:8041/10000 train_time:730356ms step_avg:90.83ms +[2025-08-22 10:25:22] [Rank 0] step:8061/10000 train_time:732260ms step_avg:90.84ms +[2025-08-22 10:25:22] [Rank 0] step:8061/10000 train_time:732260ms step_avg:90.84ms +[2025-08-22 10:25:24] [Rank 0] step:8081/10000 train_time:734151ms step_avg:90.85ms +[2025-08-22 10:25:24] [Rank 0] step:8081/10000 train_time:734151ms step_avg:90.85ms +[2025-08-22 10:25:26] [Rank 0] step:8101/10000 train_time:736059ms step_avg:90.86ms +[2025-08-22 10:25:26] [Rank 0] step:8101/10000 train_time:736059ms step_avg:90.86ms +[2025-08-22 10:25:28] [Rank 0] step:8121/10000 train_time:737961ms step_avg:90.87ms +[2025-08-22 10:25:28] [Rank 0] step:8121/10000 train_time:737961ms step_avg:90.87ms +[2025-08-22 
10:25:30] [Rank 0] step:8141/10000 train_time:740523ms step_avg:90.96ms +[2025-08-22 10:25:30] [Rank 0] step:8141/10000 train_time:740523ms step_avg:90.96ms +[2025-08-22 10:25:32] [Rank 0] step:8161/10000 train_time:742443ms step_avg:90.97ms +[2025-08-22 10:25:32] [Rank 0] step:8161/10000 train_time:742443ms step_avg:90.97ms +[2025-08-22 10:25:34] [Rank 0] step:8181/10000 train_time:744377ms step_avg:90.99ms +[2025-08-22 10:25:34] [Rank 0] step:8181/10000 train_time:744377ms step_avg:90.99ms +[2025-08-22 10:25:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:25:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:25:50] [Rank 0] PRINT: step:8200/10000 val_loss:3.6685 svd_entropy: attn_qk:H=0.9004,top10E=0.08,eRank=397.0,q75/q25=14.49 attn_vo:H=0.8900,top10E=0.08,eRank=372.8,q75/q25=26.29 mlp_w1:H=0.8751,top10E=0.15,eRank=341.3,q75/q25=10.04 mlp_w2:H=0.9270,top10E=0.10,eRank=477.6,q75/q25=6.14 vo_prod:H=0.8164,top10E=0.15,eRank=231.6,q75/q25=438.39 train_time:746339ms step_avg:91.02ms +[2025-08-22 10:25:50] [Rank 0] PRINT: step:8200/10000 val_loss:3.6685 svd_entropy: attn_qk:H=0.9004,top10E=0.08,eRank=397.0,q75/q25=14.49 attn_vo:H=0.8900,top10E=0.08,eRank=372.8,q75/q25=26.29 mlp_w1:H=0.8751,top10E=0.15,eRank=341.3,q75/q25=10.04 mlp_w2:H=0.9270,top10E=0.10,eRank=477.6,q75/q25=6.14 vo_prod:H=0.8164,top10E=0.15,eRank=231.6,q75/q25=438.39 train_time:746339ms step_avg:91.02ms +[2025-08-22 10:25:50] [Rank 0] step:8201/10000 train_time:746358ms step_avg:91.01ms +[2025-08-22 10:25:50] [Rank 0] step:8201/10000 train_time:746358ms step_avg:91.01ms +[2025-08-22 10:25:52] [Rank 0] step:8221/10000 train_time:748284ms step_avg:91.02ms +[2025-08-22 10:25:52] [Rank 0] step:8221/10000 train_time:748284ms step_avg:91.02ms +[2025-08-22 10:25:54] [Rank 0] step:8241/10000 train_time:750217ms 
step_avg:91.03ms +[2025-08-22 10:25:54] [Rank 0] step:8241/10000 train_time:750217ms step_avg:91.03ms +[2025-08-22 10:25:56] [Rank 0] step:8261/10000 train_time:752150ms step_avg:91.05ms +[2025-08-22 10:25:56] [Rank 0] step:8261/10000 train_time:752150ms step_avg:91.05ms +[2025-08-22 10:25:57] [Rank 0] step:8281/10000 train_time:754073ms step_avg:91.06ms +[2025-08-22 10:25:57] [Rank 0] step:8281/10000 train_time:754073ms step_avg:91.06ms +[2025-08-22 10:25:59] [Rank 0] step:8301/10000 train_time:756000ms step_avg:91.07ms +[2025-08-22 10:25:59] [Rank 0] step:8301/10000 train_time:756000ms step_avg:91.07ms +[2025-08-22 10:26:01] [Rank 0] step:8321/10000 train_time:757989ms step_avg:91.09ms +[2025-08-22 10:26:01] [Rank 0] step:8321/10000 train_time:757989ms step_avg:91.09ms +[2025-08-22 10:26:03] [Rank 0] step:8341/10000 train_time:760006ms step_avg:91.12ms +[2025-08-22 10:26:03] [Rank 0] step:8341/10000 train_time:760006ms step_avg:91.12ms +[2025-08-22 10:26:05] [Rank 0] step:8361/10000 train_time:761934ms step_avg:91.13ms +[2025-08-22 10:26:05] [Rank 0] step:8361/10000 train_time:761934ms step_avg:91.13ms +[2025-08-22 10:26:07] [Rank 0] step:8381/10000 train_time:763860ms step_avg:91.14ms +[2025-08-22 10:26:07] [Rank 0] step:8381/10000 train_time:763860ms step_avg:91.14ms +[2025-08-22 10:26:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:26:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:26:23] [Rank 0] PRINT: step:8400/10000 val_loss:3.6567 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.1,q75/q25=14.47 attn_vo:H=0.8903,top10E=0.08,eRank=373.5,q75/q25=25.99 mlp_w1:H=0.8756,top10E=0.15,eRank=342.4,q75/q25=10.02 mlp_w2:H=0.9272,top10E=0.09,eRank=478.2,q75/q25=6.13 vo_prod:H=0.8169,top10E=0.15,eRank=232.3,q75/q25=427.13 train_time:765791ms step_avg:91.17ms +[2025-08-22 10:26:23] [Rank 0] PRINT: step:8400/10000 val_loss:3.6567 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.1,q75/q25=14.47 attn_vo:H=0.8903,top10E=0.08,eRank=373.5,q75/q25=25.99 mlp_w1:H=0.8756,top10E=0.15,eRank=342.4,q75/q25=10.02 mlp_w2:H=0.9272,top10E=0.09,eRank=478.2,q75/q25=6.13 vo_prod:H=0.8169,top10E=0.15,eRank=232.3,q75/q25=427.13 train_time:765791ms step_avg:91.17ms +[2025-08-22 10:26:23] [Rank 0] step:8401/10000 train_time:765809ms step_avg:91.16ms +[2025-08-22 10:26:23] [Rank 0] step:8401/10000 train_time:765809ms step_avg:91.16ms +[2025-08-22 10:26:25] [Rank 0] step:8421/10000 train_time:767725ms step_avg:91.17ms +[2025-08-22 10:26:25] [Rank 0] step:8421/10000 train_time:767725ms step_avg:91.17ms +[2025-08-22 10:26:27] [Rank 0] step:8441/10000 train_time:769647ms step_avg:91.18ms +[2025-08-22 10:26:27] [Rank 0] step:8441/10000 train_time:769647ms step_avg:91.18ms +[2025-08-22 10:26:29] [Rank 0] step:8461/10000 train_time:771567ms step_avg:91.19ms +[2025-08-22 10:26:29] [Rank 0] step:8461/10000 train_time:771567ms step_avg:91.19ms +[2025-08-22 10:26:30] [Rank 0] step:8481/10000 train_time:773495ms step_avg:91.20ms +[2025-08-22 10:26:30] [Rank 0] step:8481/10000 train_time:773495ms step_avg:91.20ms +[2025-08-22 10:26:32] [Rank 0] step:8501/10000 train_time:775443ms step_avg:91.22ms +[2025-08-22 10:26:32] [Rank 0] step:8501/10000 train_time:775443ms step_avg:91.22ms +[2025-08-22 10:26:34] [Rank 0] step:8521/10000 train_time:777372ms step_avg:91.23ms +[2025-08-22 10:26:34] [Rank 0] step:8521/10000 train_time:777372ms step_avg:91.23ms +[2025-08-22 
10:26:36] [Rank 0] step:8541/10000 train_time:779312ms step_avg:91.24ms +[2025-08-22 10:26:36] [Rank 0] step:8541/10000 train_time:779312ms step_avg:91.24ms +[2025-08-22 10:26:38] [Rank 0] step:8561/10000 train_time:781243ms step_avg:91.26ms +[2025-08-22 10:26:38] [Rank 0] step:8561/10000 train_time:781243ms step_avg:91.26ms +[2025-08-22 10:26:40] [Rank 0] step:8581/10000 train_time:783170ms step_avg:91.27ms +[2025-08-22 10:26:40] [Rank 0] step:8581/10000 train_time:783170ms step_avg:91.27ms +[2025-08-22 10:26:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:26:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:26:56] [Rank 0] PRINT: step:8600/10000 val_loss:3.6432 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.2,q75/q25=14.46 attn_vo:H=0.8905,top10E=0.08,eRank=374.1,q75/q25=25.80 mlp_w1:H=0.8760,top10E=0.15,eRank=343.3,q75/q25=10.00 mlp_w2:H=0.9273,top10E=0.09,eRank=478.6,q75/q25=6.13 vo_prod:H=0.8173,top10E=0.15,eRank=233.0,q75/q25=417.35 train_time:785096ms step_avg:91.29ms +[2025-08-22 10:26:56] [Rank 0] PRINT: step:8600/10000 val_loss:3.6432 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.2,q75/q25=14.46 attn_vo:H=0.8905,top10E=0.08,eRank=374.1,q75/q25=25.80 mlp_w1:H=0.8760,top10E=0.15,eRank=343.3,q75/q25=10.00 mlp_w2:H=0.9273,top10E=0.09,eRank=478.6,q75/q25=6.13 vo_prod:H=0.8173,top10E=0.15,eRank=233.0,q75/q25=417.35 train_time:785096ms step_avg:91.29ms +[2025-08-22 10:26:56] [Rank 0] step:8601/10000 train_time:785115ms step_avg:91.28ms +[2025-08-22 10:26:56] [Rank 0] step:8601/10000 train_time:785115ms step_avg:91.28ms +[2025-08-22 10:26:58] [Rank 0] step:8621/10000 train_time:787031ms step_avg:91.29ms +[2025-08-22 10:26:58] [Rank 0] step:8621/10000 train_time:787031ms step_avg:91.29ms +[2025-08-22 10:27:00] [Rank 0] step:8641/10000 train_time:788952ms 
step_avg:91.30ms +[2025-08-22 10:27:00] [Rank 0] step:8641/10000 train_time:788952ms step_avg:91.30ms +[2025-08-22 10:27:02] [Rank 0] step:8661/10000 train_time:790876ms step_avg:91.31ms +[2025-08-22 10:27:02] [Rank 0] step:8661/10000 train_time:790876ms step_avg:91.31ms +[2025-08-22 10:27:04] [Rank 0] step:8681/10000 train_time:792881ms step_avg:91.34ms +[2025-08-22 10:27:04] [Rank 0] step:8681/10000 train_time:792881ms step_avg:91.34ms +[2025-08-22 10:27:06] [Rank 0] step:8701/10000 train_time:794850ms step_avg:91.35ms +[2025-08-22 10:27:06] [Rank 0] step:8701/10000 train_time:794850ms step_avg:91.35ms +[2025-08-22 10:27:08] [Rank 0] step:8721/10000 train_time:796782ms step_avg:91.36ms +[2025-08-22 10:27:08] [Rank 0] step:8721/10000 train_time:796782ms step_avg:91.36ms +[2025-08-22 10:27:09] [Rank 0] step:8741/10000 train_time:798699ms step_avg:91.37ms +[2025-08-22 10:27:09] [Rank 0] step:8741/10000 train_time:798699ms step_avg:91.37ms +[2025-08-22 10:27:11] [Rank 0] step:8761/10000 train_time:800626ms step_avg:91.39ms +[2025-08-22 10:27:11] [Rank 0] step:8761/10000 train_time:800626ms step_avg:91.39ms +[2025-08-22 10:27:13] [Rank 0] step:8781/10000 train_time:802557ms step_avg:91.40ms +[2025-08-22 10:27:13] [Rank 0] step:8781/10000 train_time:802557ms step_avg:91.40ms +[2025-08-22 10:27:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:27:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:27:29] [Rank 0] PRINT: step:8800/10000 val_loss:3.6297 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.2,q75/q25=14.45 attn_vo:H=0.8908,top10E=0.08,eRank=374.6,q75/q25=25.60 mlp_w1:H=0.8764,top10E=0.15,eRank=344.2,q75/q25=9.98 mlp_w2:H=0.9275,top10E=0.09,eRank=479.0,q75/q25=6.12 vo_prod:H=0.8178,top10E=0.15,eRank=233.6,q75/q25=407.29 train_time:804489ms step_avg:91.42ms +[2025-08-22 10:27:29] [Rank 0] PRINT: step:8800/10000 val_loss:3.6297 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.2,q75/q25=14.45 attn_vo:H=0.8908,top10E=0.08,eRank=374.6,q75/q25=25.60 mlp_w1:H=0.8764,top10E=0.15,eRank=344.2,q75/q25=9.98 mlp_w2:H=0.9275,top10E=0.09,eRank=479.0,q75/q25=6.12 vo_prod:H=0.8178,top10E=0.15,eRank=233.6,q75/q25=407.29 train_time:804489ms step_avg:91.42ms +[2025-08-22 10:27:29] [Rank 0] step:8801/10000 train_time:804509ms step_avg:91.41ms +[2025-08-22 10:27:29] [Rank 0] step:8801/10000 train_time:804509ms step_avg:91.41ms +[2025-08-22 10:27:31] [Rank 0] step:8821/10000 train_time:806448ms step_avg:91.42ms +[2025-08-22 10:27:31] [Rank 0] step:8821/10000 train_time:806448ms step_avg:91.42ms +[2025-08-22 10:27:33] [Rank 0] step:8841/10000 train_time:808396ms step_avg:91.44ms +[2025-08-22 10:27:33] [Rank 0] step:8841/10000 train_time:808396ms step_avg:91.44ms +[2025-08-22 10:27:35] [Rank 0] step:8861/10000 train_time:810319ms step_avg:91.45ms +[2025-08-22 10:27:35] [Rank 0] step:8861/10000 train_time:810319ms step_avg:91.45ms +[2025-08-22 10:27:37] [Rank 0] step:8881/10000 train_time:812249ms step_avg:91.46ms +[2025-08-22 10:27:37] [Rank 0] step:8881/10000 train_time:812249ms step_avg:91.46ms +[2025-08-22 10:27:39] [Rank 0] step:8901/10000 train_time:814194ms step_avg:91.47ms +[2025-08-22 10:27:39] [Rank 0] step:8901/10000 train_time:814194ms step_avg:91.47ms +[2025-08-22 10:27:41] [Rank 0] step:8921/10000 train_time:816140ms step_avg:91.49ms +[2025-08-22 10:27:41] [Rank 0] step:8921/10000 train_time:816140ms step_avg:91.49ms +[2025-08-22 10:27:43] 
[Rank 0] step:8941/10000 train_time:818076ms step_avg:91.50ms +[2025-08-22 10:27:43] [Rank 0] step:8941/10000 train_time:818076ms step_avg:91.50ms +[2025-08-22 10:27:45] [Rank 0] step:8961/10000 train_time:820006ms step_avg:91.51ms +[2025-08-22 10:27:45] [Rank 0] step:8961/10000 train_time:820006ms step_avg:91.51ms +[2025-08-22 10:27:46] [Rank 0] step:8981/10000 train_time:821934ms step_avg:91.52ms +[2025-08-22 10:27:46] [Rank 0] step:8981/10000 train_time:821934ms step_avg:91.52ms +[2025-08-22 10:27:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:27:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:28:02] [Rank 0] PRINT: step:9000/10000 val_loss:3.6179 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.3,q75/q25=14.43 attn_vo:H=0.8910,top10E=0.08,eRank=375.1,q75/q25=25.44 mlp_w1:H=0.8767,top10E=0.15,eRank=344.9,q75/q25=9.95 mlp_w2:H=0.9276,top10E=0.09,eRank=479.4,q75/q25=6.12 vo_prod:H=0.8181,top10E=0.15,eRank=234.1,q75/q25=400.67 train_time:823865ms step_avg:91.54ms +[2025-08-22 10:28:02] [Rank 0] PRINT: step:9000/10000 val_loss:3.6179 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.3,q75/q25=14.43 attn_vo:H=0.8910,top10E=0.08,eRank=375.1,q75/q25=25.44 mlp_w1:H=0.8767,top10E=0.15,eRank=344.9,q75/q25=9.95 mlp_w2:H=0.9276,top10E=0.09,eRank=479.4,q75/q25=6.12 vo_prod:H=0.8181,top10E=0.15,eRank=234.1,q75/q25=400.67 train_time:823865ms step_avg:91.54ms +[2025-08-22 10:28:02] [Rank 0] step:9001/10000 train_time:823884ms step_avg:91.53ms +[2025-08-22 10:28:02] [Rank 0] step:9001/10000 train_time:823884ms step_avg:91.53ms +[2025-08-22 10:28:04] [Rank 0] step:9021/10000 train_time:825822ms step_avg:91.54ms +[2025-08-22 10:28:04] [Rank 0] step:9021/10000 train_time:825822ms step_avg:91.54ms +[2025-08-22 10:28:06] [Rank 0] step:9041/10000 train_time:827750ms step_avg:91.56ms 
+[2025-08-22 10:28:06] [Rank 0] step:9041/10000 train_time:827750ms step_avg:91.56ms +[2025-08-22 10:28:08] [Rank 0] step:9061/10000 train_time:829763ms step_avg:91.58ms +[2025-08-22 10:28:08] [Rank 0] step:9061/10000 train_time:829763ms step_avg:91.58ms +[2025-08-22 10:28:10] [Rank 0] step:9081/10000 train_time:831783ms step_avg:91.60ms +[2025-08-22 10:28:10] [Rank 0] step:9081/10000 train_time:831783ms step_avg:91.60ms +[2025-08-22 10:28:12] [Rank 0] step:9101/10000 train_time:833725ms step_avg:91.61ms +[2025-08-22 10:28:12] [Rank 0] step:9101/10000 train_time:833725ms step_avg:91.61ms +[2025-08-22 10:28:14] [Rank 0] step:9121/10000 train_time:835658ms step_avg:91.62ms +[2025-08-22 10:28:14] [Rank 0] step:9121/10000 train_time:835658ms step_avg:91.62ms +[2025-08-22 10:28:16] [Rank 0] step:9141/10000 train_time:837575ms step_avg:91.63ms +[2025-08-22 10:28:16] [Rank 0] step:9141/10000 train_time:837575ms step_avg:91.63ms +[2025-08-22 10:28:18] [Rank 0] step:9161/10000 train_time:839496ms step_avg:91.64ms +[2025-08-22 10:28:18] [Rank 0] step:9161/10000 train_time:839496ms step_avg:91.64ms +[2025-08-22 10:28:19] [Rank 0] step:9181/10000 train_time:841459ms step_avg:91.65ms +[2025-08-22 10:28:19] [Rank 0] step:9181/10000 train_time:841459ms step_avg:91.65ms +[2025-08-22 10:28:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:28:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:28:35] [Rank 0] PRINT: step:9200/10000 val_loss:3.6097 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.3,q75/q25=14.44 attn_vo:H=0.8911,top10E=0.08,eRank=375.5,q75/q25=25.37 mlp_w1:H=0.8770,top10E=0.15,eRank=345.5,q75/q25=9.94 mlp_w2:H=0.9277,top10E=0.09,eRank=479.8,q75/q25=6.12 vo_prod:H=0.8184,top10E=0.15,eRank=234.5,q75/q25=394.31 train_time:843386ms step_avg:91.67ms +[2025-08-22 10:28:35] [Rank 0] PRINT: step:9200/10000 val_loss:3.6097 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.3,q75/q25=14.44 attn_vo:H=0.8911,top10E=0.08,eRank=375.5,q75/q25=25.37 mlp_w1:H=0.8770,top10E=0.15,eRank=345.5,q75/q25=9.94 mlp_w2:H=0.9277,top10E=0.09,eRank=479.8,q75/q25=6.12 vo_prod:H=0.8184,top10E=0.15,eRank=234.5,q75/q25=394.31 train_time:843386ms step_avg:91.67ms +[2025-08-22 10:28:35] [Rank 0] step:9201/10000 train_time:843405ms step_avg:91.66ms +[2025-08-22 10:28:35] [Rank 0] step:9201/10000 train_time:843405ms step_avg:91.66ms +[2025-08-22 10:28:37] [Rank 0] step:9221/10000 train_time:845355ms step_avg:91.68ms +[2025-08-22 10:28:37] [Rank 0] step:9221/10000 train_time:845355ms step_avg:91.68ms +[2025-08-22 10:28:39] [Rank 0] step:9241/10000 train_time:847291ms step_avg:91.69ms +[2025-08-22 10:28:39] [Rank 0] step:9241/10000 train_time:847291ms step_avg:91.69ms +[2025-08-22 10:28:41] [Rank 0] step:9261/10000 train_time:849226ms step_avg:91.70ms +[2025-08-22 10:28:41] [Rank 0] step:9261/10000 train_time:849226ms step_avg:91.70ms +[2025-08-22 10:28:43] [Rank 0] step:9281/10000 train_time:851145ms step_avg:91.71ms +[2025-08-22 10:28:43] [Rank 0] step:9281/10000 train_time:851145ms step_avg:91.71ms +[2025-08-22 10:28:45] [Rank 0] step:9301/10000 train_time:853068ms step_avg:91.72ms +[2025-08-22 10:28:45] [Rank 0] step:9301/10000 train_time:853068ms step_avg:91.72ms +[2025-08-22 10:28:47] [Rank 0] step:9321/10000 train_time:855000ms step_avg:91.73ms +[2025-08-22 10:28:47] [Rank 0] step:9321/10000 train_time:855000ms step_avg:91.73ms +[2025-08-22 10:28:49] 
[Rank 0] step:9341/10000 train_time:856931ms step_avg:91.74ms +[2025-08-22 10:28:49] [Rank 0] step:9341/10000 train_time:856931ms step_avg:91.74ms +[2025-08-22 10:28:51] [Rank 0] step:9361/10000 train_time:858867ms step_avg:91.75ms +[2025-08-22 10:28:51] [Rank 0] step:9361/10000 train_time:858867ms step_avg:91.75ms +[2025-08-22 10:28:53] [Rank 0] step:9381/10000 train_time:860810ms step_avg:91.76ms +[2025-08-22 10:28:53] [Rank 0] step:9381/10000 train_time:860810ms step_avg:91.76ms +[2025-08-22 10:28:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:28:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:29:08] [Rank 0] PRINT: step:9400/10000 val_loss:3.5993 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.3,q75/q25=14.41 attn_vo:H=0.8913,top10E=0.08,eRank=375.9,q75/q25=25.22 mlp_w1:H=0.8772,top10E=0.15,eRank=345.9,q75/q25=9.93 mlp_w2:H=0.9278,top10E=0.09,eRank=480.0,q75/q25=6.12 vo_prod:H=0.8187,top10E=0.15,eRank=234.9,q75/q25=391.86 train_time:862749ms step_avg:91.78ms +[2025-08-22 10:29:08] [Rank 0] PRINT: step:9400/10000 val_loss:3.5993 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.3,q75/q25=14.41 attn_vo:H=0.8913,top10E=0.08,eRank=375.9,q75/q25=25.22 mlp_w1:H=0.8772,top10E=0.15,eRank=345.9,q75/q25=9.93 mlp_w2:H=0.9278,top10E=0.09,eRank=480.0,q75/q25=6.12 vo_prod:H=0.8187,top10E=0.15,eRank=234.9,q75/q25=391.86 train_time:862749ms step_avg:91.78ms +[2025-08-22 10:29:08] [Rank 0] step:9401/10000 train_time:862769ms step_avg:91.77ms +[2025-08-22 10:29:08] [Rank 0] step:9401/10000 train_time:862769ms step_avg:91.77ms +[2025-08-22 10:29:10] [Rank 0] step:9421/10000 train_time:864773ms step_avg:91.79ms +[2025-08-22 10:29:10] [Rank 0] step:9421/10000 train_time:864773ms step_avg:91.79ms +[2025-08-22 10:29:12] [Rank 0] step:9441/10000 train_time:866782ms step_avg:91.81ms 
+[2025-08-22 10:29:12] [Rank 0] step:9441/10000 train_time:866782ms step_avg:91.81ms +[2025-08-22 10:29:14] [Rank 0] step:9461/10000 train_time:868716ms step_avg:91.82ms +[2025-08-22 10:29:14] [Rank 0] step:9461/10000 train_time:868716ms step_avg:91.82ms +[2025-08-22 10:29:16] [Rank 0] step:9481/10000 train_time:870649ms step_avg:91.83ms +[2025-08-22 10:29:16] [Rank 0] step:9481/10000 train_time:870649ms step_avg:91.83ms +[2025-08-22 10:29:18] [Rank 0] step:9501/10000 train_time:872589ms step_avg:91.84ms +[2025-08-22 10:29:18] [Rank 0] step:9501/10000 train_time:872589ms step_avg:91.84ms +[2025-08-22 10:29:20] [Rank 0] step:9521/10000 train_time:874510ms step_avg:91.85ms +[2025-08-22 10:29:20] [Rank 0] step:9521/10000 train_time:874510ms step_avg:91.85ms +[2025-08-22 10:29:22] [Rank 0] step:9541/10000 train_time:876443ms step_avg:91.86ms +[2025-08-22 10:29:22] [Rank 0] step:9541/10000 train_time:876443ms step_avg:91.86ms +[2025-08-22 10:29:24] [Rank 0] step:9561/10000 train_time:878367ms step_avg:91.87ms +[2025-08-22 10:29:24] [Rank 0] step:9561/10000 train_time:878367ms step_avg:91.87ms +[2025-08-22 10:29:26] [Rank 0] step:9581/10000 train_time:880299ms step_avg:91.88ms +[2025-08-22 10:29:26] [Rank 0] step:9581/10000 train_time:880299ms step_avg:91.88ms +[2025-08-22 10:29:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:29:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:29:41] [Rank 0] PRINT: step:9600/10000 val_loss:3.5896 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.3,q75/q25=14.41 attn_vo:H=0.8914,top10E=0.08,eRank=376.2,q75/q25=25.13 mlp_w1:H=0.8773,top10E=0.14,eRank=346.3,q75/q25=9.93 mlp_w2:H=0.9279,top10E=0.09,eRank=480.3,q75/q25=6.12 vo_prod:H=0.8189,top10E=0.15,eRank=235.2,q75/q25=388.88 train_time:882248ms step_avg:91.90ms +[2025-08-22 10:29:41] [Rank 0] PRINT: step:9600/10000 val_loss:3.5896 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.3,q75/q25=14.41 attn_vo:H=0.8914,top10E=0.08,eRank=376.2,q75/q25=25.13 mlp_w1:H=0.8773,top10E=0.14,eRank=346.3,q75/q25=9.93 mlp_w2:H=0.9279,top10E=0.09,eRank=480.3,q75/q25=6.12 vo_prod:H=0.8189,top10E=0.15,eRank=235.2,q75/q25=388.88 train_time:882248ms step_avg:91.90ms +[2025-08-22 10:29:42] [Rank 0] step:9601/10000 train_time:882267ms step_avg:91.89ms +[2025-08-22 10:29:42] [Rank 0] step:9601/10000 train_time:882267ms step_avg:91.89ms +[2025-08-22 10:29:43] [Rank 0] step:9621/10000 train_time:884215ms step_avg:91.90ms +[2025-08-22 10:29:43] [Rank 0] step:9621/10000 train_time:884215ms step_avg:91.90ms +[2025-08-22 10:29:45] [Rank 0] step:9641/10000 train_time:886161ms step_avg:91.92ms +[2025-08-22 10:29:45] [Rank 0] step:9641/10000 train_time:886161ms step_avg:91.92ms +[2025-08-22 10:29:47] [Rank 0] step:9661/10000 train_time:888126ms step_avg:91.93ms +[2025-08-22 10:29:47] [Rank 0] step:9661/10000 train_time:888126ms step_avg:91.93ms +[2025-08-22 10:29:49] [Rank 0] step:9681/10000 train_time:890080ms step_avg:91.94ms +[2025-08-22 10:29:49] [Rank 0] step:9681/10000 train_time:890080ms step_avg:91.94ms +[2025-08-22 10:29:51] [Rank 0] step:9701/10000 train_time:892050ms step_avg:91.95ms +[2025-08-22 10:29:51] [Rank 0] step:9701/10000 train_time:892050ms step_avg:91.95ms +[2025-08-22 10:29:53] [Rank 0] step:9721/10000 train_time:894008ms step_avg:91.97ms +[2025-08-22 10:29:53] [Rank 0] step:9721/10000 train_time:894008ms step_avg:91.97ms +[2025-08-22 10:29:55] 
[Rank 0] step:9741/10000 train_time:895983ms step_avg:91.98ms +[2025-08-22 10:29:55] [Rank 0] step:9741/10000 train_time:895983ms step_avg:91.98ms +[2025-08-22 10:29:57] [Rank 0] step:9761/10000 train_time:897946ms step_avg:91.99ms +[2025-08-22 10:29:57] [Rank 0] step:9761/10000 train_time:897946ms step_avg:91.99ms +[2025-08-22 10:29:59] [Rank 0] step:9781/10000 train_time:899915ms step_avg:92.01ms +[2025-08-22 10:29:59] [Rank 0] step:9781/10000 train_time:899915ms step_avg:92.01ms +[2025-08-22 10:30:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:30:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:30:15] [Rank 0] PRINT: step:9800/10000 val_loss:3.5808 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.3,q75/q25=14.41 attn_vo:H=0.8915,top10E=0.08,eRank=376.4,q75/q25=25.06 mlp_w1:H=0.8775,top10E=0.14,eRank=346.7,q75/q25=9.92 mlp_w2:H=0.9279,top10E=0.09,eRank=480.5,q75/q25=6.12 vo_prod:H=0.8191,top10E=0.15,eRank=235.5,q75/q25=385.27 train_time:901898ms step_avg:92.03ms +[2025-08-22 10:30:15] [Rank 0] PRINT: step:9800/10000 val_loss:3.5808 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.3,q75/q25=14.41 attn_vo:H=0.8915,top10E=0.08,eRank=376.4,q75/q25=25.06 mlp_w1:H=0.8775,top10E=0.14,eRank=346.7,q75/q25=9.92 mlp_w2:H=0.9279,top10E=0.09,eRank=480.5,q75/q25=6.12 vo_prod:H=0.8191,top10E=0.15,eRank=235.5,q75/q25=385.27 train_time:901898ms step_avg:92.03ms +[2025-08-22 10:30:15] [Rank 0] step:9801/10000 train_time:901917ms step_avg:92.02ms +[2025-08-22 10:30:15] [Rank 0] step:9801/10000 train_time:901917ms step_avg:92.02ms +[2025-08-22 10:30:17] [Rank 0] step:9821/10000 train_time:903866ms step_avg:92.03ms +[2025-08-22 10:30:17] [Rank 0] step:9821/10000 train_time:903866ms step_avg:92.03ms +[2025-08-22 10:30:19] [Rank 0] step:9841/10000 train_time:905825ms step_avg:92.05ms 
+[2025-08-22 10:30:19] [Rank 0] step:9841/10000 train_time:905825ms step_avg:92.05ms +[2025-08-22 10:30:21] [Rank 0] step:9861/10000 train_time:907768ms step_avg:92.06ms +[2025-08-22 10:30:21] [Rank 0] step:9861/10000 train_time:907768ms step_avg:92.06ms +[2025-08-22 10:30:23] [Rank 0] step:9881/10000 train_time:909713ms step_avg:92.07ms +[2025-08-22 10:30:23] [Rank 0] step:9881/10000 train_time:909713ms step_avg:92.07ms +[2025-08-22 10:30:25] [Rank 0] step:9901/10000 train_time:911677ms step_avg:92.08ms +[2025-08-22 10:30:25] [Rank 0] step:9901/10000 train_time:911677ms step_avg:92.08ms +[2025-08-22 10:30:26] [Rank 0] step:9921/10000 train_time:913626ms step_avg:92.09ms +[2025-08-22 10:30:26] [Rank 0] step:9921/10000 train_time:913626ms step_avg:92.09ms +[2025-08-22 10:30:28] [Rank 0] step:9941/10000 train_time:915590ms step_avg:92.10ms +[2025-08-22 10:30:28] [Rank 0] step:9941/10000 train_time:915590ms step_avg:92.10ms +[2025-08-22 10:30:30] [Rank 0] step:9961/10000 train_time:917537ms step_avg:92.11ms +[2025-08-22 10:30:30] [Rank 0] step:9961/10000 train_time:917537ms step_avg:92.11ms +[2025-08-22 10:30:32] [Rank 0] step:9981/10000 train_time:919494ms step_avg:92.12ms +[2025-08-22 10:30:32] [Rank 0] step:9981/10000 train_time:919494ms step_avg:92.12ms +[2025-08-22 10:30:34] [Rank 0] step:10000/10000 train_time:921361ms step_avg:92.14ms +[2025-08-22 10:30:34] [Rank 0] step:10000/10000 train_time:921361ms step_avg:92.14ms +[2025-08-22 10:30:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:30:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:30:48] [Rank 0] PRINT: step:10000/10000 val_loss:3.5726 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.3,q75/q25=14.40 attn_vo:H=0.8915,top10E=0.08,eRank=376.5,q75/q25=25.02 mlp_w1:H=0.8776,top10E=0.14,eRank=346.9,q75/q25=9.92 mlp_w2:H=0.9280,top10E=0.09,eRank=480.6,q75/q25=6.12 vo_prod:H=0.8192,top10E=0.15,eRank=235.6,q75/q25=382.50 train_time:921466ms step_avg:92.15ms +[2025-08-22 10:30:48] [Rank 0] PRINT: step:10000/10000 val_loss:3.5726 svd_entropy: attn_qk:H=0.9005,top10E=0.08,eRank=397.3,q75/q25=14.40 attn_vo:H=0.8915,top10E=0.08,eRank=376.5,q75/q25=25.02 mlp_w1:H=0.8776,top10E=0.14,eRank=346.9,q75/q25=9.92 mlp_w2:H=0.9280,top10E=0.09,eRank=480.6,q75/q25=6.12 vo_prod:H=0.8192,top10E=0.15,eRank=235.6,q75/q25=382.50 train_time:921466ms step_avg:92.15ms +[2025-08-22 10:30:48] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 10:30:48 2025 --- +[2025-08-22 10:30:48] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 10:30:48 2025 --- +[2025-08-22 10:30:48] [Rank 0] PRINT: Peak memory allocated: 11485 MiB reserved: 15976 MiB +[2025-08-22 10:30:48] [Rank 0] PRINT: Peak memory allocated: 11485 MiB reserved: 15976 MiB diff --git a/logs_svd_gated/mode_3_param_gated_seed_42/config.json b/logs_svd_gated/mode_3_param_gated_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..d07292e6f2ee19bb30f4f683d7a4bc4592f844fd --- /dev/null +++ b/logs_svd_gated/mode_3_param_gated_seed_42/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 3, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "052b2a05-897d-4285-a5b3-ea01f6368a43", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_3_param_gated_seed_42/training_log_052b2a05-897d-4285-a5b3-ea01f6368a43.txt b/logs_svd_gated/mode_3_param_gated_seed_42/training_log_052b2a05-897d-4285-a5b3-ea01f6368a43.txt new file mode 100644 index 0000000000000000000000000000000000000000..d12ab04bb0287d13e9cd240be4a7dfe7f581d3cd --- /dev/null +++ b/logs_svd_gated/mode_3_param_gated_seed_42/training_log_052b2a05-897d-4285-a5b3-ea01f6368a43.txt @@ -0,0 +1,2926 @@ +[2025-08-22 15:16:51] [Rank 0] PRINT: --- Script Start: Fri Aug 22 15:16:51 2025 --- +[2025-08-22 15:16:51] [Rank 0] PRINT: --- Script Start: Fri Aug 22 15:16:51 2025 --- +[2025-08-22 15:16:51] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=3, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 15:16:51] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=3, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 15:16:51] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 15:16:51] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 15:16:51] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 15:16:51] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 15:16:51] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_3_param_gated_seed_42 +[2025-08-22 15:16:51] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_3_param_gated_seed_42 +[2025-08-22 15:16:51] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 15:16:51] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 15:16:51] [Rank 0] PRINT: Constructing model... +[2025-08-22 15:16:51] [Rank 0] PRINT: Constructing model... +[2025-08-22 15:16:53] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 15:16:53] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 15:16:53] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 15:16:53] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 15:16:53] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 15:16:53] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 15:16:53] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 3 +[2025-08-22 15:16:53] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 3 +[2025-08-22 15:16:53] [Rank 0] PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: 0.05). +[2025-08-22 15:16:53] [Rank 0] PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: 0.05). +[2025-08-22 15:16:53] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 15:16:53] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 15:16:53] [Rank 0] PRINT: Muon optimizer is active with 44 parameters. +[2025-08-22 15:16:53] [Rank 0] PRINT: Muon optimizer is active with 44 parameters. +[2025-08-22 15:16:53] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 15:16:53] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 15:16:53] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 15:16:53] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 15:16:53] [Rank 0] PRINT: Starting warmup... +[2025-08-22 15:16:53] [Rank 0] PRINT: Starting warmup... +[2025-08-22 15:17:37] [Rank 0] PRINT: Warmup complete. +[2025-08-22 15:17:37] [Rank 0] PRINT: Warmup complete. +[2025-08-22 15:17:37] [Rank 0] PRINT: Starting training... +[2025-08-22 15:17:37] [Rank 0] PRINT: Starting training... 
+[2025-08-22 15:17:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:17:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:17:55] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 15:17:55] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 15:17:56] [Rank 0] step:21/10000 train_time:1752ms step_avg:83.43ms +[2025-08-22 15:17:56] [Rank 0] step:21/10000 train_time:1752ms step_avg:83.43ms +[2025-08-22 15:17:58] [Rank 0] step:41/10000 train_time:3461ms step_avg:84.41ms +[2025-08-22 15:17:58] [Rank 0] step:41/10000 train_time:3461ms step_avg:84.41ms +[2025-08-22 15:18:00] [Rank 0] step:61/10000 train_time:5273ms step_avg:86.45ms +[2025-08-22 15:18:00] [Rank 0] step:61/10000 train_time:5273ms step_avg:86.45ms +[2025-08-22 15:18:02] [Rank 0] step:81/10000 train_time:7070ms step_avg:87.28ms +[2025-08-22 15:18:02] [Rank 0] step:81/10000 train_time:7070ms step_avg:87.28ms +[2025-08-22 15:18:04] [Rank 0] step:101/10000 train_time:8787ms step_avg:87.00ms +[2025-08-22 15:18:04] [Rank 0] step:101/10000 train_time:8787ms step_avg:87.00ms +[2025-08-22 15:18:05] [Rank 0] step:121/10000 train_time:10507ms step_avg:86.83ms +[2025-08-22 15:18:05] [Rank 0] step:121/10000 
train_time:10507ms step_avg:86.83ms +[2025-08-22 15:18:07] [Rank 0] step:141/10000 train_time:12225ms step_avg:86.70ms +[2025-08-22 15:18:07] [Rank 0] step:141/10000 train_time:12225ms step_avg:86.70ms +[2025-08-22 15:18:09] [Rank 0] step:161/10000 train_time:13987ms step_avg:86.88ms +[2025-08-22 15:18:09] [Rank 0] step:161/10000 train_time:13987ms step_avg:86.88ms +[2025-08-22 15:18:10] [Rank 0] step:181/10000 train_time:15707ms step_avg:86.78ms +[2025-08-22 15:18:10] [Rank 0] step:181/10000 train_time:15707ms step_avg:86.78ms +[2025-08-22 15:18:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:18:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:18:26] [Rank 0] PRINT: step:200/10000 val_loss:6.3696 svd_entropy: attn_qk:H=0.8291,top10E=0.18,eRank=253.4,q75/q25=12.96 attn_vo:H=0.7549,top10E=0.25,eRank=164.8,q75/q25=19.39 mlp_w1:H=0.3968,top10E=0.82,eRank=19.3,q75/q25=5.10 mlp_w2:H=0.4203,top10E=0.79,eRank=18.1,q75/q25=7.13 vo_prod:H=0.5406,top10E=0.58,eRank=45.4,q75/q25=162.50 train_time:17427ms step_avg:87.14ms +[2025-08-22 15:18:26] [Rank 0] PRINT: step:200/10000 val_loss:6.3696 svd_entropy: attn_qk:H=0.8291,top10E=0.18,eRank=253.4,q75/q25=12.96 attn_vo:H=0.7549,top10E=0.25,eRank=164.8,q75/q25=19.39 mlp_w1:H=0.3968,top10E=0.82,eRank=19.3,q75/q25=5.10 mlp_w2:H=0.4203,top10E=0.79,eRank=18.1,q75/q25=7.13 vo_prod:H=0.5406,top10E=0.58,eRank=45.4,q75/q25=162.50 train_time:17427ms step_avg:87.14ms +[2025-08-22 15:18:26] [Rank 0] step:201/10000 train_time:17446ms step_avg:86.79ms +[2025-08-22 15:18:26] [Rank 0] step:201/10000 train_time:17446ms step_avg:86.79ms +[2025-08-22 15:18:28] [Rank 0] step:221/10000 train_time:19153ms step_avg:86.67ms +[2025-08-22 15:18:28] [Rank 0] step:221/10000 train_time:19153ms step_avg:86.67ms +[2025-08-22 15:18:30] [Rank 0] step:241/10000 
train_time:20866ms step_avg:86.58ms +[2025-08-22 15:18:30] [Rank 0] step:241/10000 train_time:20866ms step_avg:86.58ms +[2025-08-22 15:18:31] [Rank 0] step:261/10000 train_time:22577ms step_avg:86.50ms +[2025-08-22 15:18:31] [Rank 0] step:261/10000 train_time:22577ms step_avg:86.50ms +[2025-08-22 15:18:33] [Rank 0] step:281/10000 train_time:24289ms step_avg:86.44ms +[2025-08-22 15:18:33] [Rank 0] step:281/10000 train_time:24289ms step_avg:86.44ms +[2025-08-22 15:18:35] [Rank 0] step:301/10000 train_time:26000ms step_avg:86.38ms +[2025-08-22 15:18:35] [Rank 0] step:301/10000 train_time:26000ms step_avg:86.38ms +[2025-08-22 15:18:36] [Rank 0] step:321/10000 train_time:27712ms step_avg:86.33ms +[2025-08-22 15:18:36] [Rank 0] step:321/10000 train_time:27712ms step_avg:86.33ms +[2025-08-22 15:18:38] [Rank 0] step:341/10000 train_time:29423ms step_avg:86.29ms +[2025-08-22 15:18:38] [Rank 0] step:341/10000 train_time:29423ms step_avg:86.29ms +[2025-08-22 15:18:40] [Rank 0] step:361/10000 train_time:31135ms step_avg:86.25ms +[2025-08-22 15:18:40] [Rank 0] step:361/10000 train_time:31135ms step_avg:86.25ms +[2025-08-22 15:18:42] [Rank 0] step:381/10000 train_time:32847ms step_avg:86.21ms +[2025-08-22 15:18:42] [Rank 0] step:381/10000 train_time:32847ms step_avg:86.21ms +[2025-08-22 15:18:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:18:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:18:57] [Rank 0] PRINT: step:400/10000 val_loss:5.7559 svd_entropy: attn_qk:H=0.8113,top10E=0.17,eRank=227.2,q75/q25=24.44 attn_vo:H=0.6975,top10E=0.27,eRank=121.7,q75/q25=21.90 mlp_w1:H=0.5966,top10E=0.53,eRank=60.0,q75/q25=6.93 mlp_w2:H=0.6290,top10E=0.49,eRank=69.0,q75/q25=7.68 vo_prod:H=0.5542,top10E=0.52,eRank=52.2,q75/q25=222.32 train_time:34561ms step_avg:86.40ms +[2025-08-22 15:18:57] [Rank 0] PRINT: step:400/10000 val_loss:5.7559 svd_entropy: attn_qk:H=0.8113,top10E=0.17,eRank=227.2,q75/q25=24.44 attn_vo:H=0.6975,top10E=0.27,eRank=121.7,q75/q25=21.90 mlp_w1:H=0.5966,top10E=0.53,eRank=60.0,q75/q25=6.93 mlp_w2:H=0.6290,top10E=0.49,eRank=69.0,q75/q25=7.68 vo_prod:H=0.5542,top10E=0.52,eRank=52.2,q75/q25=222.32 train_time:34561ms step_avg:86.40ms +[2025-08-22 15:18:57] [Rank 0] step:401/10000 train_time:34578ms step_avg:86.23ms +[2025-08-22 15:18:57] [Rank 0] step:401/10000 train_time:34578ms step_avg:86.23ms +[2025-08-22 15:18:59] [Rank 0] step:421/10000 train_time:36290ms step_avg:86.20ms +[2025-08-22 15:18:59] [Rank 0] step:421/10000 train_time:36290ms step_avg:86.20ms +[2025-08-22 15:19:01] [Rank 0] step:441/10000 train_time:37995ms step_avg:86.16ms +[2025-08-22 15:19:01] [Rank 0] step:441/10000 train_time:37995ms step_avg:86.16ms +[2025-08-22 15:19:02] [Rank 0] step:461/10000 train_time:39756ms step_avg:86.24ms +[2025-08-22 15:19:02] [Rank 0] step:461/10000 train_time:39756ms step_avg:86.24ms +[2025-08-22 15:19:04] [Rank 0] step:481/10000 train_time:41556ms step_avg:86.39ms +[2025-08-22 15:19:04] [Rank 0] step:481/10000 train_time:41556ms step_avg:86.39ms +[2025-08-22 15:19:06] [Rank 0] step:501/10000 train_time:43263ms step_avg:86.35ms +[2025-08-22 15:19:06] [Rank 0] step:501/10000 train_time:43263ms step_avg:86.35ms +[2025-08-22 15:19:08] [Rank 0] step:521/10000 train_time:44971ms step_avg:86.32ms +[2025-08-22 15:19:08] [Rank 0] step:521/10000 train_time:44971ms step_avg:86.32ms +[2025-08-22 15:19:09] [Rank 0] step:541/10000 
train_time:46680ms step_avg:86.28ms +[2025-08-22 15:19:09] [Rank 0] step:541/10000 train_time:46680ms step_avg:86.28ms +[2025-08-22 15:19:11] [Rank 0] step:561/10000 train_time:48388ms step_avg:86.25ms +[2025-08-22 15:19:11] [Rank 0] step:561/10000 train_time:48388ms step_avg:86.25ms +[2025-08-22 15:19:13] [Rank 0] step:581/10000 train_time:50097ms step_avg:86.22ms +[2025-08-22 15:19:13] [Rank 0] step:581/10000 train_time:50097ms step_avg:86.22ms +[2025-08-22 15:19:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:19:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:19:28] [Rank 0] PRINT: step:600/10000 val_loss:5.4291 svd_entropy: attn_qk:H=0.8140,top10E=0.16,eRank=229.8,q75/q25=44.39 attn_vo:H=0.6982,top10E=0.25,eRank=121.3,q75/q25=28.14 mlp_w1:H=0.6780,top10E=0.39,eRank=98.4,q75/q25=6.26 mlp_w2:H=0.7474,top10E=0.30,eRank=146.2,q75/q25=8.01 vo_prod:H=0.5790,top10E=0.46,eRank=59.1,q75/q25=445.24 train_time:51810ms step_avg:86.35ms +[2025-08-22 15:19:28] [Rank 0] PRINT: step:600/10000 val_loss:5.4291 svd_entropy: attn_qk:H=0.8140,top10E=0.16,eRank=229.8,q75/q25=44.39 attn_vo:H=0.6982,top10E=0.25,eRank=121.3,q75/q25=28.14 mlp_w1:H=0.6780,top10E=0.39,eRank=98.4,q75/q25=6.26 mlp_w2:H=0.7474,top10E=0.30,eRank=146.2,q75/q25=8.01 vo_prod:H=0.5790,top10E=0.46,eRank=59.1,q75/q25=445.24 train_time:51810ms step_avg:86.35ms +[2025-08-22 15:19:28] [Rank 0] step:601/10000 train_time:51829ms step_avg:86.24ms +[2025-08-22 15:19:28] [Rank 0] step:601/10000 train_time:51829ms step_avg:86.24ms +[2025-08-22 15:19:30] [Rank 0] step:621/10000 train_time:53542ms step_avg:86.22ms +[2025-08-22 15:19:30] [Rank 0] step:621/10000 train_time:53542ms step_avg:86.22ms +[2025-08-22 15:19:32] [Rank 0] step:641/10000 train_time:55250ms step_avg:86.19ms +[2025-08-22 15:19:32] [Rank 0] step:641/10000 
train_time:55250ms step_avg:86.19ms +[2025-08-22 15:19:33] [Rank 0] step:661/10000 train_time:56959ms step_avg:86.17ms +[2025-08-22 15:19:33] [Rank 0] step:661/10000 train_time:56959ms step_avg:86.17ms +[2025-08-22 15:19:35] [Rank 0] step:681/10000 train_time:58668ms step_avg:86.15ms +[2025-08-22 15:19:35] [Rank 0] step:681/10000 train_time:58668ms step_avg:86.15ms +[2025-08-22 15:19:37] [Rank 0] step:701/10000 train_time:60378ms step_avg:86.13ms +[2025-08-22 15:19:37] [Rank 0] step:701/10000 train_time:60378ms step_avg:86.13ms +[2025-08-22 15:19:38] [Rank 0] step:721/10000 train_time:62087ms step_avg:86.11ms +[2025-08-22 15:19:38] [Rank 0] step:721/10000 train_time:62087ms step_avg:86.11ms +[2025-08-22 15:19:40] [Rank 0] step:741/10000 train_time:63798ms step_avg:86.10ms +[2025-08-22 15:19:40] [Rank 0] step:741/10000 train_time:63798ms step_avg:86.10ms +[2025-08-22 15:19:42] [Rank 0] step:761/10000 train_time:65522ms step_avg:86.10ms +[2025-08-22 15:19:42] [Rank 0] step:761/10000 train_time:65522ms step_avg:86.10ms +[2025-08-22 15:19:44] [Rank 0] step:781/10000 train_time:67247ms step_avg:86.10ms +[2025-08-22 15:19:44] [Rank 0] step:781/10000 train_time:67247ms step_avg:86.10ms +[2025-08-22 15:19:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:19:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:19:59] [Rank 0] PRINT: step:800/10000 val_loss:5.1785 svd_entropy: attn_qk:H=0.8245,top10E=0.15,eRank=244.2,q75/q25=64.39 attn_vo:H=0.7185,top10E=0.22,eRank=134.5,q75/q25=49.55 mlp_w1:H=0.7114,top10E=0.33,eRank=121.8,q75/q25=6.90 mlp_w2:H=0.7960,top10E=0.23,eRank=199.8,q75/q25=8.82 vo_prod:H=0.6053,top10E=0.41,eRank=67.1,q75/q25=1885.41 train_time:68975ms step_avg:86.22ms +[2025-08-22 15:19:59] [Rank 0] PRINT: step:800/10000 val_loss:5.1785 svd_entropy: attn_qk:H=0.8245,top10E=0.15,eRank=244.2,q75/q25=64.39 attn_vo:H=0.7185,top10E=0.22,eRank=134.5,q75/q25=49.55 mlp_w1:H=0.7114,top10E=0.33,eRank=121.8,q75/q25=6.90 mlp_w2:H=0.7960,top10E=0.23,eRank=199.8,q75/q25=8.82 vo_prod:H=0.6053,top10E=0.41,eRank=67.1,q75/q25=1885.41 train_time:68975ms step_avg:86.22ms +[2025-08-22 15:19:59] [Rank 0] step:801/10000 train_time:68994ms step_avg:86.13ms +[2025-08-22 15:19:59] [Rank 0] step:801/10000 train_time:68994ms step_avg:86.13ms +[2025-08-22 15:20:01] [Rank 0] step:821/10000 train_time:70727ms step_avg:86.15ms +[2025-08-22 15:20:01] [Rank 0] step:821/10000 train_time:70727ms step_avg:86.15ms +[2025-08-22 15:20:03] [Rank 0] step:841/10000 train_time:72456ms step_avg:86.15ms +[2025-08-22 15:20:03] [Rank 0] step:841/10000 train_time:72456ms step_avg:86.15ms +[2025-08-22 15:20:04] [Rank 0] step:861/10000 train_time:74176ms step_avg:86.15ms +[2025-08-22 15:20:04] [Rank 0] step:861/10000 train_time:74176ms step_avg:86.15ms +[2025-08-22 15:20:06] [Rank 0] step:881/10000 train_time:75992ms step_avg:86.26ms +[2025-08-22 15:20:06] [Rank 0] step:881/10000 train_time:75992ms step_avg:86.26ms +[2025-08-22 15:20:08] [Rank 0] step:901/10000 train_time:77814ms step_avg:86.36ms +[2025-08-22 15:20:08] [Rank 0] step:901/10000 train_time:77814ms step_avg:86.36ms +[2025-08-22 15:20:10] [Rank 0] step:921/10000 train_time:79534ms step_avg:86.36ms +[2025-08-22 15:20:10] [Rank 0] step:921/10000 train_time:79534ms step_avg:86.36ms +[2025-08-22 15:20:11] [Rank 0] step:941/10000 
train_time:81262ms step_avg:86.36ms +[2025-08-22 15:20:11] [Rank 0] step:941/10000 train_time:81262ms step_avg:86.36ms +[2025-08-22 15:20:13] [Rank 0] step:961/10000 train_time:82982ms step_avg:86.35ms +[2025-08-22 15:20:13] [Rank 0] step:961/10000 train_time:82982ms step_avg:86.35ms +[2025-08-22 15:20:15] [Rank 0] step:981/10000 train_time:84703ms step_avg:86.34ms +[2025-08-22 15:20:15] [Rank 0] step:981/10000 train_time:84703ms step_avg:86.34ms +[2025-08-22 15:20:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:20:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:20:30] [Rank 0] PRINT: step:1000/10000 val_loss:5.0242 svd_entropy: attn_qk:H=0.8356,top10E=0.14,eRank=261.3,q75/q25=74.65 attn_vo:H=0.7417,top10E=0.20,eRank=152.2,q75/q25=90.33 mlp_w1:H=0.7327,top10E=0.30,eRank=139.4,q75/q25=7.57 mlp_w2:H=0.8232,top10E=0.20,eRank=238.5,q75/q25=9.37 vo_prod:H=0.6282,top10E=0.36,eRank=75.3,q75/q25=7353.55 train_time:86427ms step_avg:86.43ms +[2025-08-22 15:20:30] [Rank 0] PRINT: step:1000/10000 val_loss:5.0242 svd_entropy: attn_qk:H=0.8356,top10E=0.14,eRank=261.3,q75/q25=74.65 attn_vo:H=0.7417,top10E=0.20,eRank=152.2,q75/q25=90.33 mlp_w1:H=0.7327,top10E=0.30,eRank=139.4,q75/q25=7.57 mlp_w2:H=0.8232,top10E=0.20,eRank=238.5,q75/q25=9.37 vo_prod:H=0.6282,top10E=0.36,eRank=75.3,q75/q25=7353.55 train_time:86427ms step_avg:86.43ms +[2025-08-22 15:20:30] [Rank 0] step:1001/10000 train_time:86446ms step_avg:86.36ms +[2025-08-22 15:20:30] [Rank 0] step:1001/10000 train_time:86446ms step_avg:86.36ms +[2025-08-22 15:20:32] [Rank 0] step:1021/10000 train_time:88164ms step_avg:86.35ms +[2025-08-22 15:20:32] [Rank 0] step:1021/10000 train_time:88164ms step_avg:86.35ms +[2025-08-22 15:20:34] [Rank 0] step:1041/10000 train_time:89880ms step_avg:86.34ms +[2025-08-22 15:20:34] [Rank 0] 
step:1041/10000 train_time:89880ms step_avg:86.34ms +[2025-08-22 15:20:36] [Rank 0] step:1061/10000 train_time:91602ms step_avg:86.34ms +[2025-08-22 15:20:36] [Rank 0] step:1061/10000 train_time:91602ms step_avg:86.34ms +[2025-08-22 15:20:37] [Rank 0] step:1081/10000 train_time:93320ms step_avg:86.33ms +[2025-08-22 15:20:37] [Rank 0] step:1081/10000 train_time:93320ms step_avg:86.33ms +[2025-08-22 15:20:39] [Rank 0] step:1101/10000 train_time:95041ms step_avg:86.32ms +[2025-08-22 15:20:39] [Rank 0] step:1101/10000 train_time:95041ms step_avg:86.32ms +[2025-08-22 15:20:41] [Rank 0] step:1121/10000 train_time:96762ms step_avg:86.32ms +[2025-08-22 15:20:41] [Rank 0] step:1121/10000 train_time:96762ms step_avg:86.32ms +[2025-08-22 15:20:42] [Rank 0] step:1141/10000 train_time:98484ms step_avg:86.31ms +[2025-08-22 15:20:42] [Rank 0] step:1141/10000 train_time:98484ms step_avg:86.31ms +[2025-08-22 15:20:44] [Rank 0] step:1161/10000 train_time:100207ms step_avg:86.31ms +[2025-08-22 15:20:44] [Rank 0] step:1161/10000 train_time:100207ms step_avg:86.31ms +[2025-08-22 15:20:46] [Rank 0] step:1181/10000 train_time:101929ms step_avg:86.31ms +[2025-08-22 15:20:46] [Rank 0] step:1181/10000 train_time:101929ms step_avg:86.31ms +[2025-08-22 15:20:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:20:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:21:01] [Rank 0] PRINT: step:1200/10000 val_loss:4.9030 svd_entropy: attn_qk:H=0.8451,top10E=0.13,eRank=277.1,q75/q25=73.69 attn_vo:H=0.7632,top10E=0.18,eRank=171.6,q75/q25=134.97 mlp_w1:H=0.7491,top10E=0.28,eRank=154.4,q75/q25=8.27 mlp_w2:H=0.8421,top10E=0.17,eRank=269.9,q75/q25=9.72 vo_prod:H=0.6505,top10E=0.33,eRank=84.9,q75/q25=18274.88 train_time:103653ms step_avg:86.38ms +[2025-08-22 15:21:01] [Rank 0] PRINT: step:1200/10000 val_loss:4.9030 svd_entropy: attn_qk:H=0.8451,top10E=0.13,eRank=277.1,q75/q25=73.69 attn_vo:H=0.7632,top10E=0.18,eRank=171.6,q75/q25=134.97 mlp_w1:H=0.7491,top10E=0.28,eRank=154.4,q75/q25=8.27 mlp_w2:H=0.8421,top10E=0.17,eRank=269.9,q75/q25=9.72 vo_prod:H=0.6505,top10E=0.33,eRank=84.9,q75/q25=18274.88 train_time:103653ms step_avg:86.38ms +[2025-08-22 15:21:01] [Rank 0] step:1201/10000 train_time:103673ms step_avg:86.32ms +[2025-08-22 15:21:01] [Rank 0] step:1201/10000 train_time:103673ms step_avg:86.32ms +[2025-08-22 15:21:03] [Rank 0] step:1221/10000 train_time:105391ms step_avg:86.32ms +[2025-08-22 15:21:03] [Rank 0] step:1221/10000 train_time:105391ms step_avg:86.32ms +[2025-08-22 15:21:05] [Rank 0] step:1241/10000 train_time:107111ms step_avg:86.31ms +[2025-08-22 15:21:05] [Rank 0] step:1241/10000 train_time:107111ms step_avg:86.31ms +[2025-08-22 15:21:07] [Rank 0] step:1261/10000 train_time:108834ms step_avg:86.31ms +[2025-08-22 15:21:07] [Rank 0] step:1261/10000 train_time:108834ms step_avg:86.31ms +[2025-08-22 15:21:08] [Rank 0] step:1281/10000 train_time:110610ms step_avg:86.35ms +[2025-08-22 15:21:08] [Rank 0] step:1281/10000 train_time:110610ms step_avg:86.35ms +[2025-08-22 15:21:10] [Rank 0] step:1301/10000 train_time:112380ms step_avg:86.38ms +[2025-08-22 15:21:10] [Rank 0] step:1301/10000 train_time:112380ms step_avg:86.38ms +[2025-08-22 15:21:12] [Rank 0] step:1321/10000 train_time:114166ms step_avg:86.42ms +[2025-08-22 15:21:12] [Rank 0] step:1321/10000 train_time:114166ms step_avg:86.42ms +[2025-08-22 
15:21:14] [Rank 0] step:1341/10000 train_time:115890ms step_avg:86.42ms +[2025-08-22 15:21:14] [Rank 0] step:1341/10000 train_time:115890ms step_avg:86.42ms +[2025-08-22 15:21:15] [Rank 0] step:1361/10000 train_time:117616ms step_avg:86.42ms +[2025-08-22 15:21:15] [Rank 0] step:1361/10000 train_time:117616ms step_avg:86.42ms +[2025-08-22 15:21:17] [Rank 0] step:1381/10000 train_time:119341ms step_avg:86.42ms +[2025-08-22 15:21:17] [Rank 0] step:1381/10000 train_time:119341ms step_avg:86.42ms +[2025-08-22 15:21:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:21:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:21:33] [Rank 0] PRINT: step:1400/10000 val_loss:4.8271 svd_entropy: attn_qk:H=0.8530,top10E=0.12,eRank=291.3,q75/q25=66.09 attn_vo:H=0.7815,top10E=0.16,eRank=190.6,q75/q25=166.01 mlp_w1:H=0.7626,top10E=0.26,eRank=168.0,q75/q25=8.88 mlp_w2:H=0.8568,top10E=0.16,eRank=297.5,q75/q25=9.69 vo_prod:H=0.6706,top10E=0.31,eRank=95.0,q75/q25=29011.87 train_time:121068ms step_avg:86.48ms +[2025-08-22 15:21:33] [Rank 0] PRINT: step:1400/10000 val_loss:4.8271 svd_entropy: attn_qk:H=0.8530,top10E=0.12,eRank=291.3,q75/q25=66.09 attn_vo:H=0.7815,top10E=0.16,eRank=190.6,q75/q25=166.01 mlp_w1:H=0.7626,top10E=0.26,eRank=168.0,q75/q25=8.88 mlp_w2:H=0.8568,top10E=0.16,eRank=297.5,q75/q25=9.69 vo_prod:H=0.6706,top10E=0.31,eRank=95.0,q75/q25=29011.87 train_time:121068ms step_avg:86.48ms +[2025-08-22 15:21:33] [Rank 0] step:1401/10000 train_time:121086ms step_avg:86.43ms +[2025-08-22 15:21:33] [Rank 0] step:1401/10000 train_time:121086ms step_avg:86.43ms +[2025-08-22 15:21:34] [Rank 0] step:1421/10000 train_time:122807ms step_avg:86.42ms +[2025-08-22 15:21:34] [Rank 0] step:1421/10000 train_time:122807ms step_avg:86.42ms +[2025-08-22 15:21:36] [Rank 0] step:1441/10000 train_time:124525ms 
step_avg:86.42ms +[2025-08-22 15:21:36] [Rank 0] step:1441/10000 train_time:124525ms step_avg:86.42ms +[2025-08-22 15:21:38] [Rank 0] step:1461/10000 train_time:126244ms step_avg:86.41ms +[2025-08-22 15:21:38] [Rank 0] step:1461/10000 train_time:126244ms step_avg:86.41ms +[2025-08-22 15:21:40] [Rank 0] step:1481/10000 train_time:127964ms step_avg:86.40ms +[2025-08-22 15:21:40] [Rank 0] step:1481/10000 train_time:127964ms step_avg:86.40ms +[2025-08-22 15:21:41] [Rank 0] step:1501/10000 train_time:129693ms step_avg:86.40ms +[2025-08-22 15:21:41] [Rank 0] step:1501/10000 train_time:129693ms step_avg:86.40ms +[2025-08-22 15:21:43] [Rank 0] step:1521/10000 train_time:131423ms step_avg:86.41ms +[2025-08-22 15:21:43] [Rank 0] step:1521/10000 train_time:131423ms step_avg:86.41ms +[2025-08-22 15:21:45] [Rank 0] step:1541/10000 train_time:133153ms step_avg:86.41ms +[2025-08-22 15:21:45] [Rank 0] step:1541/10000 train_time:133153ms step_avg:86.41ms +[2025-08-22 15:21:47] [Rank 0] step:1561/10000 train_time:134884ms step_avg:86.41ms +[2025-08-22 15:21:47] [Rank 0] step:1561/10000 train_time:134884ms step_avg:86.41ms +[2025-08-22 15:21:48] [Rank 0] step:1581/10000 train_time:136616ms step_avg:86.41ms +[2025-08-22 15:21:48] [Rank 0] step:1581/10000 train_time:136616ms step_avg:86.41ms +[2025-08-22 15:21:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:21:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:22:04] [Rank 0] PRINT: step:1600/10000 val_loss:4.7392 svd_entropy: attn_qk:H=0.8596,top10E=0.12,eRank=303.9,q75/q25=56.06 attn_vo:H=0.7967,top10E=0.15,eRank=208.5,q75/q25=177.16 mlp_w1:H=0.7743,top10E=0.25,eRank=180.5,q75/q25=9.36 mlp_w2:H=0.8686,top10E=0.14,eRank=321.4,q75/q25=9.45 vo_prod:H=0.6878,top10E=0.29,eRank=105.0,q75/q25=35006.81 train_time:138352ms step_avg:86.47ms +[2025-08-22 15:22:04] [Rank 0] PRINT: step:1600/10000 val_loss:4.7392 svd_entropy: attn_qk:H=0.8596,top10E=0.12,eRank=303.9,q75/q25=56.06 attn_vo:H=0.7967,top10E=0.15,eRank=208.5,q75/q25=177.16 mlp_w1:H=0.7743,top10E=0.25,eRank=180.5,q75/q25=9.36 mlp_w2:H=0.8686,top10E=0.14,eRank=321.4,q75/q25=9.45 vo_prod:H=0.6878,top10E=0.29,eRank=105.0,q75/q25=35006.81 train_time:138352ms step_avg:86.47ms +[2025-08-22 15:22:04] [Rank 0] step:1601/10000 train_time:138371ms step_avg:86.43ms +[2025-08-22 15:22:04] [Rank 0] step:1601/10000 train_time:138371ms step_avg:86.43ms +[2025-08-22 15:22:06] [Rank 0] step:1621/10000 train_time:140091ms step_avg:86.42ms +[2025-08-22 15:22:06] [Rank 0] step:1621/10000 train_time:140091ms step_avg:86.42ms +[2025-08-22 15:22:07] [Rank 0] step:1641/10000 train_time:141819ms step_avg:86.42ms +[2025-08-22 15:22:07] [Rank 0] step:1641/10000 train_time:141819ms step_avg:86.42ms +[2025-08-22 15:22:09] [Rank 0] step:1661/10000 train_time:143548ms step_avg:86.42ms +[2025-08-22 15:22:09] [Rank 0] step:1661/10000 train_time:143548ms step_avg:86.42ms +[2025-08-22 15:22:11] [Rank 0] step:1681/10000 train_time:145278ms step_avg:86.42ms +[2025-08-22 15:22:11] [Rank 0] step:1681/10000 train_time:145278ms step_avg:86.42ms +[2025-08-22 15:22:13] [Rank 0] step:1701/10000 train_time:147093ms step_avg:86.47ms +[2025-08-22 15:22:13] [Rank 0] step:1701/10000 train_time:147093ms step_avg:86.47ms +[2025-08-22 15:22:14] [Rank 0] step:1721/10000 train_time:148907ms step_avg:86.52ms +[2025-08-22 15:22:14] [Rank 0] step:1721/10000 train_time:148907ms step_avg:86.52ms +[2025-08-22 
15:22:16] [Rank 0] step:1741/10000 train_time:150640ms step_avg:86.53ms +[2025-08-22 15:22:16] [Rank 0] step:1741/10000 train_time:150640ms step_avg:86.53ms +[2025-08-22 15:22:18] [Rank 0] step:1761/10000 train_time:152372ms step_avg:86.53ms +[2025-08-22 15:22:18] [Rank 0] step:1761/10000 train_time:152372ms step_avg:86.53ms +[2025-08-22 15:22:20] [Rank 0] step:1781/10000 train_time:154103ms step_avg:86.53ms +[2025-08-22 15:22:20] [Rank 0] step:1781/10000 train_time:154103ms step_avg:86.53ms +[2025-08-22 15:22:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:22:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:22:35] [Rank 0] PRINT: step:1800/10000 val_loss:4.6425 svd_entropy: attn_qk:H=0.8651,top10E=0.11,eRank=314.8,q75/q25=47.59 attn_vo:H=0.8096,top10E=0.14,eRank=225.3,q75/q25=169.94 mlp_w1:H=0.7842,top10E=0.24,eRank=192.0,q75/q25=9.75 mlp_w2:H=0.8784,top10E=0.13,eRank=343.0,q75/q25=9.10 vo_prod:H=0.7027,top10E=0.28,eRank=115.0,q75/q25=34256.95 train_time:155838ms step_avg:86.58ms +[2025-08-22 15:22:35] [Rank 0] PRINT: step:1800/10000 val_loss:4.6425 svd_entropy: attn_qk:H=0.8651,top10E=0.11,eRank=314.8,q75/q25=47.59 attn_vo:H=0.8096,top10E=0.14,eRank=225.3,q75/q25=169.94 mlp_w1:H=0.7842,top10E=0.24,eRank=192.0,q75/q25=9.75 mlp_w2:H=0.8784,top10E=0.13,eRank=343.0,q75/q25=9.10 vo_prod:H=0.7027,top10E=0.28,eRank=115.0,q75/q25=34256.95 train_time:155838ms step_avg:86.58ms +[2025-08-22 15:22:35] [Rank 0] step:1801/10000 train_time:155857ms step_avg:86.54ms +[2025-08-22 15:22:35] [Rank 0] step:1801/10000 train_time:155857ms step_avg:86.54ms +[2025-08-22 15:22:37] [Rank 0] step:1821/10000 train_time:157581ms step_avg:86.54ms +[2025-08-22 15:22:37] [Rank 0] step:1821/10000 train_time:157581ms step_avg:86.54ms +[2025-08-22 15:22:39] [Rank 0] step:1841/10000 train_time:159309ms 
step_avg:86.53ms +[2025-08-22 15:22:39] [Rank 0] step:1841/10000 train_time:159309ms step_avg:86.53ms +[2025-08-22 15:22:40] [Rank 0] step:1861/10000 train_time:161038ms step_avg:86.53ms +[2025-08-22 15:22:40] [Rank 0] step:1861/10000 train_time:161038ms step_avg:86.53ms +[2025-08-22 15:22:42] [Rank 0] step:1881/10000 train_time:162768ms step_avg:86.53ms +[2025-08-22 15:22:42] [Rank 0] step:1881/10000 train_time:162768ms step_avg:86.53ms +[2025-08-22 15:22:44] [Rank 0] step:1901/10000 train_time:164497ms step_avg:86.53ms +[2025-08-22 15:22:44] [Rank 0] step:1901/10000 train_time:164497ms step_avg:86.53ms +[2025-08-22 15:22:45] [Rank 0] step:1921/10000 train_time:166228ms step_avg:86.53ms +[2025-08-22 15:22:45] [Rank 0] step:1921/10000 train_time:166228ms step_avg:86.53ms +[2025-08-22 15:22:47] [Rank 0] step:1941/10000 train_time:167958ms step_avg:86.53ms +[2025-08-22 15:22:47] [Rank 0] step:1941/10000 train_time:167958ms step_avg:86.53ms +[2025-08-22 15:22:49] [Rank 0] step:1961/10000 train_time:169690ms step_avg:86.53ms +[2025-08-22 15:22:49] [Rank 0] step:1961/10000 train_time:169690ms step_avg:86.53ms +[2025-08-22 15:22:51] [Rank 0] step:1981/10000 train_time:171424ms step_avg:86.53ms +[2025-08-22 15:22:51] [Rank 0] step:1981/10000 train_time:171424ms step_avg:86.53ms +[2025-08-22 15:22:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:22:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:23:06] [Rank 0] PRINT: step:2000/10000 val_loss:4.5680 svd_entropy: attn_qk:H=0.8697,top10E=0.11,eRank=324.4,q75/q25=40.66 attn_vo:H=0.8205,top10E=0.14,eRank=241.0,q75/q25=157.95 mlp_w1:H=0.7931,top10E=0.23,eRank=202.9,q75/q25=10.01 mlp_w2:H=0.8866,top10E=0.13,eRank=362.1,q75/q25=8.69 vo_prod:H=0.7159,top10E=0.26,eRank=124.6,q75/q25=29163.08 train_time:173161ms step_avg:86.58ms +[2025-08-22 15:23:06] [Rank 0] PRINT: step:2000/10000 val_loss:4.5680 svd_entropy: attn_qk:H=0.8697,top10E=0.11,eRank=324.4,q75/q25=40.66 attn_vo:H=0.8205,top10E=0.14,eRank=241.0,q75/q25=157.95 mlp_w1:H=0.7931,top10E=0.23,eRank=202.9,q75/q25=10.01 mlp_w2:H=0.8866,top10E=0.13,eRank=362.1,q75/q25=8.69 vo_prod:H=0.7159,top10E=0.26,eRank=124.6,q75/q25=29163.08 train_time:173161ms step_avg:86.58ms +[2025-08-22 15:23:06] [Rank 0] step:2001/10000 train_time:173180ms step_avg:86.55ms +[2025-08-22 15:23:06] [Rank 0] step:2001/10000 train_time:173180ms step_avg:86.55ms +[2025-08-22 15:23:08] [Rank 0] step:2021/10000 train_time:174911ms step_avg:86.55ms +[2025-08-22 15:23:08] [Rank 0] step:2021/10000 train_time:174911ms step_avg:86.55ms +[2025-08-22 15:23:10] [Rank 0] step:2041/10000 train_time:177316ms step_avg:86.88ms +[2025-08-22 15:23:10] [Rank 0] step:2041/10000 train_time:177316ms step_avg:86.88ms +[2025-08-22 15:23:12] [Rank 0] step:2061/10000 train_time:179048ms step_avg:86.87ms +[2025-08-22 15:23:12] [Rank 0] step:2061/10000 train_time:179048ms step_avg:86.87ms +[2025-08-22 15:23:14] [Rank 0] step:2081/10000 train_time:180780ms step_avg:86.87ms +[2025-08-22 15:23:14] [Rank 0] step:2081/10000 train_time:180780ms step_avg:86.87ms +[2025-08-22 15:23:16] [Rank 0] step:2101/10000 train_time:182588ms step_avg:86.91ms +[2025-08-22 15:23:16] [Rank 0] step:2101/10000 train_time:182588ms step_avg:86.91ms +[2025-08-22 15:23:18] [Rank 0] step:2121/10000 train_time:184391ms step_avg:86.94ms +[2025-08-22 15:23:18] [Rank 0] step:2121/10000 train_time:184391ms step_avg:86.94ms +[2025-08-22 
15:23:19] [Rank 0] step:2141/10000 train_time:186127ms step_avg:86.93ms +[2025-08-22 15:23:19] [Rank 0] step:2141/10000 train_time:186127ms step_avg:86.93ms +[2025-08-22 15:23:21] [Rank 0] step:2161/10000 train_time:187863ms step_avg:86.93ms +[2025-08-22 15:23:21] [Rank 0] step:2161/10000 train_time:187863ms step_avg:86.93ms +[2025-08-22 15:23:23] [Rank 0] step:2181/10000 train_time:189601ms step_avg:86.93ms +[2025-08-22 15:23:23] [Rank 0] step:2181/10000 train_time:189601ms step_avg:86.93ms +[2025-08-22 15:23:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:23:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:23:38] [Rank 0] PRINT: step:2200/10000 val_loss:4.4454 svd_entropy: attn_qk:H=0.8734,top10E=0.11,eRank=332.3,q75/q25=35.46 attn_vo:H=0.8294,top10E=0.13,eRank=254.7,q75/q25=140.16 mlp_w1:H=0.8009,top10E=0.22,eRank=213.2,q75/q25=10.18 mlp_w2:H=0.8934,top10E=0.12,eRank=378.9,q75/q25=8.27 vo_prod:H=0.7270,top10E=0.25,eRank=133.5,q75/q25=22636.11 train_time:191341ms step_avg:86.97ms +[2025-08-22 15:23:38] [Rank 0] PRINT: step:2200/10000 val_loss:4.4454 svd_entropy: attn_qk:H=0.8734,top10E=0.11,eRank=332.3,q75/q25=35.46 attn_vo:H=0.8294,top10E=0.13,eRank=254.7,q75/q25=140.16 mlp_w1:H=0.8009,top10E=0.22,eRank=213.2,q75/q25=10.18 mlp_w2:H=0.8934,top10E=0.12,eRank=378.9,q75/q25=8.27 vo_prod:H=0.7270,top10E=0.25,eRank=133.5,q75/q25=22636.11 train_time:191341ms step_avg:86.97ms +[2025-08-22 15:23:38] [Rank 0] step:2201/10000 train_time:191360ms step_avg:86.94ms +[2025-08-22 15:23:38] [Rank 0] step:2201/10000 train_time:191360ms step_avg:86.94ms +[2025-08-22 15:23:40] [Rank 0] step:2221/10000 train_time:193097ms step_avg:86.94ms +[2025-08-22 15:23:40] [Rank 0] step:2221/10000 train_time:193097ms step_avg:86.94ms +[2025-08-22 15:23:42] [Rank 0] step:2241/10000 train_time:194865ms 
step_avg:86.95ms +[2025-08-22 15:23:42] [Rank 0] step:2241/10000 train_time:194865ms step_avg:86.95ms +[2025-08-22 15:23:44] [Rank 0] step:2261/10000 train_time:196640ms step_avg:86.97ms +[2025-08-22 15:23:44] [Rank 0] step:2261/10000 train_time:196640ms step_avg:86.97ms +[2025-08-22 15:23:45] [Rank 0] step:2281/10000 train_time:198414ms step_avg:86.99ms +[2025-08-22 15:23:45] [Rank 0] step:2281/10000 train_time:198414ms step_avg:86.99ms +[2025-08-22 15:23:47] [Rank 0] step:2301/10000 train_time:200190ms step_avg:87.00ms +[2025-08-22 15:23:47] [Rank 0] step:2301/10000 train_time:200190ms step_avg:87.00ms +[2025-08-22 15:23:49] [Rank 0] step:2321/10000 train_time:201966ms step_avg:87.02ms +[2025-08-22 15:23:49] [Rank 0] step:2321/10000 train_time:201966ms step_avg:87.02ms +[2025-08-22 15:23:51] [Rank 0] step:2341/10000 train_time:203744ms step_avg:87.03ms +[2025-08-22 15:23:51] [Rank 0] step:2341/10000 train_time:203744ms step_avg:87.03ms +[2025-08-22 15:23:53] [Rank 0] step:2361/10000 train_time:205520ms step_avg:87.05ms +[2025-08-22 15:23:53] [Rank 0] step:2361/10000 train_time:205520ms step_avg:87.05ms +[2025-08-22 15:23:54] [Rank 0] step:2381/10000 train_time:207299ms step_avg:87.06ms +[2025-08-22 15:23:54] [Rank 0] step:2381/10000 train_time:207299ms step_avg:87.06ms +[2025-08-22 15:23:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:23:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:24:10] [Rank 0] PRINT: step:2400/10000 val_loss:4.3452 svd_entropy: attn_qk:H=0.8761,top10E=0.10,eRank=338.2,q75/q25=31.41 attn_vo:H=0.8363,top10E=0.13,eRank=266.1,q75/q25=121.81 mlp_w1:H=0.8082,top10E=0.21,eRank=223.1,q75/q25=10.25 mlp_w2:H=0.8993,top10E=0.11,eRank=393.8,q75/q25=7.82 vo_prod:H=0.7349,top10E=0.24,eRank=140.4,q75/q25=16759.98 train_time:209078ms step_avg:87.12ms +[2025-08-22 15:24:10] [Rank 0] PRINT: step:2400/10000 val_loss:4.3452 svd_entropy: attn_qk:H=0.8761,top10E=0.10,eRank=338.2,q75/q25=31.41 attn_vo:H=0.8363,top10E=0.13,eRank=266.1,q75/q25=121.81 mlp_w1:H=0.8082,top10E=0.21,eRank=223.1,q75/q25=10.25 mlp_w2:H=0.8993,top10E=0.11,eRank=393.8,q75/q25=7.82 vo_prod:H=0.7349,top10E=0.24,eRank=140.4,q75/q25=16759.98 train_time:209078ms step_avg:87.12ms +[2025-08-22 15:24:10] [Rank 0] step:2401/10000 train_time:209098ms step_avg:87.09ms +[2025-08-22 15:24:10] [Rank 0] step:2401/10000 train_time:209098ms step_avg:87.09ms +[2025-08-22 15:24:12] [Rank 0] step:2421/10000 train_time:210879ms step_avg:87.10ms +[2025-08-22 15:24:12] [Rank 0] step:2421/10000 train_time:210879ms step_avg:87.10ms +[2025-08-22 15:24:14] [Rank 0] step:2441/10000 train_time:212651ms step_avg:87.12ms +[2025-08-22 15:24:14] [Rank 0] step:2441/10000 train_time:212651ms step_avg:87.12ms +[2025-08-22 15:24:15] [Rank 0] step:2461/10000 train_time:214424ms step_avg:87.13ms +[2025-08-22 15:24:15] [Rank 0] step:2461/10000 train_time:214424ms step_avg:87.13ms +[2025-08-22 15:24:17] [Rank 0] step:2481/10000 train_time:216205ms step_avg:87.14ms +[2025-08-22 15:24:17] [Rank 0] step:2481/10000 train_time:216205ms step_avg:87.14ms +[2025-08-22 15:24:19] [Rank 0] step:2501/10000 train_time:218062ms step_avg:87.19ms +[2025-08-22 15:24:19] [Rank 0] step:2501/10000 train_time:218062ms step_avg:87.19ms +[2025-08-22 15:24:21] [Rank 0] step:2521/10000 train_time:219938ms step_avg:87.24ms +[2025-08-22 15:24:21] [Rank 0] step:2521/10000 train_time:219938ms step_avg:87.24ms +[2025-08-22 
15:24:23] [Rank 0] step:2541/10000 train_time:221715ms step_avg:87.26ms +[2025-08-22 15:24:23] [Rank 0] step:2541/10000 train_time:221715ms step_avg:87.26ms +[2025-08-22 15:24:24] [Rank 0] step:2561/10000 train_time:223492ms step_avg:87.27ms +[2025-08-22 15:24:24] [Rank 0] step:2561/10000 train_time:223492ms step_avg:87.27ms +[2025-08-22 15:24:26] [Rank 0] step:2581/10000 train_time:225271ms step_avg:87.28ms +[2025-08-22 15:24:26] [Rank 0] step:2581/10000 train_time:225271ms step_avg:87.28ms +[2025-08-22 15:24:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:24:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:24:42] [Rank 0] PRINT: step:2600/10000 val_loss:4.2738 svd_entropy: attn_qk:H=0.8787,top10E=0.10,eRank=344.1,q75/q25=28.44 attn_vo:H=0.8421,top10E=0.12,eRank=276.1,q75/q25=105.47 mlp_w1:H=0.8145,top10E=0.21,eRank=232.1,q75/q25=10.26 mlp_w2:H=0.9042,top10E=0.11,eRank=406.8,q75/q25=7.41 vo_prod:H=0.7415,top10E=0.24,eRank=146.4,q75/q25=12145.42 train_time:227053ms step_avg:87.33ms +[2025-08-22 15:24:42] [Rank 0] PRINT: step:2600/10000 val_loss:4.2738 svd_entropy: attn_qk:H=0.8787,top10E=0.10,eRank=344.1,q75/q25=28.44 attn_vo:H=0.8421,top10E=0.12,eRank=276.1,q75/q25=105.47 mlp_w1:H=0.8145,top10E=0.21,eRank=232.1,q75/q25=10.26 mlp_w2:H=0.9042,top10E=0.11,eRank=406.8,q75/q25=7.41 vo_prod:H=0.7415,top10E=0.24,eRank=146.4,q75/q25=12145.42 train_time:227053ms step_avg:87.33ms +[2025-08-22 15:24:42] [Rank 0] step:2601/10000 train_time:227073ms step_avg:87.30ms +[2025-08-22 15:24:42] [Rank 0] step:2601/10000 train_time:227073ms step_avg:87.30ms +[2025-08-22 15:24:44] [Rank 0] step:2621/10000 train_time:228848ms step_avg:87.31ms +[2025-08-22 15:24:44] [Rank 0] step:2621/10000 train_time:228848ms step_avg:87.31ms +[2025-08-22 15:24:46] [Rank 0] step:2641/10000 train_time:230624ms 
step_avg:87.32ms +[2025-08-22 15:24:46] [Rank 0] step:2641/10000 train_time:230624ms step_avg:87.32ms +[2025-08-22 15:24:47] [Rank 0] step:2661/10000 train_time:232402ms step_avg:87.34ms +[2025-08-22 15:24:47] [Rank 0] step:2661/10000 train_time:232402ms step_avg:87.34ms +[2025-08-22 15:24:49] [Rank 0] step:2681/10000 train_time:234178ms step_avg:87.35ms +[2025-08-22 15:24:49] [Rank 0] step:2681/10000 train_time:234178ms step_avg:87.35ms +[2025-08-22 15:24:51] [Rank 0] step:2701/10000 train_time:235958ms step_avg:87.36ms +[2025-08-22 15:24:51] [Rank 0] step:2701/10000 train_time:235958ms step_avg:87.36ms +[2025-08-22 15:24:53] [Rank 0] step:2721/10000 train_time:237739ms step_avg:87.37ms +[2025-08-22 15:24:53] [Rank 0] step:2721/10000 train_time:237739ms step_avg:87.37ms +[2025-08-22 15:24:54] [Rank 0] step:2741/10000 train_time:239519ms step_avg:87.38ms +[2025-08-22 15:24:54] [Rank 0] step:2741/10000 train_time:239519ms step_avg:87.38ms +[2025-08-22 15:24:56] [Rank 0] step:2761/10000 train_time:241299ms step_avg:87.40ms +[2025-08-22 15:24:56] [Rank 0] step:2761/10000 train_time:241299ms step_avg:87.40ms +[2025-08-22 15:24:58] [Rank 0] step:2781/10000 train_time:243079ms step_avg:87.41ms +[2025-08-22 15:24:58] [Rank 0] step:2781/10000 train_time:243079ms step_avg:87.41ms +[2025-08-22 15:25:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:25:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:25:14] [Rank 0] PRINT: step:2800/10000 val_loss:4.2251 svd_entropy: attn_qk:H=0.8811,top10E=0.10,eRank=349.5,q75/q25=25.90 attn_vo:H=0.8471,top10E=0.12,eRank=285.1,q75/q25=92.07 mlp_w1:H=0.8200,top10E=0.20,eRank=240.4,q75/q25=10.24 mlp_w2:H=0.9084,top10E=0.11,eRank=418.4,q75/q25=7.06 vo_prod:H=0.7475,top10E=0.23,eRank=152.1,q75/q25=8747.98 train_time:244862ms step_avg:87.45ms +[2025-08-22 15:25:14] [Rank 0] PRINT: step:2800/10000 val_loss:4.2251 svd_entropy: attn_qk:H=0.8811,top10E=0.10,eRank=349.5,q75/q25=25.90 attn_vo:H=0.8471,top10E=0.12,eRank=285.1,q75/q25=92.07 mlp_w1:H=0.8200,top10E=0.20,eRank=240.4,q75/q25=10.24 mlp_w2:H=0.9084,top10E=0.11,eRank=418.4,q75/q25=7.06 vo_prod:H=0.7475,top10E=0.23,eRank=152.1,q75/q25=8747.98 train_time:244862ms step_avg:87.45ms +[2025-08-22 15:25:14] [Rank 0] step:2801/10000 train_time:244881ms step_avg:87.43ms +[2025-08-22 15:25:14] [Rank 0] step:2801/10000 train_time:244881ms step_avg:87.43ms +[2025-08-22 15:25:16] [Rank 0] step:2821/10000 train_time:246674ms step_avg:87.44ms +[2025-08-22 15:25:16] [Rank 0] step:2821/10000 train_time:246674ms step_avg:87.44ms +[2025-08-22 15:25:17] [Rank 0] step:2841/10000 train_time:248450ms step_avg:87.45ms +[2025-08-22 15:25:17] [Rank 0] step:2841/10000 train_time:248450ms step_avg:87.45ms +[2025-08-22 15:25:19] [Rank 0] step:2861/10000 train_time:250225ms step_avg:87.46ms +[2025-08-22 15:25:19] [Rank 0] step:2861/10000 train_time:250225ms step_avg:87.46ms +[2025-08-22 15:25:21] [Rank 0] step:2881/10000 train_time:252097ms step_avg:87.50ms +[2025-08-22 15:25:21] [Rank 0] step:2881/10000 train_time:252097ms step_avg:87.50ms +[2025-08-22 15:25:23] [Rank 0] step:2901/10000 train_time:253870ms step_avg:87.51ms +[2025-08-22 15:25:23] [Rank 0] step:2901/10000 train_time:253870ms step_avg:87.51ms +[2025-08-22 15:25:25] [Rank 0] step:2921/10000 train_time:255755ms step_avg:87.56ms +[2025-08-22 15:25:25] [Rank 0] step:2921/10000 train_time:255755ms step_avg:87.56ms +[2025-08-22 
15:25:26] [Rank 0] step:2941/10000 train_time:257529ms step_avg:87.57ms +[2025-08-22 15:25:26] [Rank 0] step:2941/10000 train_time:257529ms step_avg:87.57ms +[2025-08-22 15:25:28] [Rank 0] step:2961/10000 train_time:259307ms step_avg:87.57ms +[2025-08-22 15:25:28] [Rank 0] step:2961/10000 train_time:259307ms step_avg:87.57ms +[2025-08-22 15:25:30] [Rank 0] step:2981/10000 train_time:261089ms step_avg:87.58ms +[2025-08-22 15:25:30] [Rank 0] step:2981/10000 train_time:261089ms step_avg:87.58ms +[2025-08-22 15:25:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:25:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:25:46] [Rank 0] PRINT: step:3000/10000 val_loss:4.1736 svd_entropy: attn_qk:H=0.8832,top10E=0.10,eRank=354.2,q75/q25=24.12 attn_vo:H=0.8514,top10E=0.11,eRank=292.9,q75/q25=80.69 mlp_w1:H=0.8252,top10E=0.20,eRank=248.3,q75/q25=10.17 mlp_w2:H=0.9120,top10E=0.10,eRank=428.4,q75/q25=6.74 vo_prod:H=0.7529,top10E=0.22,eRank=157.4,q75/q25=6504.76 train_time:262878ms step_avg:87.63ms +[2025-08-22 15:25:46] [Rank 0] PRINT: step:3000/10000 val_loss:4.1736 svd_entropy: attn_qk:H=0.8832,top10E=0.10,eRank=354.2,q75/q25=24.12 attn_vo:H=0.8514,top10E=0.11,eRank=292.9,q75/q25=80.69 mlp_w1:H=0.8252,top10E=0.20,eRank=248.3,q75/q25=10.17 mlp_w2:H=0.9120,top10E=0.10,eRank=428.4,q75/q25=6.74 vo_prod:H=0.7529,top10E=0.22,eRank=157.4,q75/q25=6504.76 train_time:262878ms step_avg:87.63ms +[2025-08-22 15:25:46] [Rank 0] step:3001/10000 train_time:262897ms step_avg:87.60ms +[2025-08-22 15:25:46] [Rank 0] step:3001/10000 train_time:262897ms step_avg:87.60ms +[2025-08-22 15:25:48] [Rank 0] step:3021/10000 train_time:264677ms step_avg:87.61ms +[2025-08-22 15:25:48] [Rank 0] step:3021/10000 train_time:264677ms step_avg:87.61ms +[2025-08-22 15:25:49] [Rank 0] step:3041/10000 train_time:266456ms 
step_avg:87.62ms +[2025-08-22 15:25:49] [Rank 0] step:3041/10000 train_time:266456ms step_avg:87.62ms +[2025-08-22 15:25:51] [Rank 0] step:3061/10000 train_time:268236ms step_avg:87.63ms +[2025-08-22 15:25:51] [Rank 0] step:3061/10000 train_time:268236ms step_avg:87.63ms +[2025-08-22 15:25:53] [Rank 0] step:3081/10000 train_time:270016ms step_avg:87.64ms +[2025-08-22 15:25:53] [Rank 0] step:3081/10000 train_time:270016ms step_avg:87.64ms +[2025-08-22 15:25:55] [Rank 0] step:3101/10000 train_time:271797ms step_avg:87.65ms +[2025-08-22 15:25:55] [Rank 0] step:3101/10000 train_time:271797ms step_avg:87.65ms +[2025-08-22 15:25:56] [Rank 0] step:3121/10000 train_time:273578ms step_avg:87.66ms +[2025-08-22 15:25:56] [Rank 0] step:3121/10000 train_time:273578ms step_avg:87.66ms +[2025-08-22 15:25:58] [Rank 0] step:3141/10000 train_time:275359ms step_avg:87.67ms +[2025-08-22 15:25:58] [Rank 0] step:3141/10000 train_time:275359ms step_avg:87.67ms +[2025-08-22 15:26:00] [Rank 0] step:3161/10000 train_time:277142ms step_avg:87.68ms +[2025-08-22 15:26:00] [Rank 0] step:3161/10000 train_time:277142ms step_avg:87.68ms +[2025-08-22 15:26:02] [Rank 0] step:3181/10000 train_time:278927ms step_avg:87.69ms +[2025-08-22 15:26:02] [Rank 0] step:3181/10000 train_time:278927ms step_avg:87.69ms +[2025-08-22 15:26:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:26:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:26:17] [Rank 0] PRINT: step:3200/10000 val_loss:4.1410 svd_entropy: attn_qk:H=0.8849,top10E=0.10,eRank=358.3,q75/q25=22.51 attn_vo:H=0.8550,top10E=0.11,eRank=299.9,q75/q25=72.22 mlp_w1:H=0.8298,top10E=0.19,eRank=255.7,q75/q25=10.06 mlp_w2:H=0.9150,top10E=0.10,eRank=437.2,q75/q25=6.48 vo_prod:H=0.7577,top10E=0.22,eRank=162.4,q75/q25=5080.58 train_time:280716ms step_avg:87.72ms +[2025-08-22 15:26:17] [Rank 0] PRINT: step:3200/10000 val_loss:4.1410 svd_entropy: attn_qk:H=0.8849,top10E=0.10,eRank=358.3,q75/q25=22.51 attn_vo:H=0.8550,top10E=0.11,eRank=299.9,q75/q25=72.22 mlp_w1:H=0.8298,top10E=0.19,eRank=255.7,q75/q25=10.06 mlp_w2:H=0.9150,top10E=0.10,eRank=437.2,q75/q25=6.48 vo_prod:H=0.7577,top10E=0.22,eRank=162.4,q75/q25=5080.58 train_time:280716ms step_avg:87.72ms +[2025-08-22 15:26:18] [Rank 0] step:3201/10000 train_time:280735ms step_avg:87.70ms +[2025-08-22 15:26:18] [Rank 0] step:3201/10000 train_time:280735ms step_avg:87.70ms +[2025-08-22 15:26:19] [Rank 0] step:3221/10000 train_time:282521ms step_avg:87.71ms +[2025-08-22 15:26:19] [Rank 0] step:3221/10000 train_time:282521ms step_avg:87.71ms +[2025-08-22 15:26:21] [Rank 0] step:3241/10000 train_time:284298ms step_avg:87.72ms +[2025-08-22 15:26:21] [Rank 0] step:3241/10000 train_time:284298ms step_avg:87.72ms +[2025-08-22 15:26:23] [Rank 0] step:3261/10000 train_time:286079ms step_avg:87.73ms +[2025-08-22 15:26:23] [Rank 0] step:3261/10000 train_time:286079ms step_avg:87.73ms +[2025-08-22 15:26:25] [Rank 0] step:3281/10000 train_time:287963ms step_avg:87.77ms +[2025-08-22 15:26:25] [Rank 0] step:3281/10000 train_time:287963ms step_avg:87.77ms +[2025-08-22 15:26:27] [Rank 0] step:3301/10000 train_time:289827ms step_avg:87.80ms +[2025-08-22 15:26:27] [Rank 0] step:3301/10000 train_time:289827ms step_avg:87.80ms +[2025-08-22 15:26:28] [Rank 0] step:3321/10000 train_time:291610ms step_avg:87.81ms +[2025-08-22 15:26:28] [Rank 0] step:3321/10000 train_time:291610ms step_avg:87.81ms +[2025-08-22 
15:26:30] [Rank 0] step:3341/10000 train_time:293394ms step_avg:87.82ms +[2025-08-22 15:26:30] [Rank 0] step:3341/10000 train_time:293394ms step_avg:87.82ms +[2025-08-22 15:26:32] [Rank 0] step:3361/10000 train_time:295180ms step_avg:87.82ms +[2025-08-22 15:26:32] [Rank 0] step:3361/10000 train_time:295180ms step_avg:87.82ms +[2025-08-22 15:26:34] [Rank 0] step:3381/10000 train_time:296965ms step_avg:87.83ms +[2025-08-22 15:26:34] [Rank 0] step:3381/10000 train_time:296965ms step_avg:87.83ms +[2025-08-22 15:26:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:26:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:26:49] [Rank 0] PRINT: step:3400/10000 val_loss:4.0962 svd_entropy: attn_qk:H=0.8866,top10E=0.10,eRank=362.2,q75/q25=21.31 attn_vo:H=0.8585,top10E=0.11,eRank=306.6,q75/q25=64.36 mlp_w1:H=0.8340,top10E=0.19,eRank=262.7,q75/q25=9.96 mlp_w2:H=0.9178,top10E=0.10,eRank=445.2,q75/q25=6.24 vo_prod:H=0.7624,top10E=0.21,eRank=167.3,q75/q25=3890.04 train_time:298754ms step_avg:87.87ms +[2025-08-22 15:26:49] [Rank 0] PRINT: step:3400/10000 val_loss:4.0962 svd_entropy: attn_qk:H=0.8866,top10E=0.10,eRank=362.2,q75/q25=21.31 attn_vo:H=0.8585,top10E=0.11,eRank=306.6,q75/q25=64.36 mlp_w1:H=0.8340,top10E=0.19,eRank=262.7,q75/q25=9.96 mlp_w2:H=0.9178,top10E=0.10,eRank=445.2,q75/q25=6.24 vo_prod:H=0.7624,top10E=0.21,eRank=167.3,q75/q25=3890.04 train_time:298754ms step_avg:87.87ms +[2025-08-22 15:26:49] [Rank 0] step:3401/10000 train_time:298772ms step_avg:87.85ms +[2025-08-22 15:26:49] [Rank 0] step:3401/10000 train_time:298772ms step_avg:87.85ms +[2025-08-22 15:26:51] [Rank 0] step:3421/10000 train_time:300552ms step_avg:87.86ms +[2025-08-22 15:26:51] [Rank 0] step:3421/10000 train_time:300552ms step_avg:87.86ms +[2025-08-22 15:26:53] [Rank 0] step:3441/10000 train_time:302334ms 
step_avg:87.86ms +[2025-08-22 15:26:53] [Rank 0] step:3441/10000 train_time:302334ms step_avg:87.86ms +[2025-08-22 15:26:55] [Rank 0] step:3461/10000 train_time:304117ms step_avg:87.87ms +[2025-08-22 15:26:55] [Rank 0] step:3461/10000 train_time:304117ms step_avg:87.87ms +[2025-08-22 15:26:56] [Rank 0] step:3481/10000 train_time:305902ms step_avg:87.88ms +[2025-08-22 15:26:56] [Rank 0] step:3481/10000 train_time:305902ms step_avg:87.88ms +[2025-08-22 15:26:58] [Rank 0] step:3501/10000 train_time:307690ms step_avg:87.89ms +[2025-08-22 15:26:58] [Rank 0] step:3501/10000 train_time:307690ms step_avg:87.89ms +[2025-08-22 15:27:00] [Rank 0] step:3521/10000 train_time:309477ms step_avg:87.89ms +[2025-08-22 15:27:00] [Rank 0] step:3521/10000 train_time:309477ms step_avg:87.89ms +[2025-08-22 15:27:02] [Rank 0] step:3541/10000 train_time:311263ms step_avg:87.90ms +[2025-08-22 15:27:02] [Rank 0] step:3541/10000 train_time:311263ms step_avg:87.90ms +[2025-08-22 15:27:04] [Rank 0] step:3561/10000 train_time:313052ms step_avg:87.91ms +[2025-08-22 15:27:04] [Rank 0] step:3561/10000 train_time:313052ms step_avg:87.91ms +[2025-08-22 15:27:05] [Rank 0] step:3581/10000 train_time:314842ms step_avg:87.92ms +[2025-08-22 15:27:05] [Rank 0] step:3581/10000 train_time:314842ms step_avg:87.92ms +[2025-08-22 15:27:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:27:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:27:21] [Rank 0] PRINT: step:3600/10000 val_loss:4.0800 svd_entropy: attn_qk:H=0.8880,top10E=0.10,eRank=365.6,q75/q25=20.32 attn_vo:H=0.8614,top10E=0.11,eRank=312.4,q75/q25=58.74 mlp_w1:H=0.8379,top10E=0.19,eRank=269.3,q75/q25=9.85 mlp_w2:H=0.9201,top10E=0.10,eRank=452.0,q75/q25=6.03 vo_prod:H=0.7666,top10E=0.21,eRank=171.9,q75/q25=3158.87 train_time:316635ms step_avg:87.95ms +[2025-08-22 15:27:21] [Rank 0] PRINT: step:3600/10000 val_loss:4.0800 svd_entropy: attn_qk:H=0.8880,top10E=0.10,eRank=365.6,q75/q25=20.32 attn_vo:H=0.8614,top10E=0.11,eRank=312.4,q75/q25=58.74 mlp_w1:H=0.8379,top10E=0.19,eRank=269.3,q75/q25=9.85 mlp_w2:H=0.9201,top10E=0.10,eRank=452.0,q75/q25=6.03 vo_prod:H=0.7666,top10E=0.21,eRank=171.9,q75/q25=3158.87 train_time:316635ms step_avg:87.95ms +[2025-08-22 15:27:21] [Rank 0] step:3601/10000 train_time:316654ms step_avg:87.93ms +[2025-08-22 15:27:21] [Rank 0] step:3601/10000 train_time:316654ms step_avg:87.93ms +[2025-08-22 15:27:23] [Rank 0] step:3621/10000 train_time:318440ms step_avg:87.94ms +[2025-08-22 15:27:23] [Rank 0] step:3621/10000 train_time:318440ms step_avg:87.94ms +[2025-08-22 15:27:25] [Rank 0] step:3641/10000 train_time:320221ms step_avg:87.95ms +[2025-08-22 15:27:25] [Rank 0] step:3641/10000 train_time:320221ms step_avg:87.95ms +[2025-08-22 15:27:27] [Rank 0] step:3661/10000 train_time:322004ms step_avg:87.96ms +[2025-08-22 15:27:27] [Rank 0] step:3661/10000 train_time:322004ms step_avg:87.96ms +[2025-08-22 15:27:28] [Rank 0] step:3681/10000 train_time:323871ms step_avg:87.98ms +[2025-08-22 15:27:28] [Rank 0] step:3681/10000 train_time:323871ms step_avg:87.98ms +[2025-08-22 15:27:30] [Rank 0] step:3701/10000 train_time:325789ms step_avg:88.03ms +[2025-08-22 15:27:30] [Rank 0] step:3701/10000 train_time:325789ms step_avg:88.03ms +[2025-08-22 15:27:32] [Rank 0] step:3721/10000 train_time:327603ms step_avg:88.04ms +[2025-08-22 15:27:32] [Rank 0] step:3721/10000 train_time:327603ms step_avg:88.04ms +[2025-08-22 
15:27:34] [Rank 0] step:3741/10000 train_time:329422ms step_avg:88.06ms +[2025-08-22 15:27:34] [Rank 0] step:3741/10000 train_time:329422ms step_avg:88.06ms +[2025-08-22 15:27:36] [Rank 0] step:3761/10000 train_time:331241ms step_avg:88.07ms +[2025-08-22 15:27:36] [Rank 0] step:3761/10000 train_time:331241ms step_avg:88.07ms +[2025-08-22 15:27:38] [Rank 0] step:3781/10000 train_time:333064ms step_avg:88.09ms +[2025-08-22 15:27:38] [Rank 0] step:3781/10000 train_time:333064ms step_avg:88.09ms +[2025-08-22 15:27:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:27:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:27:53] [Rank 0] PRINT: step:3800/10000 val_loss:4.0252 svd_entropy: attn_qk:H=0.8892,top10E=0.09,eRank=368.6,q75/q25=19.42 attn_vo:H=0.8640,top10E=0.10,eRank=317.7,q75/q25=54.26 mlp_w1:H=0.8414,top10E=0.18,eRank=275.4,q75/q25=9.71 mlp_w2:H=0.9220,top10E=0.10,eRank=457.8,q75/q25=5.84 vo_prod:H=0.7705,top10E=0.20,eRank=176.2,q75/q25=2599.50 train_time:334891ms step_avg:88.13ms +[2025-08-22 15:27:53] [Rank 0] PRINT: step:3800/10000 val_loss:4.0252 svd_entropy: attn_qk:H=0.8892,top10E=0.09,eRank=368.6,q75/q25=19.42 attn_vo:H=0.8640,top10E=0.10,eRank=317.7,q75/q25=54.26 mlp_w1:H=0.8414,top10E=0.18,eRank=275.4,q75/q25=9.71 mlp_w2:H=0.9220,top10E=0.10,eRank=457.8,q75/q25=5.84 vo_prod:H=0.7705,top10E=0.20,eRank=176.2,q75/q25=2599.50 train_time:334891ms step_avg:88.13ms +[2025-08-22 15:27:53] [Rank 0] step:3801/10000 train_time:334911ms step_avg:88.11ms +[2025-08-22 15:27:53] [Rank 0] step:3801/10000 train_time:334911ms step_avg:88.11ms +[2025-08-22 15:27:55] [Rank 0] step:3821/10000 train_time:336754ms step_avg:88.13ms +[2025-08-22 15:27:55] [Rank 0] step:3821/10000 train_time:336754ms step_avg:88.13ms +[2025-08-22 15:27:57] [Rank 0] step:3841/10000 train_time:338574ms 
step_avg:88.15ms +[2025-08-22 15:27:57] [Rank 0] step:3841/10000 train_time:338574ms step_avg:88.15ms +[2025-08-22 15:27:59] [Rank 0] step:3861/10000 train_time:340392ms step_avg:88.16ms +[2025-08-22 15:27:59] [Rank 0] step:3861/10000 train_time:340392ms step_avg:88.16ms +[2025-08-22 15:28:00] [Rank 0] step:3881/10000 train_time:342211ms step_avg:88.18ms +[2025-08-22 15:28:00] [Rank 0] step:3881/10000 train_time:342211ms step_avg:88.18ms +[2025-08-22 15:28:02] [Rank 0] step:3901/10000 train_time:344033ms step_avg:88.19ms +[2025-08-22 15:28:02] [Rank 0] step:3901/10000 train_time:344033ms step_avg:88.19ms +[2025-08-22 15:28:04] [Rank 0] step:3921/10000 train_time:345853ms step_avg:88.21ms +[2025-08-22 15:28:04] [Rank 0] step:3921/10000 train_time:345853ms step_avg:88.21ms +[2025-08-22 15:28:06] [Rank 0] step:3941/10000 train_time:347673ms step_avg:88.22ms +[2025-08-22 15:28:06] [Rank 0] step:3941/10000 train_time:347673ms step_avg:88.22ms +[2025-08-22 15:28:08] [Rank 0] step:3961/10000 train_time:349493ms step_avg:88.23ms +[2025-08-22 15:28:08] [Rank 0] step:3961/10000 train_time:349493ms step_avg:88.23ms +[2025-08-22 15:28:10] [Rank 0] step:3981/10000 train_time:351315ms step_avg:88.25ms +[2025-08-22 15:28:10] [Rank 0] step:3981/10000 train_time:351315ms step_avg:88.25ms +[2025-08-22 15:28:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:28:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:28:25] [Rank 0] PRINT: step:4000/10000 val_loss:3.9946 svd_entropy: attn_qk:H=0.8903,top10E=0.09,eRank=371.2,q75/q25=18.67 attn_vo:H=0.8664,top10E=0.10,eRank=322.4,q75/q25=50.56 mlp_w1:H=0.8446,top10E=0.18,eRank=281.1,q75/q25=9.59 mlp_w2:H=0.9237,top10E=0.09,eRank=463.2,q75/q25=5.68 vo_prod:H=0.7740,top10E=0.20,eRank=180.1,q75/q25=2219.67 train_time:353138ms step_avg:88.28ms +[2025-08-22 15:28:25] [Rank 0] PRINT: step:4000/10000 val_loss:3.9946 svd_entropy: attn_qk:H=0.8903,top10E=0.09,eRank=371.2,q75/q25=18.67 attn_vo:H=0.8664,top10E=0.10,eRank=322.4,q75/q25=50.56 mlp_w1:H=0.8446,top10E=0.18,eRank=281.1,q75/q25=9.59 mlp_w2:H=0.9237,top10E=0.09,eRank=463.2,q75/q25=5.68 vo_prod:H=0.7740,top10E=0.20,eRank=180.1,q75/q25=2219.67 train_time:353138ms step_avg:88.28ms +[2025-08-22 15:28:25] [Rank 0] step:4001/10000 train_time:353156ms step_avg:88.27ms +[2025-08-22 15:28:25] [Rank 0] step:4001/10000 train_time:353156ms step_avg:88.27ms +[2025-08-22 15:28:27] [Rank 0] step:4021/10000 train_time:354991ms step_avg:88.28ms +[2025-08-22 15:28:27] [Rank 0] step:4021/10000 train_time:354991ms step_avg:88.28ms +[2025-08-22 15:28:28] [Rank 0] step:4041/10000 train_time:356808ms step_avg:88.30ms +[2025-08-22 15:28:28] [Rank 0] step:4041/10000 train_time:356808ms step_avg:88.30ms +[2025-08-22 15:28:30] [Rank 0] step:4061/10000 train_time:358625ms step_avg:88.31ms +[2025-08-22 15:28:30] [Rank 0] step:4061/10000 train_time:358625ms step_avg:88.31ms +[2025-08-22 15:28:33] [Rank 0] step:4081/10000 train_time:360794ms step_avg:88.41ms +[2025-08-22 15:28:33] [Rank 0] step:4081/10000 train_time:360794ms step_avg:88.41ms +[2025-08-22 15:28:34] [Rank 0] step:4101/10000 train_time:362671ms step_avg:88.43ms +[2025-08-22 15:28:34] [Rank 0] step:4101/10000 train_time:362671ms step_avg:88.43ms +[2025-08-22 15:28:36] [Rank 0] step:4121/10000 train_time:364489ms step_avg:88.45ms +[2025-08-22 15:28:36] [Rank 0] step:4121/10000 train_time:364489ms step_avg:88.45ms +[2025-08-22 
15:28:38] [Rank 0] step:4141/10000 train_time:366311ms step_avg:88.46ms +[2025-08-22 15:28:38] [Rank 0] step:4141/10000 train_time:366311ms step_avg:88.46ms +[2025-08-22 15:28:40] [Rank 0] step:4161/10000 train_time:368131ms step_avg:88.47ms +[2025-08-22 15:28:40] [Rank 0] step:4161/10000 train_time:368131ms step_avg:88.47ms +[2025-08-22 15:28:42] [Rank 0] step:4181/10000 train_time:369952ms step_avg:88.48ms +[2025-08-22 15:28:42] [Rank 0] step:4181/10000 train_time:369952ms step_avg:88.48ms +[2025-08-22 15:28:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:28:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:28:57] [Rank 0] PRINT: step:4200/10000 val_loss:3.9779 svd_entropy: attn_qk:H=0.8912,top10E=0.09,eRank=373.6,q75/q25=18.11 attn_vo:H=0.8685,top10E=0.10,eRank=326.9,q75/q25=47.02 mlp_w1:H=0.8476,top10E=0.18,eRank=286.4,q75/q25=9.44 mlp_w2:H=0.9252,top10E=0.09,eRank=467.8,q75/q25=5.55 vo_prod:H=0.7774,top10E=0.19,eRank=184.0,q75/q25=1895.62 train_time:371777ms step_avg:88.52ms +[2025-08-22 15:28:57] [Rank 0] PRINT: step:4200/10000 val_loss:3.9779 svd_entropy: attn_qk:H=0.8912,top10E=0.09,eRank=373.6,q75/q25=18.11 attn_vo:H=0.8685,top10E=0.10,eRank=326.9,q75/q25=47.02 mlp_w1:H=0.8476,top10E=0.18,eRank=286.4,q75/q25=9.44 mlp_w2:H=0.9252,top10E=0.09,eRank=467.8,q75/q25=5.55 vo_prod:H=0.7774,top10E=0.19,eRank=184.0,q75/q25=1895.62 train_time:371777ms step_avg:88.52ms +[2025-08-22 15:28:57] [Rank 0] step:4201/10000 train_time:371796ms step_avg:88.50ms +[2025-08-22 15:28:57] [Rank 0] step:4201/10000 train_time:371796ms step_avg:88.50ms +[2025-08-22 15:28:59] [Rank 0] step:4221/10000 train_time:373617ms step_avg:88.51ms +[2025-08-22 15:28:59] [Rank 0] step:4221/10000 train_time:373617ms step_avg:88.51ms +[2025-08-22 15:29:01] [Rank 0] step:4241/10000 train_time:375441ms 
step_avg:88.53ms +[2025-08-22 15:29:01] [Rank 0] step:4241/10000 train_time:375441ms step_avg:88.53ms +[2025-08-22 15:29:03] [Rank 0] step:4261/10000 train_time:377263ms step_avg:88.54ms +[2025-08-22 15:29:03] [Rank 0] step:4261/10000 train_time:377263ms step_avg:88.54ms +[2025-08-22 15:29:05] [Rank 0] step:4281/10000 train_time:379086ms step_avg:88.55ms +[2025-08-22 15:29:05] [Rank 0] step:4281/10000 train_time:379086ms step_avg:88.55ms +[2025-08-22 15:29:07] [Rank 0] step:4301/10000 train_time:380908ms step_avg:88.56ms +[2025-08-22 15:29:07] [Rank 0] step:4301/10000 train_time:380908ms step_avg:88.56ms +[2025-08-22 15:29:08] [Rank 0] step:4321/10000 train_time:382732ms step_avg:88.57ms +[2025-08-22 15:29:08] [Rank 0] step:4321/10000 train_time:382732ms step_avg:88.57ms +[2025-08-22 15:29:10] [Rank 0] step:4341/10000 train_time:384552ms step_avg:88.59ms +[2025-08-22 15:29:10] [Rank 0] step:4341/10000 train_time:384552ms step_avg:88.59ms +[2025-08-22 15:29:12] [Rank 0] step:4361/10000 train_time:386374ms step_avg:88.60ms +[2025-08-22 15:29:12] [Rank 0] step:4361/10000 train_time:386374ms step_avg:88.60ms +[2025-08-22 15:29:14] [Rank 0] step:4381/10000 train_time:388198ms step_avg:88.61ms +[2025-08-22 15:29:14] [Rank 0] step:4381/10000 train_time:388198ms step_avg:88.61ms +[2025-08-22 15:29:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:29:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:29:30] [Rank 0] PRINT: step:4400/10000 val_loss:3.9509 svd_entropy: attn_qk:H=0.8921,top10E=0.09,eRank=375.8,q75/q25=17.67 attn_vo:H=0.8705,top10E=0.10,eRank=330.9,q75/q25=44.27 mlp_w1:H=0.8502,top10E=0.17,eRank=291.4,q75/q25=9.33 mlp_w2:H=0.9266,top10E=0.09,eRank=472.0,q75/q25=5.44 vo_prod:H=0.7806,top10E=0.19,eRank=187.7,q75/q25=1658.93 train_time:390028ms step_avg:88.64ms +[2025-08-22 15:29:30] [Rank 0] PRINT: step:4400/10000 val_loss:3.9509 svd_entropy: attn_qk:H=0.8921,top10E=0.09,eRank=375.8,q75/q25=17.67 attn_vo:H=0.8705,top10E=0.10,eRank=330.9,q75/q25=44.27 mlp_w1:H=0.8502,top10E=0.17,eRank=291.4,q75/q25=9.33 mlp_w2:H=0.9266,top10E=0.09,eRank=472.0,q75/q25=5.44 vo_prod:H=0.7806,top10E=0.19,eRank=187.7,q75/q25=1658.93 train_time:390028ms step_avg:88.64ms +[2025-08-22 15:29:30] [Rank 0] step:4401/10000 train_time:390046ms step_avg:88.63ms +[2025-08-22 15:29:30] [Rank 0] step:4401/10000 train_time:390046ms step_avg:88.63ms +[2025-08-22 15:29:31] [Rank 0] step:4421/10000 train_time:391878ms step_avg:88.64ms +[2025-08-22 15:29:31] [Rank 0] step:4421/10000 train_time:391878ms step_avg:88.64ms +[2025-08-22 15:29:33] [Rank 0] step:4441/10000 train_time:393695ms step_avg:88.65ms +[2025-08-22 15:29:33] [Rank 0] step:4441/10000 train_time:393695ms step_avg:88.65ms +[2025-08-22 15:29:35] [Rank 0] step:4461/10000 train_time:395610ms step_avg:88.68ms +[2025-08-22 15:29:35] [Rank 0] step:4461/10000 train_time:395610ms step_avg:88.68ms +[2025-08-22 15:29:37] [Rank 0] step:4481/10000 train_time:397485ms step_avg:88.70ms +[2025-08-22 15:29:37] [Rank 0] step:4481/10000 train_time:397485ms step_avg:88.70ms +[2025-08-22 15:29:39] [Rank 0] step:4501/10000 train_time:399308ms step_avg:88.72ms +[2025-08-22 15:29:39] [Rank 0] step:4501/10000 train_time:399308ms step_avg:88.72ms +[2025-08-22 15:29:41] [Rank 0] step:4521/10000 train_time:401134ms step_avg:88.73ms +[2025-08-22 15:29:41] [Rank 0] step:4521/10000 train_time:401134ms step_avg:88.73ms +[2025-08-22 
15:29:43] [Rank 0] step:4541/10000 train_time:402961ms step_avg:88.74ms +[2025-08-22 15:29:43] [Rank 0] step:4541/10000 train_time:402961ms step_avg:88.74ms +[2025-08-22 15:29:44] [Rank 0] step:4561/10000 train_time:404789ms step_avg:88.75ms +[2025-08-22 15:29:44] [Rank 0] step:4561/10000 train_time:404789ms step_avg:88.75ms +[2025-08-22 15:29:46] [Rank 0] step:4581/10000 train_time:406618ms step_avg:88.76ms +[2025-08-22 15:29:46] [Rank 0] step:4581/10000 train_time:406618ms step_avg:88.76ms +[2025-08-22 15:29:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:29:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:30:02] [Rank 0] PRINT: step:4600/10000 val_loss:3.9264 svd_entropy: attn_qk:H=0.8930,top10E=0.09,eRank=377.9,q75/q25=17.21 attn_vo:H=0.8723,top10E=0.10,eRank=334.8,q75/q25=41.81 mlp_w1:H=0.8528,top10E=0.17,eRank=296.3,q75/q25=9.17 mlp_w2:H=0.9278,top10E=0.09,eRank=475.9,q75/q25=5.34 vo_prod:H=0.7837,top10E=0.19,eRank=191.2,q75/q25=1426.16 train_time:408451ms step_avg:88.79ms +[2025-08-22 15:30:02] [Rank 0] PRINT: step:4600/10000 val_loss:3.9264 svd_entropy: attn_qk:H=0.8930,top10E=0.09,eRank=377.9,q75/q25=17.21 attn_vo:H=0.8723,top10E=0.10,eRank=334.8,q75/q25=41.81 mlp_w1:H=0.8528,top10E=0.17,eRank=296.3,q75/q25=9.17 mlp_w2:H=0.9278,top10E=0.09,eRank=475.9,q75/q25=5.34 vo_prod:H=0.7837,top10E=0.19,eRank=191.2,q75/q25=1426.16 train_time:408451ms step_avg:88.79ms +[2025-08-22 15:30:02] [Rank 0] step:4601/10000 train_time:408470ms step_avg:88.78ms +[2025-08-22 15:30:02] [Rank 0] step:4601/10000 train_time:408470ms step_avg:88.78ms +[2025-08-22 15:30:04] [Rank 0] step:4621/10000 train_time:410298ms step_avg:88.79ms +[2025-08-22 15:30:04] [Rank 0] step:4621/10000 train_time:410298ms step_avg:88.79ms +[2025-08-22 15:30:06] [Rank 0] step:4641/10000 train_time:412125ms 
step_avg:88.80ms +[2025-08-22 15:30:06] [Rank 0] step:4641/10000 train_time:412125ms step_avg:88.80ms +[2025-08-22 15:30:07] [Rank 0] step:4661/10000 train_time:413947ms step_avg:88.81ms +[2025-08-22 15:30:07] [Rank 0] step:4661/10000 train_time:413947ms step_avg:88.81ms +[2025-08-22 15:30:09] [Rank 0] step:4681/10000 train_time:415771ms step_avg:88.82ms +[2025-08-22 15:30:09] [Rank 0] step:4681/10000 train_time:415771ms step_avg:88.82ms +[2025-08-22 15:30:11] [Rank 0] step:4701/10000 train_time:417599ms step_avg:88.83ms +[2025-08-22 15:30:11] [Rank 0] step:4701/10000 train_time:417599ms step_avg:88.83ms +[2025-08-22 15:30:13] [Rank 0] step:4721/10000 train_time:419425ms step_avg:88.84ms +[2025-08-22 15:30:13] [Rank 0] step:4721/10000 train_time:419425ms step_avg:88.84ms +[2025-08-22 15:30:15] [Rank 0] step:4741/10000 train_time:421252ms step_avg:88.85ms +[2025-08-22 15:30:15] [Rank 0] step:4741/10000 train_time:421252ms step_avg:88.85ms +[2025-08-22 15:30:17] [Rank 0] step:4761/10000 train_time:423080ms step_avg:88.86ms +[2025-08-22 15:30:17] [Rank 0] step:4761/10000 train_time:423080ms step_avg:88.86ms +[2025-08-22 15:30:18] [Rank 0] step:4781/10000 train_time:424907ms step_avg:88.87ms +[2025-08-22 15:30:18] [Rank 0] step:4781/10000 train_time:424907ms step_avg:88.87ms +[2025-08-22 15:30:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:30:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:30:34] [Rank 0] PRINT: step:4800/10000 val_loss:3.9113 svd_entropy: attn_qk:H=0.8937,top10E=0.09,eRank=379.8,q75/q25=16.83 attn_vo:H=0.8740,top10E=0.10,eRank=338.4,q75/q25=39.76 mlp_w1:H=0.8551,top10E=0.17,eRank=300.8,q75/q25=9.08 mlp_w2:H=0.9289,top10E=0.09,eRank=479.3,q75/q25=5.25 vo_prod:H=0.7864,top10E=0.18,eRank=194.5,q75/q25=1273.54 train_time:426740ms step_avg:88.90ms +[2025-08-22 15:30:34] [Rank 0] PRINT: step:4800/10000 val_loss:3.9113 svd_entropy: attn_qk:H=0.8937,top10E=0.09,eRank=379.8,q75/q25=16.83 attn_vo:H=0.8740,top10E=0.10,eRank=338.4,q75/q25=39.76 mlp_w1:H=0.8551,top10E=0.17,eRank=300.8,q75/q25=9.08 mlp_w2:H=0.9289,top10E=0.09,eRank=479.3,q75/q25=5.25 vo_prod:H=0.7864,top10E=0.18,eRank=194.5,q75/q25=1273.54 train_time:426740ms step_avg:88.90ms +[2025-08-22 15:30:34] [Rank 0] step:4801/10000 train_time:426759ms step_avg:88.89ms +[2025-08-22 15:30:34] [Rank 0] step:4801/10000 train_time:426759ms step_avg:88.89ms +[2025-08-22 15:30:36] [Rank 0] step:4821/10000 train_time:428600ms step_avg:88.90ms +[2025-08-22 15:30:36] [Rank 0] step:4821/10000 train_time:428600ms step_avg:88.90ms +[2025-08-22 15:30:38] [Rank 0] step:4841/10000 train_time:430510ms step_avg:88.93ms +[2025-08-22 15:30:38] [Rank 0] step:4841/10000 train_time:430510ms step_avg:88.93ms +[2025-08-22 15:30:40] [Rank 0] step:4861/10000 train_time:432411ms step_avg:88.96ms +[2025-08-22 15:30:40] [Rank 0] step:4861/10000 train_time:432411ms step_avg:88.96ms +[2025-08-22 15:30:42] [Rank 0] step:4881/10000 train_time:434233ms step_avg:88.96ms +[2025-08-22 15:30:42] [Rank 0] step:4881/10000 train_time:434233ms step_avg:88.96ms +[2025-08-22 15:30:43] [Rank 0] step:4901/10000 train_time:436056ms step_avg:88.97ms +[2025-08-22 15:30:43] [Rank 0] step:4901/10000 train_time:436056ms step_avg:88.97ms +[2025-08-22 15:30:45] [Rank 0] step:4921/10000 train_time:437884ms step_avg:88.98ms +[2025-08-22 15:30:45] [Rank 0] step:4921/10000 train_time:437884ms step_avg:88.98ms +[2025-08-22 
15:30:47] [Rank 0] step:4941/10000 train_time:439714ms step_avg:88.99ms +[2025-08-22 15:30:47] [Rank 0] step:4941/10000 train_time:439714ms step_avg:88.99ms +[2025-08-22 15:30:49] [Rank 0] step:4961/10000 train_time:441541ms step_avg:89.00ms +[2025-08-22 15:30:49] [Rank 0] step:4961/10000 train_time:441541ms step_avg:89.00ms +[2025-08-22 15:30:51] [Rank 0] step:4981/10000 train_time:443373ms step_avg:89.01ms +[2025-08-22 15:30:51] [Rank 0] step:4981/10000 train_time:443373ms step_avg:89.01ms +[2025-08-22 15:30:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:30:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:31:07] [Rank 0] PRINT: step:5000/10000 val_loss:3.8940 svd_entropy: attn_qk:H=0.8944,top10E=0.09,eRank=381.5,q75/q25=16.48 attn_vo:H=0.8756,top10E=0.09,eRank=341.6,q75/q25=38.07 mlp_w1:H=0.8573,top10E=0.17,eRank=305.0,q75/q25=8.95 mlp_w2:H=0.9298,top10E=0.09,eRank=482.3,q75/q25=5.16 vo_prod:H=0.7890,top10E=0.18,eRank=197.6,q75/q25=1146.40 train_time:445208ms step_avg:89.04ms +[2025-08-22 15:31:07] [Rank 0] PRINT: step:5000/10000 val_loss:3.8940 svd_entropy: attn_qk:H=0.8944,top10E=0.09,eRank=381.5,q75/q25=16.48 attn_vo:H=0.8756,top10E=0.09,eRank=341.6,q75/q25=38.07 mlp_w1:H=0.8573,top10E=0.17,eRank=305.0,q75/q25=8.95 mlp_w2:H=0.9298,top10E=0.09,eRank=482.3,q75/q25=5.16 vo_prod:H=0.7890,top10E=0.18,eRank=197.6,q75/q25=1146.40 train_time:445208ms step_avg:89.04ms +[2025-08-22 15:31:07] [Rank 0] step:5001/10000 train_time:445227ms step_avg:89.03ms +[2025-08-22 15:31:07] [Rank 0] step:5001/10000 train_time:445227ms step_avg:89.03ms +[2025-08-22 15:31:08] [Rank 0] step:5021/10000 train_time:447042ms step_avg:89.03ms +[2025-08-22 15:31:08] [Rank 0] step:5021/10000 train_time:447042ms step_avg:89.03ms +[2025-08-22 15:31:10] [Rank 0] step:5041/10000 train_time:448871ms 
step_avg:89.04ms +[2025-08-22 15:31:10] [Rank 0] step:5041/10000 train_time:448871ms step_avg:89.04ms +[2025-08-22 15:31:12] [Rank 0] step:5061/10000 train_time:450698ms step_avg:89.05ms +[2025-08-22 15:31:12] [Rank 0] step:5061/10000 train_time:450698ms step_avg:89.05ms +[2025-08-22 15:31:14] [Rank 0] step:5081/10000 train_time:452528ms step_avg:89.06ms +[2025-08-22 15:31:14] [Rank 0] step:5081/10000 train_time:452528ms step_avg:89.06ms +[2025-08-22 15:31:16] [Rank 0] step:5101/10000 train_time:454359ms step_avg:89.07ms +[2025-08-22 15:31:16] [Rank 0] step:5101/10000 train_time:454359ms step_avg:89.07ms +[2025-08-22 15:31:18] [Rank 0] step:5121/10000 train_time:456190ms step_avg:89.08ms +[2025-08-22 15:31:18] [Rank 0] step:5121/10000 train_time:456190ms step_avg:89.08ms +[2025-08-22 15:31:19] [Rank 0] step:5141/10000 train_time:458022ms step_avg:89.09ms +[2025-08-22 15:31:19] [Rank 0] step:5141/10000 train_time:458022ms step_avg:89.09ms +[2025-08-22 15:31:21] [Rank 0] step:5161/10000 train_time:459853ms step_avg:89.10ms +[2025-08-22 15:31:21] [Rank 0] step:5161/10000 train_time:459853ms step_avg:89.10ms +[2025-08-22 15:31:23] [Rank 0] step:5181/10000 train_time:461688ms step_avg:89.11ms +[2025-08-22 15:31:23] [Rank 0] step:5181/10000 train_time:461688ms step_avg:89.11ms +[2025-08-22 15:31:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:31:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:31:39] [Rank 0] PRINT: step:5200/10000 val_loss:3.8718 svd_entropy: attn_qk:H=0.8950,top10E=0.09,eRank=383.0,q75/q25=16.24 attn_vo:H=0.8770,top10E=0.09,eRank=344.7,q75/q25=36.59 mlp_w1:H=0.8593,top10E=0.16,eRank=309.1,q75/q25=8.85 mlp_w2:H=0.9306,top10E=0.09,eRank=485.0,q75/q25=5.10 vo_prod:H=0.7914,top10E=0.18,eRank=200.5,q75/q25=1048.20 train_time:463548ms step_avg:89.14ms +[2025-08-22 15:31:39] [Rank 0] PRINT: step:5200/10000 val_loss:3.8718 svd_entropy: attn_qk:H=0.8950,top10E=0.09,eRank=383.0,q75/q25=16.24 attn_vo:H=0.8770,top10E=0.09,eRank=344.7,q75/q25=36.59 mlp_w1:H=0.8593,top10E=0.16,eRank=309.1,q75/q25=8.85 mlp_w2:H=0.9306,top10E=0.09,eRank=485.0,q75/q25=5.10 vo_prod:H=0.7914,top10E=0.18,eRank=200.5,q75/q25=1048.20 train_time:463548ms step_avg:89.14ms +[2025-08-22 15:31:39] [Rank 0] step:5201/10000 train_time:463567ms step_avg:89.13ms +[2025-08-22 15:31:39] [Rank 0] step:5201/10000 train_time:463567ms step_avg:89.13ms +[2025-08-22 15:31:41] [Rank 0] step:5221/10000 train_time:465518ms step_avg:89.16ms +[2025-08-22 15:31:41] [Rank 0] step:5221/10000 train_time:465518ms step_avg:89.16ms +[2025-08-22 15:31:43] [Rank 0] step:5241/10000 train_time:467452ms step_avg:89.19ms +[2025-08-22 15:31:43] [Rank 0] step:5241/10000 train_time:467452ms step_avg:89.19ms +[2025-08-22 15:31:45] [Rank 0] step:5261/10000 train_time:469307ms step_avg:89.20ms +[2025-08-22 15:31:45] [Rank 0] step:5261/10000 train_time:469307ms step_avg:89.20ms +[2025-08-22 15:31:47] [Rank 0] step:5281/10000 train_time:471164ms step_avg:89.22ms +[2025-08-22 15:31:47] [Rank 0] step:5281/10000 train_time:471164ms step_avg:89.22ms +[2025-08-22 15:31:48] [Rank 0] step:5301/10000 train_time:473031ms step_avg:89.23ms +[2025-08-22 15:31:48] [Rank 0] step:5301/10000 train_time:473031ms step_avg:89.23ms +[2025-08-22 15:31:50] [Rank 0] step:5321/10000 train_time:474891ms step_avg:89.25ms +[2025-08-22 15:31:50] [Rank 0] step:5321/10000 train_time:474891ms step_avg:89.25ms +[2025-08-22 
15:31:52] [Rank 0] step:5341/10000 train_time:476752ms step_avg:89.26ms +[2025-08-22 15:31:52] [Rank 0] step:5341/10000 train_time:476752ms step_avg:89.26ms +[2025-08-22 15:31:54] [Rank 0] step:5361/10000 train_time:478613ms step_avg:89.28ms +[2025-08-22 15:31:54] [Rank 0] step:5361/10000 train_time:478613ms step_avg:89.28ms +[2025-08-22 15:31:56] [Rank 0] step:5381/10000 train_time:480473ms step_avg:89.29ms +[2025-08-22 15:31:56] [Rank 0] step:5381/10000 train_time:480473ms step_avg:89.29ms +[2025-08-22 15:31:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:31:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:32:11] [Rank 0] PRINT: step:5400/10000 val_loss:3.8591 svd_entropy: attn_qk:H=0.8955,top10E=0.09,eRank=384.3,q75/q25=15.93 attn_vo:H=0.8782,top10E=0.09,eRank=347.4,q75/q25=35.27 mlp_w1:H=0.8612,top10E=0.16,eRank=312.9,q75/q25=8.76 mlp_w2:H=0.9314,top10E=0.09,eRank=487.4,q75/q25=5.02 vo_prod:H=0.7935,top10E=0.18,eRank=203.0,q75/q25=945.02 train_time:482334ms step_avg:89.32ms +[2025-08-22 15:32:11] [Rank 0] PRINT: step:5400/10000 val_loss:3.8591 svd_entropy: attn_qk:H=0.8955,top10E=0.09,eRank=384.3,q75/q25=15.93 attn_vo:H=0.8782,top10E=0.09,eRank=347.4,q75/q25=35.27 mlp_w1:H=0.8612,top10E=0.16,eRank=312.9,q75/q25=8.76 mlp_w2:H=0.9314,top10E=0.09,eRank=487.4,q75/q25=5.02 vo_prod:H=0.7935,top10E=0.18,eRank=203.0,q75/q25=945.02 train_time:482334ms step_avg:89.32ms +[2025-08-22 15:32:11] [Rank 0] step:5401/10000 train_time:482353ms step_avg:89.31ms +[2025-08-22 15:32:11] [Rank 0] step:5401/10000 train_time:482353ms step_avg:89.31ms +[2025-08-22 15:32:13] [Rank 0] step:5421/10000 train_time:484227ms step_avg:89.32ms +[2025-08-22 15:32:13] [Rank 0] step:5421/10000 train_time:484227ms step_avg:89.32ms +[2025-08-22 15:32:15] [Rank 0] step:5441/10000 train_time:486079ms 
step_avg:89.34ms +[2025-08-22 15:32:15] [Rank 0] step:5441/10000 train_time:486079ms step_avg:89.34ms +[2025-08-22 15:32:17] [Rank 0] step:5461/10000 train_time:487938ms step_avg:89.35ms +[2025-08-22 15:32:17] [Rank 0] step:5461/10000 train_time:487938ms step_avg:89.35ms +[2025-08-22 15:32:19] [Rank 0] step:5481/10000 train_time:489794ms step_avg:89.36ms +[2025-08-22 15:32:19] [Rank 0] step:5481/10000 train_time:489794ms step_avg:89.36ms +[2025-08-22 15:32:21] [Rank 0] step:5501/10000 train_time:491656ms step_avg:89.38ms +[2025-08-22 15:32:21] [Rank 0] step:5501/10000 train_time:491656ms step_avg:89.38ms +[2025-08-22 15:32:23] [Rank 0] step:5521/10000 train_time:493521ms step_avg:89.39ms +[2025-08-22 15:32:23] [Rank 0] step:5521/10000 train_time:493521ms step_avg:89.39ms +[2025-08-22 15:32:24] [Rank 0] step:5541/10000 train_time:495382ms step_avg:89.40ms +[2025-08-22 15:32:24] [Rank 0] step:5541/10000 train_time:495382ms step_avg:89.40ms +[2025-08-22 15:32:26] [Rank 0] step:5561/10000 train_time:497242ms step_avg:89.42ms +[2025-08-22 15:32:26] [Rank 0] step:5561/10000 train_time:497242ms step_avg:89.42ms +[2025-08-22 15:32:28] [Rank 0] step:5581/10000 train_time:499104ms step_avg:89.43ms +[2025-08-22 15:32:28] [Rank 0] step:5581/10000 train_time:499104ms step_avg:89.43ms +[2025-08-22 15:32:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:32:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:32:44] [Rank 0] PRINT: step:5600/10000 val_loss:3.8516 svd_entropy: attn_qk:H=0.8960,top10E=0.09,eRank=385.6,q75/q25=15.73 attn_vo:H=0.8794,top10E=0.09,eRank=350.0,q75/q25=34.10 mlp_w1:H=0.8628,top10E=0.16,eRank=316.4,q75/q25=8.68 mlp_w2:H=0.9320,top10E=0.09,eRank=489.5,q75/q25=4.98 vo_prod:H=0.7956,top10E=0.17,eRank=205.6,q75/q25=870.22 train_time:500971ms step_avg:89.46ms +[2025-08-22 15:32:44] [Rank 0] PRINT: step:5600/10000 val_loss:3.8516 svd_entropy: attn_qk:H=0.8960,top10E=0.09,eRank=385.6,q75/q25=15.73 attn_vo:H=0.8794,top10E=0.09,eRank=350.0,q75/q25=34.10 mlp_w1:H=0.8628,top10E=0.16,eRank=316.4,q75/q25=8.68 mlp_w2:H=0.9320,top10E=0.09,eRank=489.5,q75/q25=4.98 vo_prod:H=0.7956,top10E=0.17,eRank=205.6,q75/q25=870.22 train_time:500971ms step_avg:89.46ms +[2025-08-22 15:32:44] [Rank 0] step:5601/10000 train_time:500989ms step_avg:89.45ms +[2025-08-22 15:32:44] [Rank 0] step:5601/10000 train_time:500989ms step_avg:89.45ms +[2025-08-22 15:32:46] [Rank 0] step:5621/10000 train_time:502857ms step_avg:89.46ms +[2025-08-22 15:32:46] [Rank 0] step:5621/10000 train_time:502857ms step_avg:89.46ms +[2025-08-22 15:32:48] [Rank 0] step:5641/10000 train_time:504808ms step_avg:89.49ms +[2025-08-22 15:32:48] [Rank 0] step:5641/10000 train_time:504808ms step_avg:89.49ms +[2025-08-22 15:32:49] [Rank 0] step:5661/10000 train_time:506667ms step_avg:89.50ms +[2025-08-22 15:32:49] [Rank 0] step:5661/10000 train_time:506667ms step_avg:89.50ms +[2025-08-22 15:32:51] [Rank 0] step:5681/10000 train_time:508533ms step_avg:89.51ms +[2025-08-22 15:32:51] [Rank 0] step:5681/10000 train_time:508533ms step_avg:89.51ms +[2025-08-22 15:32:53] [Rank 0] step:5701/10000 train_time:510396ms step_avg:89.53ms +[2025-08-22 15:32:53] [Rank 0] step:5701/10000 train_time:510396ms step_avg:89.53ms +[2025-08-22 15:32:55] [Rank 0] step:5721/10000 train_time:512263ms step_avg:89.54ms +[2025-08-22 15:32:55] [Rank 0] step:5721/10000 train_time:512263ms step_avg:89.54ms +[2025-08-22 15:32:57] 
[Rank 0] step:5741/10000 train_time:514124ms step_avg:89.55ms +[2025-08-22 15:32:57] [Rank 0] step:5741/10000 train_time:514124ms step_avg:89.55ms +[2025-08-22 15:32:59] [Rank 0] step:5761/10000 train_time:515989ms step_avg:89.57ms +[2025-08-22 15:32:59] [Rank 0] step:5761/10000 train_time:515989ms step_avg:89.57ms +[2025-08-22 15:33:01] [Rank 0] step:5781/10000 train_time:517853ms step_avg:89.58ms +[2025-08-22 15:33:01] [Rank 0] step:5781/10000 train_time:517853ms step_avg:89.58ms +[2025-08-22 15:33:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:33:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:33:16] [Rank 0] PRINT: step:5800/10000 val_loss:3.8322 svd_entropy: attn_qk:H=0.8964,top10E=0.09,eRank=386.7,q75/q25=15.51 attn_vo:H=0.8805,top10E=0.09,eRank=352.4,q75/q25=33.02 mlp_w1:H=0.8645,top10E=0.16,eRank=319.8,q75/q25=8.58 mlp_w2:H=0.9326,top10E=0.09,eRank=491.5,q75/q25=4.94 vo_prod:H=0.7976,top10E=0.17,eRank=208.1,q75/q25=819.85 train_time:519722ms step_avg:89.61ms +[2025-08-22 15:33:16] [Rank 0] PRINT: step:5800/10000 val_loss:3.8322 svd_entropy: attn_qk:H=0.8964,top10E=0.09,eRank=386.7,q75/q25=15.51 attn_vo:H=0.8805,top10E=0.09,eRank=352.4,q75/q25=33.02 mlp_w1:H=0.8645,top10E=0.16,eRank=319.8,q75/q25=8.58 mlp_w2:H=0.9326,top10E=0.09,eRank=491.5,q75/q25=4.94 vo_prod:H=0.7976,top10E=0.17,eRank=208.1,q75/q25=819.85 train_time:519722ms step_avg:89.61ms +[2025-08-22 15:33:16] [Rank 0] step:5801/10000 train_time:519741ms step_avg:89.60ms +[2025-08-22 15:33:16] [Rank 0] step:5801/10000 train_time:519741ms step_avg:89.60ms +[2025-08-22 15:33:18] [Rank 0] step:5821/10000 train_time:521603ms step_avg:89.61ms +[2025-08-22 15:33:18] [Rank 0] step:5821/10000 train_time:521603ms step_avg:89.61ms +[2025-08-22 15:33:20] [Rank 0] step:5841/10000 train_time:523458ms step_avg:89.62ms 
+[2025-08-22 15:33:20] [Rank 0] step:5841/10000 train_time:523458ms step_avg:89.62ms +[2025-08-22 15:33:22] [Rank 0] step:5861/10000 train_time:525321ms step_avg:89.63ms +[2025-08-22 15:33:22] [Rank 0] step:5861/10000 train_time:525321ms step_avg:89.63ms +[2025-08-22 15:33:24] [Rank 0] step:5881/10000 train_time:527178ms step_avg:89.64ms +[2025-08-22 15:33:24] [Rank 0] step:5881/10000 train_time:527178ms step_avg:89.64ms +[2025-08-22 15:33:26] [Rank 0] step:5901/10000 train_time:529037ms step_avg:89.65ms +[2025-08-22 15:33:26] [Rank 0] step:5901/10000 train_time:529037ms step_avg:89.65ms +[2025-08-22 15:33:27] [Rank 0] step:5921/10000 train_time:530896ms step_avg:89.66ms +[2025-08-22 15:33:27] [Rank 0] step:5921/10000 train_time:530896ms step_avg:89.66ms +[2025-08-22 15:33:29] [Rank 0] step:5941/10000 train_time:532760ms step_avg:89.68ms +[2025-08-22 15:33:29] [Rank 0] step:5941/10000 train_time:532760ms step_avg:89.68ms +[2025-08-22 15:33:31] [Rank 0] step:5961/10000 train_time:534620ms step_avg:89.69ms +[2025-08-22 15:33:31] [Rank 0] step:5961/10000 train_time:534620ms step_avg:89.69ms +[2025-08-22 15:33:33] [Rank 0] step:5981/10000 train_time:536479ms step_avg:89.70ms +[2025-08-22 15:33:33] [Rank 0] step:5981/10000 train_time:536479ms step_avg:89.70ms +[2025-08-22 15:33:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:33:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:33:48] [Rank 0] PRINT: step:6000/10000 val_loss:3.8091 svd_entropy: attn_qk:H=0.8968,top10E=0.09,eRank=387.7,q75/q25=15.34 attn_vo:H=0.8816,top10E=0.09,eRank=354.8,q75/q25=31.99 mlp_w1:H=0.8661,top10E=0.16,eRank=323.2,q75/q25=8.49 mlp_w2:H=0.9332,top10E=0.09,eRank=493.3,q75/q25=4.89 vo_prod:H=0.7994,top10E=0.17,eRank=210.3,q75/q25=745.66 train_time:538340ms step_avg:89.72ms +[2025-08-22 15:33:48] [Rank 0] PRINT: step:6000/10000 val_loss:3.8091 svd_entropy: attn_qk:H=0.8968,top10E=0.09,eRank=387.7,q75/q25=15.34 attn_vo:H=0.8816,top10E=0.09,eRank=354.8,q75/q25=31.99 mlp_w1:H=0.8661,top10E=0.16,eRank=323.2,q75/q25=8.49 mlp_w2:H=0.9332,top10E=0.09,eRank=493.3,q75/q25=4.89 vo_prod:H=0.7994,top10E=0.17,eRank=210.3,q75/q25=745.66 train_time:538340ms step_avg:89.72ms +[2025-08-22 15:33:49] [Rank 0] step:6001/10000 train_time:538359ms step_avg:89.71ms +[2025-08-22 15:33:49] [Rank 0] step:6001/10000 train_time:538359ms step_avg:89.71ms +[2025-08-22 15:33:51] [Rank 0] step:6021/10000 train_time:540305ms step_avg:89.74ms +[2025-08-22 15:33:51] [Rank 0] step:6021/10000 train_time:540305ms step_avg:89.74ms +[2025-08-22 15:33:52] [Rank 0] step:6041/10000 train_time:542167ms step_avg:89.75ms +[2025-08-22 15:33:52] [Rank 0] step:6041/10000 train_time:542167ms step_avg:89.75ms +[2025-08-22 15:33:54] [Rank 0] step:6061/10000 train_time:544032ms step_avg:89.76ms +[2025-08-22 15:33:54] [Rank 0] step:6061/10000 train_time:544032ms step_avg:89.76ms +[2025-08-22 15:33:56] [Rank 0] step:6081/10000 train_time:545891ms step_avg:89.77ms +[2025-08-22 15:33:56] [Rank 0] step:6081/10000 train_time:545891ms step_avg:89.77ms +[2025-08-22 15:33:58] [Rank 0] step:6101/10000 train_time:547757ms step_avg:89.78ms +[2025-08-22 15:33:58] [Rank 0] step:6101/10000 train_time:547757ms step_avg:89.78ms +[2025-08-22 15:34:00] [Rank 0] step:6121/10000 train_time:549895ms step_avg:89.84ms +[2025-08-22 15:34:00] [Rank 0] step:6121/10000 train_time:549895ms step_avg:89.84ms +[2025-08-22 15:34:02] 
[Rank 0] step:6141/10000 train_time:551770ms step_avg:89.85ms +[2025-08-22 15:34:02] [Rank 0] step:6141/10000 train_time:551770ms step_avg:89.85ms +[2025-08-22 15:34:04] [Rank 0] step:6161/10000 train_time:553633ms step_avg:89.86ms +[2025-08-22 15:34:04] [Rank 0] step:6161/10000 train_time:553633ms step_avg:89.86ms +[2025-08-22 15:34:06] [Rank 0] step:6181/10000 train_time:555498ms step_avg:89.87ms +[2025-08-22 15:34:06] [Rank 0] step:6181/10000 train_time:555498ms step_avg:89.87ms +[2025-08-22 15:34:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:34:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:34:21] [Rank 0] PRINT: step:6200/10000 val_loss:3.7946 svd_entropy: attn_qk:H=0.8971,top10E=0.09,eRank=388.6,q75/q25=15.16 attn_vo:H=0.8826,top10E=0.09,eRank=356.9,q75/q25=31.11 mlp_w1:H=0.8676,top10E=0.16,eRank=326.4,q75/q25=8.40 mlp_w2:H=0.9336,top10E=0.09,eRank=494.9,q75/q25=4.85 vo_prod:H=0.8011,top10E=0.17,eRank=212.4,q75/q25=712.73 train_time:557363ms step_avg:89.90ms +[2025-08-22 15:34:21] [Rank 0] PRINT: step:6200/10000 val_loss:3.7946 svd_entropy: attn_qk:H=0.8971,top10E=0.09,eRank=388.6,q75/q25=15.16 attn_vo:H=0.8826,top10E=0.09,eRank=356.9,q75/q25=31.11 mlp_w1:H=0.8676,top10E=0.16,eRank=326.4,q75/q25=8.40 mlp_w2:H=0.9336,top10E=0.09,eRank=494.9,q75/q25=4.85 vo_prod:H=0.8011,top10E=0.17,eRank=212.4,q75/q25=712.73 train_time:557363ms step_avg:89.90ms +[2025-08-22 15:34:21] [Rank 0] step:6201/10000 train_time:557382ms step_avg:89.89ms +[2025-08-22 15:34:21] [Rank 0] step:6201/10000 train_time:557382ms step_avg:89.89ms +[2025-08-22 15:34:23] [Rank 0] step:6221/10000 train_time:559237ms step_avg:89.89ms +[2025-08-22 15:34:23] [Rank 0] step:6221/10000 train_time:559237ms step_avg:89.89ms +[2025-08-22 15:34:25] [Rank 0] step:6241/10000 train_time:561092ms step_avg:89.90ms 
+[2025-08-22 15:34:25] [Rank 0] step:6241/10000 train_time:561092ms step_avg:89.90ms +[2025-08-22 15:34:27] [Rank 0] step:6261/10000 train_time:562953ms step_avg:89.91ms +[2025-08-22 15:34:27] [Rank 0] step:6261/10000 train_time:562953ms step_avg:89.91ms +[2025-08-22 15:34:29] [Rank 0] step:6281/10000 train_time:564819ms step_avg:89.93ms +[2025-08-22 15:34:29] [Rank 0] step:6281/10000 train_time:564819ms step_avg:89.93ms +[2025-08-22 15:34:31] [Rank 0] step:6301/10000 train_time:566684ms step_avg:89.94ms +[2025-08-22 15:34:31] [Rank 0] step:6301/10000 train_time:566684ms step_avg:89.94ms +[2025-08-22 15:34:32] [Rank 0] step:6321/10000 train_time:568549ms step_avg:89.95ms +[2025-08-22 15:34:32] [Rank 0] step:6321/10000 train_time:568549ms step_avg:89.95ms +[2025-08-22 15:34:34] [Rank 0] step:6341/10000 train_time:570414ms step_avg:89.96ms +[2025-08-22 15:34:34] [Rank 0] step:6341/10000 train_time:570414ms step_avg:89.96ms +[2025-08-22 15:34:36] [Rank 0] step:6361/10000 train_time:572287ms step_avg:89.97ms +[2025-08-22 15:34:36] [Rank 0] step:6361/10000 train_time:572287ms step_avg:89.97ms +[2025-08-22 15:34:38] [Rank 0] step:6381/10000 train_time:574157ms step_avg:89.98ms +[2025-08-22 15:34:38] [Rank 0] step:6381/10000 train_time:574157ms step_avg:89.98ms +[2025-08-22 15:34:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:34:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:34:54] [Rank 0] PRINT: step:6400/10000 val_loss:3.7786 svd_entropy: attn_qk:H=0.8974,top10E=0.09,eRank=389.3,q75/q25=15.07 attn_vo:H=0.8835,top10E=0.09,eRank=358.9,q75/q25=30.19 mlp_w1:H=0.8690,top10E=0.15,eRank=329.4,q75/q25=8.33 mlp_w2:H=0.9341,top10E=0.09,eRank=496.3,q75/q25=4.81 vo_prod:H=0.8027,top10E=0.17,eRank=214.4,q75/q25=655.49 train_time:576025ms step_avg:90.00ms +[2025-08-22 15:34:54] [Rank 0] PRINT: step:6400/10000 val_loss:3.7786 svd_entropy: attn_qk:H=0.8974,top10E=0.09,eRank=389.3,q75/q25=15.07 attn_vo:H=0.8835,top10E=0.09,eRank=358.9,q75/q25=30.19 mlp_w1:H=0.8690,top10E=0.15,eRank=329.4,q75/q25=8.33 mlp_w2:H=0.9341,top10E=0.09,eRank=496.3,q75/q25=4.81 vo_prod:H=0.8027,top10E=0.17,eRank=214.4,q75/q25=655.49 train_time:576025ms step_avg:90.00ms +[2025-08-22 15:34:54] [Rank 0] step:6401/10000 train_time:576044ms step_avg:89.99ms +[2025-08-22 15:34:54] [Rank 0] step:6401/10000 train_time:576044ms step_avg:89.99ms +[2025-08-22 15:34:56] [Rank 0] step:6421/10000 train_time:577914ms step_avg:90.00ms +[2025-08-22 15:34:56] [Rank 0] step:6421/10000 train_time:577914ms step_avg:90.00ms +[2025-08-22 15:34:58] [Rank 0] step:6441/10000 train_time:579780ms step_avg:90.01ms +[2025-08-22 15:34:58] [Rank 0] step:6441/10000 train_time:579780ms step_avg:90.01ms +[2025-08-22 15:35:00] [Rank 0] step:6461/10000 train_time:581650ms step_avg:90.02ms +[2025-08-22 15:35:00] [Rank 0] step:6461/10000 train_time:581650ms step_avg:90.02ms +[2025-08-22 15:35:01] [Rank 0] step:6481/10000 train_time:583526ms step_avg:90.04ms +[2025-08-22 15:35:01] [Rank 0] step:6481/10000 train_time:583526ms step_avg:90.04ms +[2025-08-22 15:35:03] [Rank 0] step:6501/10000 train_time:585392ms step_avg:90.05ms +[2025-08-22 15:35:03] [Rank 0] step:6501/10000 train_time:585392ms step_avg:90.05ms +[2025-08-22 15:35:05] [Rank 0] step:6521/10000 train_time:587259ms step_avg:90.06ms +[2025-08-22 15:35:05] [Rank 0] step:6521/10000 train_time:587259ms step_avg:90.06ms +[2025-08-22 15:35:07] 
[Rank 0] step:6541/10000 train_time:589131ms step_avg:90.07ms +[2025-08-22 15:35:07] [Rank 0] step:6541/10000 train_time:589131ms step_avg:90.07ms +[2025-08-22 15:35:09] [Rank 0] step:6561/10000 train_time:591001ms step_avg:90.08ms +[2025-08-22 15:35:09] [Rank 0] step:6561/10000 train_time:591001ms step_avg:90.08ms +[2025-08-22 15:35:11] [Rank 0] step:6581/10000 train_time:592867ms step_avg:90.09ms +[2025-08-22 15:35:11] [Rank 0] step:6581/10000 train_time:592867ms step_avg:90.09ms +[2025-08-22 15:35:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:35:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:35:26] [Rank 0] PRINT: step:6600/10000 val_loss:3.7625 svd_entropy: attn_qk:H=0.8977,top10E=0.09,eRank=390.0,q75/q25=14.93 attn_vo:H=0.8843,top10E=0.09,eRank=360.7,q75/q25=29.61 mlp_w1:H=0.8701,top10E=0.15,eRank=332.0,q75/q25=8.25 mlp_w2:H=0.9344,top10E=0.09,eRank=497.5,q75/q25=4.79 vo_prod:H=0.8041,top10E=0.17,eRank=216.2,q75/q25=619.26 train_time:594739ms step_avg:90.11ms +[2025-08-22 15:35:26] [Rank 0] PRINT: step:6600/10000 val_loss:3.7625 svd_entropy: attn_qk:H=0.8977,top10E=0.09,eRank=390.0,q75/q25=14.93 attn_vo:H=0.8843,top10E=0.09,eRank=360.7,q75/q25=29.61 mlp_w1:H=0.8701,top10E=0.15,eRank=332.0,q75/q25=8.25 mlp_w2:H=0.9344,top10E=0.09,eRank=497.5,q75/q25=4.79 vo_prod:H=0.8041,top10E=0.17,eRank=216.2,q75/q25=619.26 train_time:594739ms step_avg:90.11ms +[2025-08-22 15:35:27] [Rank 0] step:6601/10000 train_time:594758ms step_avg:90.10ms +[2025-08-22 15:35:27] [Rank 0] step:6601/10000 train_time:594758ms step_avg:90.10ms +[2025-08-22 15:35:28] [Rank 0] step:6621/10000 train_time:596625ms step_avg:90.11ms +[2025-08-22 15:35:28] [Rank 0] step:6621/10000 train_time:596625ms step_avg:90.11ms +[2025-08-22 15:35:30] [Rank 0] step:6641/10000 train_time:598497ms step_avg:90.12ms 
+[2025-08-22 15:35:30] [Rank 0] step:6641/10000 train_time:598497ms step_avg:90.12ms +[2025-08-22 15:35:32] [Rank 0] step:6661/10000 train_time:600358ms step_avg:90.13ms +[2025-08-22 15:35:32] [Rank 0] step:6661/10000 train_time:600358ms step_avg:90.13ms +[2025-08-22 15:35:34] [Rank 0] step:6681/10000 train_time:602240ms step_avg:90.14ms +[2025-08-22 15:35:34] [Rank 0] step:6681/10000 train_time:602240ms step_avg:90.14ms +[2025-08-22 15:35:36] [Rank 0] step:6701/10000 train_time:604143ms step_avg:90.16ms +[2025-08-22 15:35:36] [Rank 0] step:6701/10000 train_time:604143ms step_avg:90.16ms +[2025-08-22 15:35:38] [Rank 0] step:6721/10000 train_time:606037ms step_avg:90.17ms +[2025-08-22 15:35:38] [Rank 0] step:6721/10000 train_time:606037ms step_avg:90.17ms +[2025-08-22 15:35:40] [Rank 0] step:6741/10000 train_time:607929ms step_avg:90.18ms +[2025-08-22 15:35:40] [Rank 0] step:6741/10000 train_time:607929ms step_avg:90.18ms +[2025-08-22 15:35:42] [Rank 0] step:6761/10000 train_time:609822ms step_avg:90.20ms +[2025-08-22 15:35:42] [Rank 0] step:6761/10000 train_time:609822ms step_avg:90.20ms +[2025-08-22 15:35:44] [Rank 0] step:6781/10000 train_time:611719ms step_avg:90.21ms +[2025-08-22 15:35:44] [Rank 0] step:6781/10000 train_time:611719ms step_avg:90.21ms +[2025-08-22 15:35:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:35:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:35:59] [Rank 0] PRINT: step:6800/10000 val_loss:3.7466 svd_entropy: attn_qk:H=0.8978,top10E=0.09,eRank=390.5,q75/q25=14.84 attn_vo:H=0.8849,top10E=0.09,eRank=362.2,q75/q25=29.00 mlp_w1:H=0.8712,top10E=0.15,eRank=334.4,q75/q25=8.19 mlp_w2:H=0.9347,top10E=0.09,eRank=498.5,q75/q25=4.75 vo_prod:H=0.8054,top10E=0.16,eRank=217.8,q75/q25=587.39 train_time:613621ms step_avg:90.24ms +[2025-08-22 15:35:59] [Rank 0] PRINT: step:6800/10000 val_loss:3.7466 svd_entropy: attn_qk:H=0.8978,top10E=0.09,eRank=390.5,q75/q25=14.84 attn_vo:H=0.8849,top10E=0.09,eRank=362.2,q75/q25=29.00 mlp_w1:H=0.8712,top10E=0.15,eRank=334.4,q75/q25=8.19 mlp_w2:H=0.9347,top10E=0.09,eRank=498.5,q75/q25=4.75 vo_prod:H=0.8054,top10E=0.16,eRank=217.8,q75/q25=587.39 train_time:613621ms step_avg:90.24ms +[2025-08-22 15:35:59] [Rank 0] step:6801/10000 train_time:613640ms step_avg:90.23ms +[2025-08-22 15:35:59] [Rank 0] step:6801/10000 train_time:613640ms step_avg:90.23ms +[2025-08-22 15:36:01] [Rank 0] step:6821/10000 train_time:615537ms step_avg:90.24ms +[2025-08-22 15:36:01] [Rank 0] step:6821/10000 train_time:615537ms step_avg:90.24ms +[2025-08-22 15:36:03] [Rank 0] step:6841/10000 train_time:617427ms step_avg:90.25ms +[2025-08-22 15:36:03] [Rank 0] step:6841/10000 train_time:617427ms step_avg:90.25ms +[2025-08-22 15:36:05] [Rank 0] step:6861/10000 train_time:619313ms step_avg:90.27ms +[2025-08-22 15:36:05] [Rank 0] step:6861/10000 train_time:619313ms step_avg:90.27ms +[2025-08-22 15:36:07] [Rank 0] step:6881/10000 train_time:621206ms step_avg:90.28ms +[2025-08-22 15:36:07] [Rank 0] step:6881/10000 train_time:621206ms step_avg:90.28ms +[2025-08-22 15:36:09] [Rank 0] step:6901/10000 train_time:623094ms step_avg:90.29ms +[2025-08-22 15:36:09] [Rank 0] step:6901/10000 train_time:623094ms step_avg:90.29ms +[2025-08-22 15:36:11] [Rank 0] step:6921/10000 train_time:624981ms step_avg:90.30ms +[2025-08-22 15:36:11] [Rank 0] step:6921/10000 train_time:624981ms step_avg:90.30ms +[2025-08-22 15:36:13] 
[Rank 0] step:6941/10000 train_time:626879ms step_avg:90.32ms +[2025-08-22 15:36:13] [Rank 0] step:6941/10000 train_time:626879ms step_avg:90.32ms +[2025-08-22 15:36:15] [Rank 0] step:6961/10000 train_time:628786ms step_avg:90.33ms +[2025-08-22 15:36:15] [Rank 0] step:6961/10000 train_time:628786ms step_avg:90.33ms +[2025-08-22 15:36:16] [Rank 0] step:6981/10000 train_time:630682ms step_avg:90.34ms +[2025-08-22 15:36:16] [Rank 0] step:6981/10000 train_time:630682ms step_avg:90.34ms +[2025-08-22 15:36:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:36:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:36:32] [Rank 0] PRINT: step:7000/10000 val_loss:3.7273 svd_entropy: attn_qk:H=0.8980,top10E=0.09,eRank=390.8,q75/q25=14.76 attn_vo:H=0.8856,top10E=0.09,eRank=363.6,q75/q25=28.57 mlp_w1:H=0.8722,top10E=0.15,eRank=336.5,q75/q25=8.13 mlp_w2:H=0.9350,top10E=0.09,eRank=499.3,q75/q25=4.73 vo_prod:H=0.8065,top10E=0.16,eRank=219.3,q75/q25=558.79 train_time:632583ms step_avg:90.37ms +[2025-08-22 15:36:32] [Rank 0] PRINT: step:7000/10000 val_loss:3.7273 svd_entropy: attn_qk:H=0.8980,top10E=0.09,eRank=390.8,q75/q25=14.76 attn_vo:H=0.8856,top10E=0.09,eRank=363.6,q75/q25=28.57 mlp_w1:H=0.8722,top10E=0.15,eRank=336.5,q75/q25=8.13 mlp_w2:H=0.9350,top10E=0.09,eRank=499.3,q75/q25=4.73 vo_prod:H=0.8065,top10E=0.16,eRank=219.3,q75/q25=558.79 train_time:632583ms step_avg:90.37ms +[2025-08-22 15:36:32] [Rank 0] step:7001/10000 train_time:632602ms step_avg:90.36ms +[2025-08-22 15:36:32] [Rank 0] step:7001/10000 train_time:632602ms step_avg:90.36ms +[2025-08-22 15:36:34] [Rank 0] step:7021/10000 train_time:634492ms step_avg:90.37ms +[2025-08-22 15:36:34] [Rank 0] step:7021/10000 train_time:634492ms step_avg:90.37ms +[2025-08-22 15:36:36] [Rank 0] step:7041/10000 train_time:636380ms step_avg:90.38ms 
+[2025-08-22 15:36:36] [Rank 0] step:7041/10000 train_time:636380ms step_avg:90.38ms +[2025-08-22 15:36:38] [Rank 0] step:7061/10000 train_time:638272ms step_avg:90.39ms +[2025-08-22 15:36:38] [Rank 0] step:7061/10000 train_time:638272ms step_avg:90.39ms +[2025-08-22 15:36:40] [Rank 0] step:7081/10000 train_time:640159ms step_avg:90.41ms +[2025-08-22 15:36:40] [Rank 0] step:7081/10000 train_time:640159ms step_avg:90.41ms +[2025-08-22 15:36:42] [Rank 0] step:7101/10000 train_time:642057ms step_avg:90.42ms +[2025-08-22 15:36:42] [Rank 0] step:7101/10000 train_time:642057ms step_avg:90.42ms +[2025-08-22 15:36:44] [Rank 0] step:7121/10000 train_time:643948ms step_avg:90.43ms +[2025-08-22 15:36:44] [Rank 0] step:7121/10000 train_time:643948ms step_avg:90.43ms +[2025-08-22 15:36:46] [Rank 0] step:7141/10000 train_time:645839ms step_avg:90.44ms +[2025-08-22 15:36:46] [Rank 0] step:7141/10000 train_time:645839ms step_avg:90.44ms +[2025-08-22 15:36:48] [Rank 0] step:7161/10000 train_time:647732ms step_avg:90.45ms +[2025-08-22 15:36:48] [Rank 0] step:7161/10000 train_time:647732ms step_avg:90.45ms +[2025-08-22 15:36:49] [Rank 0] step:7181/10000 train_time:649628ms step_avg:90.46ms +[2025-08-22 15:36:49] [Rank 0] step:7181/10000 train_time:649628ms step_avg:90.46ms +[2025-08-22 15:36:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:36:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:37:05] [Rank 0] PRINT: step:7200/10000 val_loss:3.7154 svd_entropy: attn_qk:H=0.8981,top10E=0.09,eRank=391.1,q75/q25=14.71 attn_vo:H=0.8861,top10E=0.09,eRank=364.9,q75/q25=28.11 mlp_w1:H=0.8731,top10E=0.15,eRank=338.5,q75/q25=8.07 mlp_w2:H=0.9352,top10E=0.09,eRank=500.2,q75/q25=4.71 vo_prod:H=0.8076,top10E=0.16,eRank=220.7,q75/q25=540.10 train_time:651532ms step_avg:90.49ms +[2025-08-22 15:37:05] [Rank 0] PRINT: step:7200/10000 val_loss:3.7154 svd_entropy: attn_qk:H=0.8981,top10E=0.09,eRank=391.1,q75/q25=14.71 attn_vo:H=0.8861,top10E=0.09,eRank=364.9,q75/q25=28.11 mlp_w1:H=0.8731,top10E=0.15,eRank=338.5,q75/q25=8.07 mlp_w2:H=0.9352,top10E=0.09,eRank=500.2,q75/q25=4.71 vo_prod:H=0.8076,top10E=0.16,eRank=220.7,q75/q25=540.10 train_time:651532ms step_avg:90.49ms +[2025-08-22 15:37:05] [Rank 0] step:7201/10000 train_time:651551ms step_avg:90.48ms +[2025-08-22 15:37:05] [Rank 0] step:7201/10000 train_time:651551ms step_avg:90.48ms +[2025-08-22 15:37:07] [Rank 0] step:7221/10000 train_time:653442ms step_avg:90.49ms +[2025-08-22 15:37:07] [Rank 0] step:7221/10000 train_time:653442ms step_avg:90.49ms +[2025-08-22 15:37:09] [Rank 0] step:7241/10000 train_time:655336ms step_avg:90.50ms +[2025-08-22 15:37:09] [Rank 0] step:7241/10000 train_time:655336ms step_avg:90.50ms +[2025-08-22 15:37:11] [Rank 0] step:7261/10000 train_time:657226ms step_avg:90.51ms +[2025-08-22 15:37:11] [Rank 0] step:7261/10000 train_time:657226ms step_avg:90.51ms +[2025-08-22 15:37:13] [Rank 0] step:7281/10000 train_time:659130ms step_avg:90.53ms +[2025-08-22 15:37:13] [Rank 0] step:7281/10000 train_time:659130ms step_avg:90.53ms +[2025-08-22 15:37:15] [Rank 0] step:7301/10000 train_time:661025ms step_avg:90.54ms +[2025-08-22 15:37:15] [Rank 0] step:7301/10000 train_time:661025ms step_avg:90.54ms +[2025-08-22 15:37:17] [Rank 0] step:7321/10000 train_time:662936ms step_avg:90.55ms +[2025-08-22 15:37:17] [Rank 0] step:7321/10000 train_time:662936ms step_avg:90.55ms +[2025-08-22 15:37:19] 
[Rank 0] step:7341/10000 train_time:664834ms step_avg:90.56ms +[2025-08-22 15:37:19] [Rank 0] step:7341/10000 train_time:664834ms step_avg:90.56ms +[2025-08-22 15:37:21] [Rank 0] step:7361/10000 train_time:666741ms step_avg:90.58ms +[2025-08-22 15:37:21] [Rank 0] step:7361/10000 train_time:666741ms step_avg:90.58ms +[2025-08-22 15:37:22] [Rank 0] step:7381/10000 train_time:668645ms step_avg:90.59ms +[2025-08-22 15:37:22] [Rank 0] step:7381/10000 train_time:668645ms step_avg:90.59ms +[2025-08-22 15:37:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:37:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:37:38] [Rank 0] PRINT: step:7400/10000 val_loss:3.6955 svd_entropy: attn_qk:H=0.8982,top10E=0.09,eRank=391.4,q75/q25=14.68 attn_vo:H=0.8866,top10E=0.09,eRank=366.1,q75/q25=27.77 mlp_w1:H=0.8739,top10E=0.15,eRank=340.3,q75/q25=8.03 mlp_w2:H=0.9355,top10E=0.09,eRank=501.0,q75/q25=4.69 vo_prod:H=0.8085,top10E=0.16,eRank=221.9,q75/q25=527.09 train_time:670530ms step_avg:90.61ms +[2025-08-22 15:37:38] [Rank 0] PRINT: step:7400/10000 val_loss:3.6955 svd_entropy: attn_qk:H=0.8982,top10E=0.09,eRank=391.4,q75/q25=14.68 attn_vo:H=0.8866,top10E=0.09,eRank=366.1,q75/q25=27.77 mlp_w1:H=0.8739,top10E=0.15,eRank=340.3,q75/q25=8.03 mlp_w2:H=0.9355,top10E=0.09,eRank=501.0,q75/q25=4.69 vo_prod:H=0.8085,top10E=0.16,eRank=221.9,q75/q25=527.09 train_time:670530ms step_avg:90.61ms +[2025-08-22 15:37:38] [Rank 0] step:7401/10000 train_time:670550ms step_avg:90.60ms +[2025-08-22 15:37:38] [Rank 0] step:7401/10000 train_time:670550ms step_avg:90.60ms +[2025-08-22 15:37:40] [Rank 0] step:7421/10000 train_time:672451ms step_avg:90.61ms +[2025-08-22 15:37:40] [Rank 0] step:7421/10000 train_time:672451ms step_avg:90.61ms +[2025-08-22 15:37:42] [Rank 0] step:7441/10000 train_time:674339ms step_avg:90.62ms 
+[2025-08-22 15:37:42] [Rank 0] step:7441/10000 train_time:674339ms step_avg:90.62ms +[2025-08-22 15:37:44] [Rank 0] step:7461/10000 train_time:676233ms step_avg:90.64ms +[2025-08-22 15:37:44] [Rank 0] step:7461/10000 train_time:676233ms step_avg:90.64ms +[2025-08-22 15:37:46] [Rank 0] step:7481/10000 train_time:678133ms step_avg:90.65ms +[2025-08-22 15:37:46] [Rank 0] step:7481/10000 train_time:678133ms step_avg:90.65ms +[2025-08-22 15:37:48] [Rank 0] step:7501/10000 train_time:680031ms step_avg:90.66ms +[2025-08-22 15:37:48] [Rank 0] step:7501/10000 train_time:680031ms step_avg:90.66ms +[2025-08-22 15:37:50] [Rank 0] step:7521/10000 train_time:681932ms step_avg:90.67ms +[2025-08-22 15:37:50] [Rank 0] step:7521/10000 train_time:681932ms step_avg:90.67ms +[2025-08-22 15:37:52] [Rank 0] step:7541/10000 train_time:683839ms step_avg:90.68ms +[2025-08-22 15:37:52] [Rank 0] step:7541/10000 train_time:683839ms step_avg:90.68ms +[2025-08-22 15:37:53] [Rank 0] step:7561/10000 train_time:685729ms step_avg:90.69ms +[2025-08-22 15:37:53] [Rank 0] step:7561/10000 train_time:685729ms step_avg:90.69ms +[2025-08-22 15:37:55] [Rank 0] step:7581/10000 train_time:687634ms step_avg:90.70ms +[2025-08-22 15:37:55] [Rank 0] step:7581/10000 train_time:687634ms step_avg:90.70ms +[2025-08-22 15:37:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:37:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:38:11] [Rank 0] PRINT: step:7600/10000 val_loss:3.6851 svd_entropy: attn_qk:H=0.8982,top10E=0.09,eRank=391.6,q75/q25=14.64 attn_vo:H=0.8871,top10E=0.08,eRank=367.1,q75/q25=27.45 mlp_w1:H=0.8746,top10E=0.15,eRank=341.9,q75/q25=7.98 mlp_w2:H=0.9357,top10E=0.09,eRank=501.7,q75/q25=4.67 vo_prod:H=0.8094,top10E=0.16,eRank=223.0,q75/q25=511.90 train_time:689545ms step_avg:90.73ms +[2025-08-22 15:38:11] [Rank 0] PRINT: step:7600/10000 val_loss:3.6851 svd_entropy: attn_qk:H=0.8982,top10E=0.09,eRank=391.6,q75/q25=14.64 attn_vo:H=0.8871,top10E=0.08,eRank=367.1,q75/q25=27.45 mlp_w1:H=0.8746,top10E=0.15,eRank=341.9,q75/q25=7.98 mlp_w2:H=0.9357,top10E=0.09,eRank=501.7,q75/q25=4.67 vo_prod:H=0.8094,top10E=0.16,eRank=223.0,q75/q25=511.90 train_time:689545ms step_avg:90.73ms +[2025-08-22 15:38:11] [Rank 0] step:7601/10000 train_time:689564ms step_avg:90.72ms +[2025-08-22 15:38:11] [Rank 0] step:7601/10000 train_time:689564ms step_avg:90.72ms +[2025-08-22 15:38:13] [Rank 0] step:7621/10000 train_time:691470ms step_avg:90.73ms +[2025-08-22 15:38:13] [Rank 0] step:7621/10000 train_time:691470ms step_avg:90.73ms +[2025-08-22 15:38:15] [Rank 0] step:7641/10000 train_time:693362ms step_avg:90.74ms +[2025-08-22 15:38:15] [Rank 0] step:7641/10000 train_time:693362ms step_avg:90.74ms +[2025-08-22 15:38:17] [Rank 0] step:7661/10000 train_time:695262ms step_avg:90.75ms +[2025-08-22 15:38:17] [Rank 0] step:7661/10000 train_time:695262ms step_avg:90.75ms +[2025-08-22 15:38:19] [Rank 0] step:7681/10000 train_time:697155ms step_avg:90.76ms +[2025-08-22 15:38:19] [Rank 0] step:7681/10000 train_time:697155ms step_avg:90.76ms +[2025-08-22 15:38:21] [Rank 0] step:7701/10000 train_time:699050ms step_avg:90.77ms +[2025-08-22 15:38:21] [Rank 0] step:7701/10000 train_time:699050ms step_avg:90.77ms +[2025-08-22 15:38:23] [Rank 0] step:7721/10000 train_time:700960ms step_avg:90.79ms +[2025-08-22 15:38:23] [Rank 0] step:7721/10000 train_time:700960ms step_avg:90.79ms +[2025-08-22 15:38:25] 
[Rank 0] step:7741/10000 train_time:702858ms step_avg:90.80ms +[2025-08-22 15:38:25] [Rank 0] step:7741/10000 train_time:702858ms step_avg:90.80ms +[2025-08-22 15:38:27] [Rank 0] step:7761/10000 train_time:704765ms step_avg:90.81ms +[2025-08-22 15:38:27] [Rank 0] step:7761/10000 train_time:704765ms step_avg:90.81ms +[2025-08-22 15:38:28] [Rank 0] step:7781/10000 train_time:706665ms step_avg:90.82ms +[2025-08-22 15:38:28] [Rank 0] step:7781/10000 train_time:706665ms step_avg:90.82ms +[2025-08-22 15:38:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:38:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:38:44] [Rank 0] PRINT: step:7800/10000 val_loss:3.6721 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=391.8,q75/q25=14.57 attn_vo:H=0.8875,top10E=0.08,eRank=368.1,q75/q25=27.02 mlp_w1:H=0.8753,top10E=0.15,eRank=343.5,q75/q25=7.95 mlp_w2:H=0.9359,top10E=0.09,eRank=502.3,q75/q25=4.66 vo_prod:H=0.8102,top10E=0.16,eRank=224.1,q75/q25=493.83 train_time:708576ms step_avg:90.84ms +[2025-08-22 15:38:44] [Rank 0] PRINT: step:7800/10000 val_loss:3.6721 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=391.8,q75/q25=14.57 attn_vo:H=0.8875,top10E=0.08,eRank=368.1,q75/q25=27.02 mlp_w1:H=0.8753,top10E=0.15,eRank=343.5,q75/q25=7.95 mlp_w2:H=0.9359,top10E=0.09,eRank=502.3,q75/q25=4.66 vo_prod:H=0.8102,top10E=0.16,eRank=224.1,q75/q25=493.83 train_time:708576ms step_avg:90.84ms +[2025-08-22 15:38:44] [Rank 0] step:7801/10000 train_time:708596ms step_avg:90.83ms +[2025-08-22 15:38:44] [Rank 0] step:7801/10000 train_time:708596ms step_avg:90.83ms +[2025-08-22 15:38:46] [Rank 0] step:7821/10000 train_time:710504ms step_avg:90.85ms +[2025-08-22 15:38:46] [Rank 0] step:7821/10000 train_time:710504ms step_avg:90.85ms +[2025-08-22 15:38:48] [Rank 0] step:7841/10000 train_time:712394ms step_avg:90.86ms 
+[2025-08-22 15:38:48] [Rank 0] step:7841/10000 train_time:712394ms step_avg:90.86ms +[2025-08-22 15:38:50] [Rank 0] step:7861/10000 train_time:714293ms step_avg:90.87ms +[2025-08-22 15:38:50] [Rank 0] step:7861/10000 train_time:714293ms step_avg:90.87ms +[2025-08-22 15:38:52] [Rank 0] step:7881/10000 train_time:716198ms step_avg:90.88ms +[2025-08-22 15:38:52] [Rank 0] step:7881/10000 train_time:716198ms step_avg:90.88ms +[2025-08-22 15:38:54] [Rank 0] step:7901/10000 train_time:718092ms step_avg:90.89ms +[2025-08-22 15:38:54] [Rank 0] step:7901/10000 train_time:718092ms step_avg:90.89ms +[2025-08-22 15:38:56] [Rank 0] step:7921/10000 train_time:719992ms step_avg:90.90ms +[2025-08-22 15:38:56] [Rank 0] step:7921/10000 train_time:719992ms step_avg:90.90ms +[2025-08-22 15:38:58] [Rank 0] step:7941/10000 train_time:721899ms step_avg:90.91ms +[2025-08-22 15:38:58] [Rank 0] step:7941/10000 train_time:721899ms step_avg:90.91ms +[2025-08-22 15:39:00] [Rank 0] step:7961/10000 train_time:723803ms step_avg:90.92ms +[2025-08-22 15:39:00] [Rank 0] step:7961/10000 train_time:723803ms step_avg:90.92ms +[2025-08-22 15:39:01] [Rank 0] step:7981/10000 train_time:725695ms step_avg:90.93ms +[2025-08-22 15:39:01] [Rank 0] step:7981/10000 train_time:725695ms step_avg:90.93ms +[2025-08-22 15:39:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:39:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:39:17] [Rank 0] PRINT: step:8000/10000 val_loss:3.6528 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=391.9,q75/q25=14.55 attn_vo:H=0.8879,top10E=0.08,eRank=368.9,q75/q25=26.77 mlp_w1:H=0.8759,top10E=0.15,eRank=344.9,q75/q25=7.91 mlp_w2:H=0.9360,top10E=0.09,eRank=502.9,q75/q25=4.64 vo_prod:H=0.8110,top10E=0.16,eRank=225.1,q75/q25=476.80 train_time:727662ms step_avg:90.96ms +[2025-08-22 15:39:17] [Rank 0] PRINT: step:8000/10000 val_loss:3.6528 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=391.9,q75/q25=14.55 attn_vo:H=0.8879,top10E=0.08,eRank=368.9,q75/q25=26.77 mlp_w1:H=0.8759,top10E=0.15,eRank=344.9,q75/q25=7.91 mlp_w2:H=0.9360,top10E=0.09,eRank=502.9,q75/q25=4.64 vo_prod:H=0.8110,top10E=0.16,eRank=225.1,q75/q25=476.80 train_time:727662ms step_avg:90.96ms +[2025-08-22 15:39:17] [Rank 0] step:8001/10000 train_time:727682ms step_avg:90.95ms +[2025-08-22 15:39:17] [Rank 0] step:8001/10000 train_time:727682ms step_avg:90.95ms +[2025-08-22 15:39:19] [Rank 0] step:8021/10000 train_time:729570ms step_avg:90.96ms +[2025-08-22 15:39:19] [Rank 0] step:8021/10000 train_time:729570ms step_avg:90.96ms +[2025-08-22 15:39:21] [Rank 0] step:8041/10000 train_time:731476ms step_avg:90.97ms +[2025-08-22 15:39:21] [Rank 0] step:8041/10000 train_time:731476ms step_avg:90.97ms +[2025-08-22 15:39:23] [Rank 0] step:8061/10000 train_time:733376ms step_avg:90.98ms +[2025-08-22 15:39:23] [Rank 0] step:8061/10000 train_time:733376ms step_avg:90.98ms +[2025-08-22 15:39:25] [Rank 0] step:8081/10000 train_time:735266ms step_avg:90.99ms +[2025-08-22 15:39:25] [Rank 0] step:8081/10000 train_time:735266ms step_avg:90.99ms +[2025-08-22 15:39:27] [Rank 0] step:8101/10000 train_time:737175ms step_avg:91.00ms +[2025-08-22 15:39:27] [Rank 0] step:8101/10000 train_time:737175ms step_avg:91.00ms +[2025-08-22 15:39:29] [Rank 0] step:8121/10000 train_time:739076ms step_avg:91.01ms +[2025-08-22 15:39:29] [Rank 0] step:8121/10000 train_time:739076ms step_avg:91.01ms +[2025-08-22 15:39:31] 
[Rank 0] step:8141/10000 train_time:741635ms step_avg:91.10ms +[2025-08-22 15:39:31] [Rank 0] step:8141/10000 train_time:741635ms step_avg:91.10ms +[2025-08-22 15:39:33] [Rank 0] step:8161/10000 train_time:743555ms step_avg:91.11ms +[2025-08-22 15:39:33] [Rank 0] step:8161/10000 train_time:743555ms step_avg:91.11ms +[2025-08-22 15:39:35] [Rank 0] step:8181/10000 train_time:745486ms step_avg:91.12ms +[2025-08-22 15:39:35] [Rank 0] step:8181/10000 train_time:745486ms step_avg:91.12ms +[2025-08-22 15:39:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:39:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:39:51] [Rank 0] PRINT: step:8200/10000 val_loss:3.6414 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=392.0,q75/q25=14.55 attn_vo:H=0.8883,top10E=0.08,eRank=369.7,q75/q25=26.47 mlp_w1:H=0.8765,top10E=0.15,eRank=346.2,q75/q25=7.88 mlp_w2:H=0.9362,top10E=0.09,eRank=503.4,q75/q25=4.63 vo_prod:H=0.8117,top10E=0.16,eRank=226.0,q75/q25=464.80 train_time:747441ms step_avg:91.15ms +[2025-08-22 15:39:51] [Rank 0] PRINT: step:8200/10000 val_loss:3.6414 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=392.0,q75/q25=14.55 attn_vo:H=0.8883,top10E=0.08,eRank=369.7,q75/q25=26.47 mlp_w1:H=0.8765,top10E=0.15,eRank=346.2,q75/q25=7.88 mlp_w2:H=0.9362,top10E=0.09,eRank=503.4,q75/q25=4.63 vo_prod:H=0.8117,top10E=0.16,eRank=226.0,q75/q25=464.80 train_time:747441ms step_avg:91.15ms +[2025-08-22 15:39:51] [Rank 0] step:8201/10000 train_time:747461ms step_avg:91.14ms +[2025-08-22 15:39:51] [Rank 0] step:8201/10000 train_time:747461ms step_avg:91.14ms +[2025-08-22 15:39:53] [Rank 0] step:8221/10000 train_time:749403ms step_avg:91.16ms +[2025-08-22 15:39:53] [Rank 0] step:8221/10000 train_time:749403ms step_avg:91.16ms +[2025-08-22 15:39:55] [Rank 0] step:8241/10000 train_time:751333ms step_avg:91.17ms 
+[2025-08-22 15:39:55] [Rank 0] step:8241/10000 train_time:751333ms step_avg:91.17ms +[2025-08-22 15:39:57] [Rank 0] step:8261/10000 train_time:753265ms step_avg:91.18ms +[2025-08-22 15:39:57] [Rank 0] step:8261/10000 train_time:753265ms step_avg:91.18ms +[2025-08-22 15:39:59] [Rank 0] step:8281/10000 train_time:755186ms step_avg:91.20ms +[2025-08-22 15:39:59] [Rank 0] step:8281/10000 train_time:755186ms step_avg:91.20ms +[2025-08-22 15:40:01] [Rank 0] step:8301/10000 train_time:757111ms step_avg:91.21ms +[2025-08-22 15:40:01] [Rank 0] step:8301/10000 train_time:757111ms step_avg:91.21ms +[2025-08-22 15:40:03] [Rank 0] step:8321/10000 train_time:759030ms step_avg:91.22ms +[2025-08-22 15:40:03] [Rank 0] step:8321/10000 train_time:759030ms step_avg:91.22ms +[2025-08-22 15:40:05] [Rank 0] step:8341/10000 train_time:760960ms step_avg:91.23ms +[2025-08-22 15:40:05] [Rank 0] step:8341/10000 train_time:760960ms step_avg:91.23ms +[2025-08-22 15:40:07] [Rank 0] step:8361/10000 train_time:762886ms step_avg:91.24ms +[2025-08-22 15:40:07] [Rank 0] step:8361/10000 train_time:762886ms step_avg:91.24ms +[2025-08-22 15:40:08] [Rank 0] step:8381/10000 train_time:764907ms step_avg:91.27ms +[2025-08-22 15:40:08] [Rank 0] step:8381/10000 train_time:764907ms step_avg:91.27ms +[2025-08-22 15:40:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:40:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:40:24] [Rank 0] PRINT: step:8400/10000 val_loss:3.6287 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=392.0,q75/q25=14.54 attn_vo:H=0.8886,top10E=0.08,eRank=370.4,q75/q25=26.20 mlp_w1:H=0.8770,top10E=0.15,eRank=347.3,q75/q25=7.85 mlp_w2:H=0.9363,top10E=0.09,eRank=503.8,q75/q25=4.62 vo_prod:H=0.8123,top10E=0.16,eRank=226.9,q75/q25=453.69 train_time:766903ms step_avg:91.30ms +[2025-08-22 15:40:24] [Rank 0] PRINT: step:8400/10000 val_loss:3.6287 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=392.0,q75/q25=14.54 attn_vo:H=0.8886,top10E=0.08,eRank=370.4,q75/q25=26.20 mlp_w1:H=0.8770,top10E=0.15,eRank=347.3,q75/q25=7.85 mlp_w2:H=0.9363,top10E=0.09,eRank=503.8,q75/q25=4.62 vo_prod:H=0.8123,top10E=0.16,eRank=226.9,q75/q25=453.69 train_time:766903ms step_avg:91.30ms +[2025-08-22 15:40:24] [Rank 0] step:8401/10000 train_time:766922ms step_avg:91.29ms +[2025-08-22 15:40:24] [Rank 0] step:8401/10000 train_time:766922ms step_avg:91.29ms +[2025-08-22 15:40:26] [Rank 0] step:8421/10000 train_time:768842ms step_avg:91.30ms +[2025-08-22 15:40:26] [Rank 0] step:8421/10000 train_time:768842ms step_avg:91.30ms +[2025-08-22 15:40:28] [Rank 0] step:8441/10000 train_time:770765ms step_avg:91.31ms +[2025-08-22 15:40:28] [Rank 0] step:8441/10000 train_time:770765ms step_avg:91.31ms +[2025-08-22 15:40:30] [Rank 0] step:8461/10000 train_time:772685ms step_avg:91.32ms +[2025-08-22 15:40:30] [Rank 0] step:8461/10000 train_time:772685ms step_avg:91.32ms +[2025-08-22 15:40:32] [Rank 0] step:8481/10000 train_time:774615ms step_avg:91.34ms +[2025-08-22 15:40:32] [Rank 0] step:8481/10000 train_time:774615ms step_avg:91.34ms +[2025-08-22 15:40:34] [Rank 0] step:8501/10000 train_time:776562ms step_avg:91.35ms +[2025-08-22 15:40:34] [Rank 0] step:8501/10000 train_time:776562ms step_avg:91.35ms +[2025-08-22 15:40:36] [Rank 0] step:8521/10000 train_time:778489ms step_avg:91.36ms +[2025-08-22 15:40:36] [Rank 0] step:8521/10000 train_time:778489ms step_avg:91.36ms +[2025-08-22 15:40:37] 
[Rank 0] step:8541/10000 train_time:780426ms step_avg:91.37ms +[2025-08-22 15:40:37] [Rank 0] step:8541/10000 train_time:780426ms step_avg:91.37ms +[2025-08-22 15:40:39] [Rank 0] step:8561/10000 train_time:782359ms step_avg:91.39ms +[2025-08-22 15:40:39] [Rank 0] step:8561/10000 train_time:782359ms step_avg:91.39ms +[2025-08-22 15:40:41] [Rank 0] step:8581/10000 train_time:784289ms step_avg:91.40ms +[2025-08-22 15:40:41] [Rank 0] step:8581/10000 train_time:784289ms step_avg:91.40ms +[2025-08-22 15:40:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:40:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:40:57] [Rank 0] PRINT: step:8600/10000 val_loss:3.6184 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=392.0,q75/q25=14.52 attn_vo:H=0.8888,top10E=0.08,eRank=371.0,q75/q25=26.03 mlp_w1:H=0.8775,top10E=0.15,eRank=348.3,q75/q25=7.82 mlp_w2:H=0.9364,top10E=0.09,eRank=504.2,q75/q25=4.62 vo_prod:H=0.8128,top10E=0.16,eRank=227.6,q75/q25=449.40 train_time:786216ms step_avg:91.42ms +[2025-08-22 15:40:57] [Rank 0] PRINT: step:8600/10000 val_loss:3.6184 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=392.0,q75/q25=14.52 attn_vo:H=0.8888,top10E=0.08,eRank=371.0,q75/q25=26.03 mlp_w1:H=0.8775,top10E=0.15,eRank=348.3,q75/q25=7.82 mlp_w2:H=0.9364,top10E=0.09,eRank=504.2,q75/q25=4.62 vo_prod:H=0.8128,top10E=0.16,eRank=227.6,q75/q25=449.40 train_time:786216ms step_avg:91.42ms +[2025-08-22 15:40:57] [Rank 0] step:8601/10000 train_time:786235ms step_avg:91.41ms +[2025-08-22 15:40:57] [Rank 0] step:8601/10000 train_time:786235ms step_avg:91.41ms +[2025-08-22 15:40:59] [Rank 0] step:8621/10000 train_time:788181ms step_avg:91.43ms +[2025-08-22 15:40:59] [Rank 0] step:8621/10000 train_time:788181ms step_avg:91.43ms +[2025-08-22 15:41:01] [Rank 0] step:8641/10000 train_time:790101ms step_avg:91.44ms 
+[2025-08-22 15:41:01] [Rank 0] step:8641/10000 train_time:790101ms step_avg:91.44ms +[2025-08-22 15:41:03] [Rank 0] step:8661/10000 train_time:792024ms step_avg:91.45ms +[2025-08-22 15:41:03] [Rank 0] step:8661/10000 train_time:792024ms step_avg:91.45ms +[2025-08-22 15:41:04] [Rank 0] step:8681/10000 train_time:793954ms step_avg:91.46ms +[2025-08-22 15:41:04] [Rank 0] step:8681/10000 train_time:793954ms step_avg:91.46ms +[2025-08-22 15:41:06] [Rank 0] step:8701/10000 train_time:795872ms step_avg:91.47ms +[2025-08-22 15:41:06] [Rank 0] step:8701/10000 train_time:795872ms step_avg:91.47ms +[2025-08-22 15:41:08] [Rank 0] step:8721/10000 train_time:797804ms step_avg:91.48ms +[2025-08-22 15:41:08] [Rank 0] step:8721/10000 train_time:797804ms step_avg:91.48ms +[2025-08-22 15:41:10] [Rank 0] step:8741/10000 train_time:799798ms step_avg:91.50ms +[2025-08-22 15:41:10] [Rank 0] step:8741/10000 train_time:799798ms step_avg:91.50ms +[2025-08-22 15:41:12] [Rank 0] step:8761/10000 train_time:801728ms step_avg:91.51ms +[2025-08-22 15:41:12] [Rank 0] step:8761/10000 train_time:801728ms step_avg:91.51ms +[2025-08-22 15:41:14] [Rank 0] step:8781/10000 train_time:803659ms step_avg:91.52ms +[2025-08-22 15:41:14] [Rank 0] step:8781/10000 train_time:803659ms step_avg:91.52ms +[2025-08-22 15:41:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:41:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:41:30] [Rank 0] PRINT: step:8800/10000 val_loss:3.6048 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=392.1,q75/q25=14.49 attn_vo:H=0.8891,top10E=0.08,eRank=371.6,q75/q25=25.87 mlp_w1:H=0.8779,top10E=0.15,eRank=349.2,q75/q25=7.80 mlp_w2:H=0.9365,top10E=0.09,eRank=504.6,q75/q25=4.60 vo_prod:H=0.8133,top10E=0.16,eRank=228.3,q75/q25=439.97 train_time:805591ms step_avg:91.54ms +[2025-08-22 15:41:30] [Rank 0] PRINT: step:8800/10000 val_loss:3.6048 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=392.1,q75/q25=14.49 attn_vo:H=0.8891,top10E=0.08,eRank=371.6,q75/q25=25.87 mlp_w1:H=0.8779,top10E=0.15,eRank=349.2,q75/q25=7.80 mlp_w2:H=0.9365,top10E=0.09,eRank=504.6,q75/q25=4.60 vo_prod:H=0.8133,top10E=0.16,eRank=228.3,q75/q25=439.97 train_time:805591ms step_avg:91.54ms +[2025-08-22 15:41:30] [Rank 0] step:8801/10000 train_time:805610ms step_avg:91.54ms +[2025-08-22 15:41:30] [Rank 0] step:8801/10000 train_time:805610ms step_avg:91.54ms +[2025-08-22 15:41:32] [Rank 0] step:8821/10000 train_time:807543ms step_avg:91.55ms +[2025-08-22 15:41:32] [Rank 0] step:8821/10000 train_time:807543ms step_avg:91.55ms +[2025-08-22 15:41:34] [Rank 0] step:8841/10000 train_time:809488ms step_avg:91.56ms +[2025-08-22 15:41:34] [Rank 0] step:8841/10000 train_time:809488ms step_avg:91.56ms +[2025-08-22 15:41:36] [Rank 0] step:8861/10000 train_time:811409ms step_avg:91.57ms +[2025-08-22 15:41:36] [Rank 0] step:8861/10000 train_time:811409ms step_avg:91.57ms +[2025-08-22 15:41:38] [Rank 0] step:8881/10000 train_time:813338ms step_avg:91.58ms +[2025-08-22 15:41:38] [Rank 0] step:8881/10000 train_time:813338ms step_avg:91.58ms +[2025-08-22 15:41:40] [Rank 0] step:8901/10000 train_time:815267ms step_avg:91.59ms +[2025-08-22 15:41:40] [Rank 0] step:8901/10000 train_time:815267ms step_avg:91.59ms +[2025-08-22 15:41:41] [Rank 0] step:8921/10000 train_time:817211ms step_avg:91.61ms +[2025-08-22 15:41:41] [Rank 0] step:8921/10000 train_time:817211ms step_avg:91.61ms +[2025-08-22 15:41:43] 
[Rank 0] step:8941/10000 train_time:819144ms step_avg:91.62ms +[2025-08-22 15:41:43] [Rank 0] step:8941/10000 train_time:819144ms step_avg:91.62ms +[2025-08-22 15:41:45] [Rank 0] step:8961/10000 train_time:821074ms step_avg:91.63ms +[2025-08-22 15:41:45] [Rank 0] step:8961/10000 train_time:821074ms step_avg:91.63ms +[2025-08-22 15:41:47] [Rank 0] step:8981/10000 train_time:823002ms step_avg:91.64ms +[2025-08-22 15:41:47] [Rank 0] step:8981/10000 train_time:823002ms step_avg:91.64ms +[2025-08-22 15:41:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:41:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:42:03] [Rank 0] PRINT: step:9000/10000 val_loss:3.5941 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=392.0,q75/q25=14.46 attn_vo:H=0.8893,top10E=0.08,eRank=372.1,q75/q25=25.65 mlp_w1:H=0.8782,top10E=0.15,eRank=350.0,q75/q25=7.77 mlp_w2:H=0.9366,top10E=0.09,eRank=505.0,q75/q25=4.60 vo_prod:H=0.8138,top10E=0.16,eRank=228.9,q75/q25=432.35 train_time:824933ms step_avg:91.66ms +[2025-08-22 15:42:03] [Rank 0] PRINT: step:9000/10000 val_loss:3.5941 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=392.0,q75/q25=14.46 attn_vo:H=0.8893,top10E=0.08,eRank=372.1,q75/q25=25.65 mlp_w1:H=0.8782,top10E=0.15,eRank=350.0,q75/q25=7.77 mlp_w2:H=0.9366,top10E=0.09,eRank=505.0,q75/q25=4.60 vo_prod:H=0.8138,top10E=0.16,eRank=228.9,q75/q25=432.35 train_time:824933ms step_avg:91.66ms +[2025-08-22 15:42:03] [Rank 0] step:9001/10000 train_time:824952ms step_avg:91.65ms +[2025-08-22 15:42:03] [Rank 0] step:9001/10000 train_time:824952ms step_avg:91.65ms +[2025-08-22 15:42:05] [Rank 0] step:9021/10000 train_time:826891ms step_avg:91.66ms +[2025-08-22 15:42:05] [Rank 0] step:9021/10000 train_time:826891ms step_avg:91.66ms +[2025-08-22 15:42:07] [Rank 0] step:9041/10000 train_time:828818ms step_avg:91.67ms 
+[2025-08-22 15:42:07] [Rank 0] step:9041/10000 train_time:828818ms step_avg:91.67ms +[2025-08-22 15:42:09] [Rank 0] step:9061/10000 train_time:830753ms step_avg:91.68ms +[2025-08-22 15:42:09] [Rank 0] step:9061/10000 train_time:830753ms step_avg:91.68ms +[2025-08-22 15:42:11] [Rank 0] step:9081/10000 train_time:832687ms step_avg:91.70ms +[2025-08-22 15:42:11] [Rank 0] step:9081/10000 train_time:832687ms step_avg:91.70ms +[2025-08-22 15:42:13] [Rank 0] step:9101/10000 train_time:834628ms step_avg:91.71ms +[2025-08-22 15:42:13] [Rank 0] step:9101/10000 train_time:834628ms step_avg:91.71ms +[2025-08-22 15:42:15] [Rank 0] step:9121/10000 train_time:836612ms step_avg:91.72ms +[2025-08-22 15:42:15] [Rank 0] step:9121/10000 train_time:836612ms step_avg:91.72ms +[2025-08-22 15:42:17] [Rank 0] step:9141/10000 train_time:838600ms step_avg:91.74ms +[2025-08-22 15:42:17] [Rank 0] step:9141/10000 train_time:838600ms step_avg:91.74ms +[2025-08-22 15:42:18] [Rank 0] step:9161/10000 train_time:840522ms step_avg:91.75ms +[2025-08-22 15:42:18] [Rank 0] step:9161/10000 train_time:840522ms step_avg:91.75ms +[2025-08-22 15:42:20] [Rank 0] step:9181/10000 train_time:842484ms step_avg:91.76ms +[2025-08-22 15:42:20] [Rank 0] step:9181/10000 train_time:842484ms step_avg:91.76ms +[2025-08-22 15:42:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:42:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:42:36] [Rank 0] PRINT: step:9200/10000 val_loss:3.5845 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=392.0,q75/q25=14.48 attn_vo:H=0.8895,top10E=0.08,eRank=372.5,q75/q25=25.55 mlp_w1:H=0.8785,top10E=0.15,eRank=350.7,q75/q25=7.75 mlp_w2:H=0.9367,top10E=0.09,eRank=505.3,q75/q25=4.59 vo_prod:H=0.8141,top10E=0.16,eRank=229.4,q75/q25=425.94 train_time:844408ms step_avg:91.78ms +[2025-08-22 15:42:36] [Rank 0] PRINT: step:9200/10000 val_loss:3.5845 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=392.0,q75/q25=14.48 attn_vo:H=0.8895,top10E=0.08,eRank=372.5,q75/q25=25.55 mlp_w1:H=0.8785,top10E=0.15,eRank=350.7,q75/q25=7.75 mlp_w2:H=0.9367,top10E=0.09,eRank=505.3,q75/q25=4.59 vo_prod:H=0.8141,top10E=0.16,eRank=229.4,q75/q25=425.94 train_time:844408ms step_avg:91.78ms +[2025-08-22 15:42:36] [Rank 0] step:9201/10000 train_time:844427ms step_avg:91.78ms +[2025-08-22 15:42:36] [Rank 0] step:9201/10000 train_time:844427ms step_avg:91.78ms +[2025-08-22 15:42:38] [Rank 0] step:9221/10000 train_time:846360ms step_avg:91.79ms +[2025-08-22 15:42:38] [Rank 0] step:9221/10000 train_time:846360ms step_avg:91.79ms +[2025-08-22 15:42:40] [Rank 0] step:9241/10000 train_time:848295ms step_avg:91.80ms +[2025-08-22 15:42:40] [Rank 0] step:9241/10000 train_time:848295ms step_avg:91.80ms +[2025-08-22 15:42:42] [Rank 0] step:9261/10000 train_time:850228ms step_avg:91.81ms +[2025-08-22 15:42:42] [Rank 0] step:9261/10000 train_time:850228ms step_avg:91.81ms +[2025-08-22 15:42:44] [Rank 0] step:9281/10000 train_time:852146ms step_avg:91.82ms +[2025-08-22 15:42:44] [Rank 0] step:9281/10000 train_time:852146ms step_avg:91.82ms +[2025-08-22 15:42:46] [Rank 0] step:9301/10000 train_time:854069ms step_avg:91.83ms +[2025-08-22 15:42:46] [Rank 0] step:9301/10000 train_time:854069ms step_avg:91.83ms +[2025-08-22 15:42:48] [Rank 0] step:9321/10000 train_time:856004ms step_avg:91.84ms +[2025-08-22 15:42:48] [Rank 0] step:9321/10000 train_time:856004ms step_avg:91.84ms +[2025-08-22 15:42:50] 
[Rank 0] step:9341/10000 train_time:857933ms step_avg:91.85ms +[2025-08-22 15:42:50] [Rank 0] step:9341/10000 train_time:857933ms step_avg:91.85ms +[2025-08-22 15:42:52] [Rank 0] step:9361/10000 train_time:859867ms step_avg:91.86ms +[2025-08-22 15:42:52] [Rank 0] step:9361/10000 train_time:859867ms step_avg:91.86ms +[2025-08-22 15:42:54] [Rank 0] step:9381/10000 train_time:861809ms step_avg:91.87ms +[2025-08-22 15:42:54] [Rank 0] step:9381/10000 train_time:861809ms step_avg:91.87ms +[2025-08-22 15:42:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:42:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:43:09] [Rank 0] PRINT: step:9400/10000 val_loss:3.5752 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=392.0,q75/q25=14.47 attn_vo:H=0.8896,top10E=0.08,eRank=372.8,q75/q25=25.43 mlp_w1:H=0.8788,top10E=0.14,eRank=351.3,q75/q25=7.74 mlp_w2:H=0.9368,top10E=0.09,eRank=505.5,q75/q25=4.59 vo_prod:H=0.8145,top10E=0.16,eRank=229.9,q75/q25=421.53 train_time:863747ms step_avg:91.89ms +[2025-08-22 15:43:09] [Rank 0] PRINT: step:9400/10000 val_loss:3.5752 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=392.0,q75/q25=14.47 attn_vo:H=0.8896,top10E=0.08,eRank=372.8,q75/q25=25.43 mlp_w1:H=0.8788,top10E=0.14,eRank=351.3,q75/q25=7.74 mlp_w2:H=0.9368,top10E=0.09,eRank=505.5,q75/q25=4.59 vo_prod:H=0.8145,top10E=0.16,eRank=229.9,q75/q25=421.53 train_time:863747ms step_avg:91.89ms +[2025-08-22 15:43:09] [Rank 0] step:9401/10000 train_time:863766ms step_avg:91.88ms +[2025-08-22 15:43:09] [Rank 0] step:9401/10000 train_time:863766ms step_avg:91.88ms +[2025-08-22 15:43:11] [Rank 0] step:9421/10000 train_time:865681ms step_avg:91.89ms +[2025-08-22 15:43:11] [Rank 0] step:9421/10000 train_time:865681ms step_avg:91.89ms +[2025-08-22 15:43:13] [Rank 0] step:9441/10000 train_time:867607ms step_avg:91.90ms 
+[2025-08-22 15:43:13] [Rank 0] step:9441/10000 train_time:867607ms step_avg:91.90ms +[2025-08-22 15:43:15] [Rank 0] step:9461/10000 train_time:869539ms step_avg:91.91ms +[2025-08-22 15:43:15] [Rank 0] step:9461/10000 train_time:869539ms step_avg:91.91ms +[2025-08-22 15:43:17] [Rank 0] step:9481/10000 train_time:871547ms step_avg:91.93ms +[2025-08-22 15:43:17] [Rank 0] step:9481/10000 train_time:871547ms step_avg:91.93ms +[2025-08-22 15:43:19] [Rank 0] step:9501/10000 train_time:873562ms step_avg:91.94ms +[2025-08-22 15:43:19] [Rank 0] step:9501/10000 train_time:873562ms step_avg:91.94ms +[2025-08-22 15:43:21] [Rank 0] step:9521/10000 train_time:875483ms step_avg:91.95ms +[2025-08-22 15:43:21] [Rank 0] step:9521/10000 train_time:875483ms step_avg:91.95ms +[2025-08-22 15:43:23] [Rank 0] step:9541/10000 train_time:877412ms step_avg:91.96ms +[2025-08-22 15:43:23] [Rank 0] step:9541/10000 train_time:877412ms step_avg:91.96ms +[2025-08-22 15:43:25] [Rank 0] step:9561/10000 train_time:879335ms step_avg:91.97ms +[2025-08-22 15:43:25] [Rank 0] step:9561/10000 train_time:879335ms step_avg:91.97ms +[2025-08-22 15:43:27] [Rank 0] step:9581/10000 train_time:881264ms step_avg:91.98ms +[2025-08-22 15:43:27] [Rank 0] step:9581/10000 train_time:881264ms step_avg:91.98ms +[2025-08-22 15:43:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:43:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:43:43] [Rank 0] PRINT: step:9600/10000 val_loss:3.5675 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=391.9,q75/q25=14.47 attn_vo:H=0.8897,top10E=0.08,eRank=373.1,q75/q25=25.35 mlp_w1:H=0.8790,top10E=0.14,eRank=351.7,q75/q25=7.74 mlp_w2:H=0.9368,top10E=0.09,eRank=505.7,q75/q25=4.58 vo_prod:H=0.8148,top10E=0.15,eRank=230.3,q75/q25=416.61 train_time:883211ms step_avg:92.00ms +[2025-08-22 15:43:43] [Rank 0] PRINT: step:9600/10000 val_loss:3.5675 svd_entropy: attn_qk:H=0.8983,top10E=0.09,eRank=391.9,q75/q25=14.47 attn_vo:H=0.8897,top10E=0.08,eRank=373.1,q75/q25=25.35 mlp_w1:H=0.8790,top10E=0.14,eRank=351.7,q75/q25=7.74 mlp_w2:H=0.9368,top10E=0.09,eRank=505.7,q75/q25=4.58 vo_prod:H=0.8148,top10E=0.15,eRank=230.3,q75/q25=416.61 train_time:883211ms step_avg:92.00ms +[2025-08-22 15:43:43] [Rank 0] step:9601/10000 train_time:883231ms step_avg:91.99ms +[2025-08-22 15:43:43] [Rank 0] step:9601/10000 train_time:883231ms step_avg:91.99ms +[2025-08-22 15:43:45] [Rank 0] step:9621/10000 train_time:885159ms step_avg:92.00ms +[2025-08-22 15:43:45] [Rank 0] step:9621/10000 train_time:885159ms step_avg:92.00ms +[2025-08-22 15:43:47] [Rank 0] step:9641/10000 train_time:887090ms step_avg:92.01ms +[2025-08-22 15:43:47] [Rank 0] step:9641/10000 train_time:887090ms step_avg:92.01ms +[2025-08-22 15:43:48] [Rank 0] step:9661/10000 train_time:889051ms step_avg:92.02ms +[2025-08-22 15:43:48] [Rank 0] step:9661/10000 train_time:889051ms step_avg:92.02ms +[2025-08-22 15:43:50] [Rank 0] step:9681/10000 train_time:891004ms step_avg:92.04ms +[2025-08-22 15:43:50] [Rank 0] step:9681/10000 train_time:891004ms step_avg:92.04ms +[2025-08-22 15:43:52] [Rank 0] step:9701/10000 train_time:892968ms step_avg:92.05ms +[2025-08-22 15:43:52] [Rank 0] step:9701/10000 train_time:892968ms step_avg:92.05ms +[2025-08-22 15:43:54] [Rank 0] step:9721/10000 train_time:894920ms step_avg:92.06ms +[2025-08-22 15:43:54] [Rank 0] step:9721/10000 train_time:894920ms step_avg:92.06ms +[2025-08-22 15:43:56] 
[Rank 0] step:9741/10000 train_time:896890ms step_avg:92.07ms +[2025-08-22 15:43:56] [Rank 0] step:9741/10000 train_time:896890ms step_avg:92.07ms +[2025-08-22 15:43:58] [Rank 0] step:9761/10000 train_time:898852ms step_avg:92.09ms +[2025-08-22 15:43:58] [Rank 0] step:9761/10000 train_time:898852ms step_avg:92.09ms +[2025-08-22 15:44:00] [Rank 0] step:9781/10000 train_time:900819ms step_avg:92.10ms +[2025-08-22 15:44:00] [Rank 0] step:9781/10000 train_time:900819ms step_avg:92.10ms +[2025-08-22 15:44:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:44:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:44:16] [Rank 0] PRINT: step:9800/10000 val_loss:3.5586 svd_entropy: attn_qk:H=0.8982,top10E=0.09,eRank=391.9,q75/q25=14.49 attn_vo:H=0.8898,top10E=0.08,eRank=373.4,q75/q25=25.28 mlp_w1:H=0.8792,top10E=0.14,eRank=352.1,q75/q25=7.72 mlp_w2:H=0.9369,top10E=0.09,eRank=505.8,q75/q25=4.58 vo_prod:H=0.8150,top10E=0.15,eRank=230.6,q75/q25=412.89 train_time:902799ms step_avg:92.12ms +[2025-08-22 15:44:16] [Rank 0] PRINT: step:9800/10000 val_loss:3.5586 svd_entropy: attn_qk:H=0.8982,top10E=0.09,eRank=391.9,q75/q25=14.49 attn_vo:H=0.8898,top10E=0.08,eRank=373.4,q75/q25=25.28 mlp_w1:H=0.8792,top10E=0.14,eRank=352.1,q75/q25=7.72 mlp_w2:H=0.9369,top10E=0.09,eRank=505.8,q75/q25=4.58 vo_prod:H=0.8150,top10E=0.15,eRank=230.6,q75/q25=412.89 train_time:902799ms step_avg:92.12ms +[2025-08-22 15:44:16] [Rank 0] step:9801/10000 train_time:902818ms step_avg:92.11ms +[2025-08-22 15:44:16] [Rank 0] step:9801/10000 train_time:902818ms step_avg:92.11ms +[2025-08-22 15:44:18] [Rank 0] step:9821/10000 train_time:904788ms step_avg:92.13ms +[2025-08-22 15:44:18] [Rank 0] step:9821/10000 train_time:904788ms step_avg:92.13ms +[2025-08-22 15:44:20] [Rank 0] step:9841/10000 train_time:906827ms step_avg:92.15ms 
+[2025-08-22 15:44:20] [Rank 0] step:9841/10000 train_time:906827ms step_avg:92.15ms +[2025-08-22 15:44:22] [Rank 0] step:9861/10000 train_time:908853ms step_avg:92.17ms +[2025-08-22 15:44:22] [Rank 0] step:9861/10000 train_time:908853ms step_avg:92.17ms +[2025-08-22 15:44:24] [Rank 0] step:9881/10000 train_time:910802ms step_avg:92.18ms +[2025-08-22 15:44:24] [Rank 0] step:9881/10000 train_time:910802ms step_avg:92.18ms +[2025-08-22 15:44:26] [Rank 0] step:9901/10000 train_time:912775ms step_avg:92.19ms +[2025-08-22 15:44:26] [Rank 0] step:9901/10000 train_time:912775ms step_avg:92.19ms +[2025-08-22 15:44:28] [Rank 0] step:9921/10000 train_time:914728ms step_avg:92.20ms +[2025-08-22 15:44:28] [Rank 0] step:9921/10000 train_time:914728ms step_avg:92.20ms +[2025-08-22 15:44:30] [Rank 0] step:9941/10000 train_time:916697ms step_avg:92.21ms +[2025-08-22 15:44:30] [Rank 0] step:9941/10000 train_time:916697ms step_avg:92.21ms +[2025-08-22 15:44:32] [Rank 0] step:9961/10000 train_time:918650ms step_avg:92.22ms +[2025-08-22 15:44:32] [Rank 0] step:9961/10000 train_time:918650ms step_avg:92.22ms +[2025-08-22 15:44:34] [Rank 0] step:9981/10000 train_time:920612ms step_avg:92.24ms +[2025-08-22 15:44:34] [Rank 0] step:9981/10000 train_time:920612ms step_avg:92.24ms +[2025-08-22 15:44:36] [Rank 0] step:10000/10000 train_time:922481ms step_avg:92.25ms +[2025-08-22 15:44:36] [Rank 0] step:10000/10000 train_time:922481ms step_avg:92.25ms +[2025-08-22 15:44:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:44:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:44:50] [Rank 0] PRINT: step:10000/10000 val_loss:3.5509 svd_entropy: attn_qk:H=0.8982,top10E=0.09,eRank=391.8,q75/q25=14.49 attn_vo:H=0.8899,top10E=0.08,eRank=373.5,q75/q25=25.24 mlp_w1:H=0.8793,top10E=0.14,eRank=352.4,q75/q25=7.71 mlp_w2:H=0.9369,top10E=0.09,eRank=506.0,q75/q25=4.58 vo_prod:H=0.8151,top10E=0.15,eRank=230.8,q75/q25=410.82 train_time:922583ms step_avg:92.26ms +[2025-08-22 15:44:50] [Rank 0] PRINT: step:10000/10000 val_loss:3.5509 svd_entropy: attn_qk:H=0.8982,top10E=0.09,eRank=391.8,q75/q25=14.49 attn_vo:H=0.8899,top10E=0.08,eRank=373.5,q75/q25=25.24 mlp_w1:H=0.8793,top10E=0.14,eRank=352.4,q75/q25=7.71 mlp_w2:H=0.9369,top10E=0.09,eRank=506.0,q75/q25=4.58 vo_prod:H=0.8151,top10E=0.15,eRank=230.8,q75/q25=410.82 train_time:922583ms step_avg:92.26ms +[2025-08-22 15:44:50] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 15:44:50 2025 --- +[2025-08-22 15:44:50] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 15:44:50 2025 --- +[2025-08-22 15:44:50] [Rank 0] PRINT: Peak memory allocated: 11485 MiB reserved: 15856 MiB +[2025-08-22 15:44:50] [Rank 0] PRINT: Peak memory allocated: 11485 MiB reserved: 15856 MiB diff --git a/logs_svd_gated/mode_3_param_gated_seed_43/config.json b/logs_svd_gated/mode_3_param_gated_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..45f22d016688728b975f77b98ec2bd24262f9593 --- /dev/null +++ b/logs_svd_gated/mode_3_param_gated_seed_43/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 3, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "3111d285-89b1-4ad1-a13d-1623c772d29a", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_3_param_gated_seed_43/training_log_3111d285-89b1-4ad1-a13d-1623c772d29a.txt b/logs_svd_gated/mode_3_param_gated_seed_43/training_log_3111d285-89b1-4ad1-a13d-1623c772d29a.txt new file mode 100644 index 0000000000000000000000000000000000000000..805b6e6af4725aacb10c278813e056d096a211de --- /dev/null +++ b/logs_svd_gated/mode_3_param_gated_seed_43/training_log_3111d285-89b1-4ad1-a13d-1623c772d29a.txt @@ -0,0 +1,2926 @@ +[2025-08-22 20:30:36] [Rank 0] PRINT: --- Script Start: Fri Aug 22 20:30:36 2025 --- +[2025-08-22 20:30:36] [Rank 0] PRINT: --- Script Start: Fri Aug 22 20:30:36 2025 --- +[2025-08-22 20:30:36] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=3, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 20:30:36] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=3, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 20:30:36] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 20:30:36] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 20:30:36] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 20:30:36] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 20:30:36] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_3_param_gated_seed_43 +[2025-08-22 20:30:36] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_3_param_gated_seed_43 +[2025-08-22 20:30:36] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 20:30:36] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 20:30:36] [Rank 0] PRINT: Constructing model... +[2025-08-22 20:30:36] [Rank 0] PRINT: Constructing model... +[2025-08-22 20:30:38] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 20:30:38] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 20:30:38] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 20:30:38] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 20:30:38] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 20:30:38] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 20:30:38] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 3 +[2025-08-22 20:30:38] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 3 +[2025-08-22 20:30:38] [Rank 0] PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: 0.05). +[2025-08-22 20:30:38] [Rank 0] PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: 0.05). +[2025-08-22 20:30:38] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 20:30:38] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 20:30:38] [Rank 0] PRINT: Muon optimizer is active with 44 parameters. +[2025-08-22 20:30:38] [Rank 0] PRINT: Muon optimizer is active with 44 parameters. +[2025-08-22 20:30:38] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 20:30:38] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 20:30:38] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 20:30:38] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 20:30:38] [Rank 0] PRINT: Starting warmup... +[2025-08-22 20:30:38] [Rank 0] PRINT: Starting warmup... +[2025-08-22 20:31:21] [Rank 0] PRINT: Warmup complete. +[2025-08-22 20:31:21] [Rank 0] PRINT: Warmup complete. +[2025-08-22 20:31:21] [Rank 0] PRINT: Starting training... +[2025-08-22 20:31:21] [Rank 0] PRINT: Starting training... 
+[2025-08-22 20:31:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:31:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:31:39] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 20:31:39] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 20:31:41] [Rank 0] step:21/10000 train_time:1769ms step_avg:84.25ms +[2025-08-22 20:31:41] [Rank 0] step:21/10000 train_time:1769ms step_avg:84.25ms +[2025-08-22 20:31:42] [Rank 0] step:41/10000 train_time:3479ms step_avg:84.84ms +[2025-08-22 20:31:42] [Rank 0] step:41/10000 train_time:3479ms step_avg:84.84ms +[2025-08-22 20:31:44] [Rank 0] step:61/10000 train_time:5190ms step_avg:85.09ms +[2025-08-22 20:31:44] [Rank 0] step:61/10000 train_time:5190ms step_avg:85.09ms +[2025-08-22 20:31:46] [Rank 0] step:81/10000 train_time:6902ms step_avg:85.21ms +[2025-08-22 20:31:46] [Rank 0] step:81/10000 train_time:6902ms step_avg:85.21ms +[2025-08-22 20:31:47] [Rank 0] step:101/10000 train_time:8615ms step_avg:85.30ms +[2025-08-22 20:31:47] [Rank 0] step:101/10000 train_time:8615ms step_avg:85.30ms +[2025-08-22 20:31:49] [Rank 0] step:121/10000 train_time:10330ms step_avg:85.37ms +[2025-08-22 20:31:49] [Rank 0] step:121/10000 
train_time:10330ms step_avg:85.37ms +[2025-08-22 20:31:51] [Rank 0] step:141/10000 train_time:12048ms step_avg:85.45ms +[2025-08-22 20:31:51] [Rank 0] step:141/10000 train_time:12048ms step_avg:85.45ms +[2025-08-22 20:31:53] [Rank 0] step:161/10000 train_time:13767ms step_avg:85.51ms +[2025-08-22 20:31:53] [Rank 0] step:161/10000 train_time:13767ms step_avg:85.51ms +[2025-08-22 20:31:54] [Rank 0] step:181/10000 train_time:15486ms step_avg:85.56ms +[2025-08-22 20:31:54] [Rank 0] step:181/10000 train_time:15486ms step_avg:85.56ms +[2025-08-22 20:31:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:31:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:32:10] [Rank 0] PRINT: step:200/10000 val_loss:6.3935 svd_entropy: attn_qk:H=0.8216,top10E=0.18,eRank=242.4,q75/q25=13.96 attn_vo:H=0.7485,top10E=0.26,eRank=158.9,q75/q25=19.71 mlp_w1:H=0.4102,top10E=0.82,eRank=19.5,q75/q25=5.85 mlp_w2:H=0.4026,top10E=0.80,eRank=17.0,q75/q25=7.95 vo_prod:H=0.5297,top10E=0.60,eRank=43.6,q75/q25=163.92 train_time:17207ms step_avg:86.04ms +[2025-08-22 20:32:10] [Rank 0] PRINT: step:200/10000 val_loss:6.3935 svd_entropy: attn_qk:H=0.8216,top10E=0.18,eRank=242.4,q75/q25=13.96 attn_vo:H=0.7485,top10E=0.26,eRank=158.9,q75/q25=19.71 mlp_w1:H=0.4102,top10E=0.82,eRank=19.5,q75/q25=5.85 mlp_w2:H=0.4026,top10E=0.80,eRank=17.0,q75/q25=7.95 vo_prod:H=0.5297,top10E=0.60,eRank=43.6,q75/q25=163.92 train_time:17207ms step_avg:86.04ms +[2025-08-22 20:32:10] [Rank 0] step:201/10000 train_time:17228ms step_avg:85.71ms +[2025-08-22 20:32:10] [Rank 0] step:201/10000 train_time:17228ms step_avg:85.71ms +[2025-08-22 20:32:12] [Rank 0] step:221/10000 train_time:18948ms step_avg:85.74ms +[2025-08-22 20:32:12] [Rank 0] step:221/10000 train_time:18948ms step_avg:85.74ms +[2025-08-22 20:32:13] [Rank 0] step:241/10000 
train_time:20663ms step_avg:85.74ms +[2025-08-22 20:32:13] [Rank 0] step:241/10000 train_time:20663ms step_avg:85.74ms +[2025-08-22 20:32:15] [Rank 0] step:261/10000 train_time:22378ms step_avg:85.74ms +[2025-08-22 20:32:15] [Rank 0] step:261/10000 train_time:22378ms step_avg:85.74ms +[2025-08-22 20:32:17] [Rank 0] step:281/10000 train_time:24094ms step_avg:85.74ms +[2025-08-22 20:32:17] [Rank 0] step:281/10000 train_time:24094ms step_avg:85.74ms +[2025-08-22 20:32:19] [Rank 0] step:301/10000 train_time:25811ms step_avg:85.75ms +[2025-08-22 20:32:19] [Rank 0] step:301/10000 train_time:25811ms step_avg:85.75ms +[2025-08-22 20:32:20] [Rank 0] step:321/10000 train_time:27528ms step_avg:85.76ms +[2025-08-22 20:32:20] [Rank 0] step:321/10000 train_time:27528ms step_avg:85.76ms +[2025-08-22 20:32:22] [Rank 0] step:341/10000 train_time:29244ms step_avg:85.76ms +[2025-08-22 20:32:22] [Rank 0] step:341/10000 train_time:29244ms step_avg:85.76ms +[2025-08-22 20:32:24] [Rank 0] step:361/10000 train_time:30961ms step_avg:85.76ms +[2025-08-22 20:32:24] [Rank 0] step:361/10000 train_time:30961ms step_avg:85.76ms +[2025-08-22 20:32:25] [Rank 0] step:381/10000 train_time:32677ms step_avg:85.77ms +[2025-08-22 20:32:25] [Rank 0] step:381/10000 train_time:32677ms step_avg:85.77ms +[2025-08-22 20:32:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:32:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:32:41] [Rank 0] PRINT: step:400/10000 val_loss:5.7805 svd_entropy: attn_qk:H=0.8167,top10E=0.16,eRank=234.7,q75/q25=27.56 attn_vo:H=0.6993,top10E=0.28,eRank=122.8,q75/q25=23.23 mlp_w1:H=0.6017,top10E=0.53,eRank=62.0,q75/q25=7.55 mlp_w2:H=0.6030,top10E=0.53,eRank=58.6,q75/q25=8.65 vo_prod:H=0.5514,top10E=0.53,eRank=51.9,q75/q25=246.45 train_time:34395ms step_avg:85.99ms +[2025-08-22 20:32:41] [Rank 0] PRINT: step:400/10000 val_loss:5.7805 svd_entropy: attn_qk:H=0.8167,top10E=0.16,eRank=234.7,q75/q25=27.56 attn_vo:H=0.6993,top10E=0.28,eRank=122.8,q75/q25=23.23 mlp_w1:H=0.6017,top10E=0.53,eRank=62.0,q75/q25=7.55 mlp_w2:H=0.6030,top10E=0.53,eRank=58.6,q75/q25=8.65 vo_prod:H=0.5514,top10E=0.53,eRank=51.9,q75/q25=246.45 train_time:34395ms step_avg:85.99ms +[2025-08-22 20:32:41] [Rank 0] step:401/10000 train_time:34415ms step_avg:85.82ms +[2025-08-22 20:32:41] [Rank 0] step:401/10000 train_time:34415ms step_avg:85.82ms +[2025-08-22 20:32:43] [Rank 0] step:421/10000 train_time:36132ms step_avg:85.82ms +[2025-08-22 20:32:43] [Rank 0] step:421/10000 train_time:36132ms step_avg:85.82ms +[2025-08-22 20:32:45] [Rank 0] step:441/10000 train_time:37843ms step_avg:85.81ms +[2025-08-22 20:32:45] [Rank 0] step:441/10000 train_time:37843ms step_avg:85.81ms +[2025-08-22 20:32:46] [Rank 0] step:461/10000 train_time:39553ms step_avg:85.80ms +[2025-08-22 20:32:46] [Rank 0] step:461/10000 train_time:39553ms step_avg:85.80ms +[2025-08-22 20:32:48] [Rank 0] step:481/10000 train_time:41266ms step_avg:85.79ms +[2025-08-22 20:32:48] [Rank 0] step:481/10000 train_time:41266ms step_avg:85.79ms +[2025-08-22 20:32:50] [Rank 0] step:501/10000 train_time:42980ms step_avg:85.79ms +[2025-08-22 20:32:50] [Rank 0] step:501/10000 train_time:42980ms step_avg:85.79ms +[2025-08-22 20:32:51] [Rank 0] step:521/10000 train_time:44694ms step_avg:85.78ms +[2025-08-22 20:32:51] [Rank 0] step:521/10000 train_time:44694ms step_avg:85.78ms +[2025-08-22 20:32:53] [Rank 0] step:541/10000 
train_time:46408ms step_avg:85.78ms +[2025-08-22 20:32:53] [Rank 0] step:541/10000 train_time:46408ms step_avg:85.78ms +[2025-08-22 20:32:55] [Rank 0] step:561/10000 train_time:48121ms step_avg:85.78ms +[2025-08-22 20:32:55] [Rank 0] step:561/10000 train_time:48121ms step_avg:85.78ms +[2025-08-22 20:32:57] [Rank 0] step:581/10000 train_time:49836ms step_avg:85.78ms +[2025-08-22 20:32:57] [Rank 0] step:581/10000 train_time:49836ms step_avg:85.78ms +[2025-08-22 20:32:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:32:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:33:12] [Rank 0] PRINT: step:600/10000 val_loss:5.4333 svd_entropy: attn_qk:H=0.8207,top10E=0.15,eRank=239.5,q75/q25=47.05 attn_vo:H=0.7031,top10E=0.25,eRank=124.6,q75/q25=31.60 mlp_w1:H=0.6831,top10E=0.38,eRank=102.2,q75/q25=6.52 mlp_w2:H=0.7367,top10E=0.33,eRank=136.4,q75/q25=8.46 vo_prod:H=0.5798,top10E=0.47,eRank=59.4,q75/q25=589.73 train_time:51552ms step_avg:85.92ms +[2025-08-22 20:33:12] [Rank 0] PRINT: step:600/10000 val_loss:5.4333 svd_entropy: attn_qk:H=0.8207,top10E=0.15,eRank=239.5,q75/q25=47.05 attn_vo:H=0.7031,top10E=0.25,eRank=124.6,q75/q25=31.60 mlp_w1:H=0.6831,top10E=0.38,eRank=102.2,q75/q25=6.52 mlp_w2:H=0.7367,top10E=0.33,eRank=136.4,q75/q25=8.46 vo_prod:H=0.5798,top10E=0.47,eRank=59.4,q75/q25=589.73 train_time:51552ms step_avg:85.92ms +[2025-08-22 20:33:12] [Rank 0] step:601/10000 train_time:51571ms step_avg:85.81ms +[2025-08-22 20:33:12] [Rank 0] step:601/10000 train_time:51571ms step_avg:85.81ms +[2025-08-22 20:33:14] [Rank 0] step:621/10000 train_time:53294ms step_avg:85.82ms +[2025-08-22 20:33:14] [Rank 0] step:621/10000 train_time:53294ms step_avg:85.82ms +[2025-08-22 20:33:16] [Rank 0] step:641/10000 train_time:55001ms step_avg:85.81ms +[2025-08-22 20:33:16] [Rank 0] step:641/10000 
train_time:55001ms step_avg:85.81ms +[2025-08-22 20:33:17] [Rank 0] step:661/10000 train_time:56708ms step_avg:85.79ms +[2025-08-22 20:33:17] [Rank 0] step:661/10000 train_time:56708ms step_avg:85.79ms +[2025-08-22 20:33:19] [Rank 0] step:681/10000 train_time:58417ms step_avg:85.78ms +[2025-08-22 20:33:19] [Rank 0] step:681/10000 train_time:58417ms step_avg:85.78ms +[2025-08-22 20:33:21] [Rank 0] step:701/10000 train_time:60125ms step_avg:85.77ms +[2025-08-22 20:33:21] [Rank 0] step:701/10000 train_time:60125ms step_avg:85.77ms +[2025-08-22 20:33:22] [Rank 0] step:721/10000 train_time:61831ms step_avg:85.76ms +[2025-08-22 20:33:22] [Rank 0] step:721/10000 train_time:61831ms step_avg:85.76ms +[2025-08-22 20:33:24] [Rank 0] step:741/10000 train_time:63541ms step_avg:85.75ms +[2025-08-22 20:33:24] [Rank 0] step:741/10000 train_time:63541ms step_avg:85.75ms +[2025-08-22 20:33:26] [Rank 0] step:761/10000 train_time:65262ms step_avg:85.76ms +[2025-08-22 20:33:26] [Rank 0] step:761/10000 train_time:65262ms step_avg:85.76ms +[2025-08-22 20:33:28] [Rank 0] step:781/10000 train_time:66984ms step_avg:85.77ms +[2025-08-22 20:33:28] [Rank 0] step:781/10000 train_time:66984ms step_avg:85.77ms +[2025-08-22 20:33:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:33:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:33:43] [Rank 0] PRINT: step:800/10000 val_loss:5.1744 svd_entropy: attn_qk:H=0.8317,top10E=0.14,eRank=255.4,q75/q25=64.73 attn_vo:H=0.7267,top10E=0.22,eRank=140.6,q75/q25=58.39 mlp_w1:H=0.7153,top10E=0.33,eRank=125.5,q75/q25=7.18 mlp_w2:H=0.7886,top10E=0.25,eRank=190.4,q75/q25=9.26 vo_prod:H=0.6090,top10E=0.41,eRank=68.3,q75/q25=2783.89 train_time:68709ms step_avg:85.89ms +[2025-08-22 20:33:43] [Rank 0] PRINT: step:800/10000 val_loss:5.1744 svd_entropy: attn_qk:H=0.8317,top10E=0.14,eRank=255.4,q75/q25=64.73 attn_vo:H=0.7267,top10E=0.22,eRank=140.6,q75/q25=58.39 mlp_w1:H=0.7153,top10E=0.33,eRank=125.5,q75/q25=7.18 mlp_w2:H=0.7886,top10E=0.25,eRank=190.4,q75/q25=9.26 vo_prod:H=0.6090,top10E=0.41,eRank=68.3,q75/q25=2783.89 train_time:68709ms step_avg:85.89ms +[2025-08-22 20:33:43] [Rank 0] step:801/10000 train_time:68727ms step_avg:85.80ms +[2025-08-22 20:33:43] [Rank 0] step:801/10000 train_time:68727ms step_avg:85.80ms +[2025-08-22 20:33:45] [Rank 0] step:821/10000 train_time:70438ms step_avg:85.80ms +[2025-08-22 20:33:45] [Rank 0] step:821/10000 train_time:70438ms step_avg:85.80ms +[2025-08-22 20:33:47] [Rank 0] step:841/10000 train_time:72157ms step_avg:85.80ms +[2025-08-22 20:33:47] [Rank 0] step:841/10000 train_time:72157ms step_avg:85.80ms +[2025-08-22 20:33:48] [Rank 0] step:861/10000 train_time:73876ms step_avg:85.80ms +[2025-08-22 20:33:48] [Rank 0] step:861/10000 train_time:73876ms step_avg:85.80ms +[2025-08-22 20:33:50] [Rank 0] step:881/10000 train_time:75596ms step_avg:85.81ms +[2025-08-22 20:33:50] [Rank 0] step:881/10000 train_time:75596ms step_avg:85.81ms +[2025-08-22 20:33:52] [Rank 0] step:901/10000 train_time:77318ms step_avg:85.81ms +[2025-08-22 20:33:52] [Rank 0] step:901/10000 train_time:77318ms step_avg:85.81ms +[2025-08-22 20:33:53] [Rank 0] step:921/10000 train_time:79041ms step_avg:85.82ms +[2025-08-22 20:33:53] [Rank 0] step:921/10000 train_time:79041ms step_avg:85.82ms +[2025-08-22 20:33:55] [Rank 0] step:941/10000 
train_time:80764ms step_avg:85.83ms +[2025-08-22 20:33:55] [Rank 0] step:941/10000 train_time:80764ms step_avg:85.83ms +[2025-08-22 20:33:57] [Rank 0] step:961/10000 train_time:82486ms step_avg:85.83ms +[2025-08-22 20:33:57] [Rank 0] step:961/10000 train_time:82486ms step_avg:85.83ms +[2025-08-22 20:33:59] [Rank 0] step:981/10000 train_time:84210ms step_avg:85.84ms +[2025-08-22 20:33:59] [Rank 0] step:981/10000 train_time:84210ms step_avg:85.84ms +[2025-08-22 20:34:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:34:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:34:14] [Rank 0] PRINT: step:1000/10000 val_loss:5.0167 svd_entropy: attn_qk:H=0.8422,top10E=0.13,eRank=272.3,q75/q25=70.93 attn_vo:H=0.7508,top10E=0.19,eRank=160.3,q75/q25=101.48 mlp_w1:H=0.7361,top10E=0.30,eRank=143.1,q75/q25=7.87 mlp_w2:H=0.8171,top10E=0.21,eRank=229.2,q75/q25=9.80 vo_prod:H=0.6336,top10E=0.36,eRank=77.4,q75/q25=9868.01 train_time:85937ms step_avg:85.94ms +[2025-08-22 20:34:14] [Rank 0] PRINT: step:1000/10000 val_loss:5.0167 svd_entropy: attn_qk:H=0.8422,top10E=0.13,eRank=272.3,q75/q25=70.93 attn_vo:H=0.7508,top10E=0.19,eRank=160.3,q75/q25=101.48 mlp_w1:H=0.7361,top10E=0.30,eRank=143.1,q75/q25=7.87 mlp_w2:H=0.8171,top10E=0.21,eRank=229.2,q75/q25=9.80 vo_prod:H=0.6336,top10E=0.36,eRank=77.4,q75/q25=9868.01 train_time:85937ms step_avg:85.94ms +[2025-08-22 20:34:14] [Rank 0] step:1001/10000 train_time:85957ms step_avg:85.87ms +[2025-08-22 20:34:14] [Rank 0] step:1001/10000 train_time:85957ms step_avg:85.87ms +[2025-08-22 20:34:16] [Rank 0] step:1021/10000 train_time:87681ms step_avg:85.88ms +[2025-08-22 20:34:16] [Rank 0] step:1021/10000 train_time:87681ms step_avg:85.88ms +[2025-08-22 20:34:18] [Rank 0] step:1041/10000 train_time:89404ms step_avg:85.88ms +[2025-08-22 20:34:18] [Rank 0] 
step:1041/10000 train_time:89404ms step_avg:85.88ms +[2025-08-22 20:34:20] [Rank 0] step:1061/10000 train_time:91125ms step_avg:85.89ms +[2025-08-22 20:34:20] [Rank 0] step:1061/10000 train_time:91125ms step_avg:85.89ms +[2025-08-22 20:34:21] [Rank 0] step:1081/10000 train_time:92850ms step_avg:85.89ms +[2025-08-22 20:34:21] [Rank 0] step:1081/10000 train_time:92850ms step_avg:85.89ms +[2025-08-22 20:34:23] [Rank 0] step:1101/10000 train_time:94573ms step_avg:85.90ms +[2025-08-22 20:34:23] [Rank 0] step:1101/10000 train_time:94573ms step_avg:85.90ms +[2025-08-22 20:34:25] [Rank 0] step:1121/10000 train_time:96297ms step_avg:85.90ms +[2025-08-22 20:34:25] [Rank 0] step:1121/10000 train_time:96297ms step_avg:85.90ms +[2025-08-22 20:34:26] [Rank 0] step:1141/10000 train_time:98021ms step_avg:85.91ms +[2025-08-22 20:34:26] [Rank 0] step:1141/10000 train_time:98021ms step_avg:85.91ms +[2025-08-22 20:34:28] [Rank 0] step:1161/10000 train_time:99748ms step_avg:85.92ms +[2025-08-22 20:34:28] [Rank 0] step:1161/10000 train_time:99748ms step_avg:85.92ms +[2025-08-22 20:34:30] [Rank 0] step:1181/10000 train_time:101474ms step_avg:85.92ms +[2025-08-22 20:34:30] [Rank 0] step:1181/10000 train_time:101474ms step_avg:85.92ms +[2025-08-22 20:34:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:34:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:34:45] [Rank 0] PRINT: step:1200/10000 val_loss:4.9038 svd_entropy: attn_qk:H=0.8510,top10E=0.12,eRank=287.7,q75/q25=66.97 attn_vo:H=0.7725,top10E=0.17,eRank=181.3,q75/q25=141.84 mlp_w1:H=0.7523,top10E=0.28,eRank=158.2,q75/q25=8.59 mlp_w2:H=0.8369,top10E=0.19,eRank=260.9,q75/q25=10.06 vo_prod:H=0.6573,top10E=0.33,eRank=88.1,q75/q25=21377.13 train_time:103205ms step_avg:86.00ms +[2025-08-22 20:34:45] [Rank 0] PRINT: step:1200/10000 val_loss:4.9038 svd_entropy: attn_qk:H=0.8510,top10E=0.12,eRank=287.7,q75/q25=66.97 attn_vo:H=0.7725,top10E=0.17,eRank=181.3,q75/q25=141.84 mlp_w1:H=0.7523,top10E=0.28,eRank=158.2,q75/q25=8.59 mlp_w2:H=0.8369,top10E=0.19,eRank=260.9,q75/q25=10.06 vo_prod:H=0.6573,top10E=0.33,eRank=88.1,q75/q25=21377.13 train_time:103205ms step_avg:86.00ms +[2025-08-22 20:34:45] [Rank 0] step:1201/10000 train_time:103224ms step_avg:85.95ms +[2025-08-22 20:34:45] [Rank 0] step:1201/10000 train_time:103224ms step_avg:85.95ms +[2025-08-22 20:34:47] [Rank 0] step:1221/10000 train_time:104937ms step_avg:85.94ms +[2025-08-22 20:34:47] [Rank 0] step:1221/10000 train_time:104937ms step_avg:85.94ms +[2025-08-22 20:34:49] [Rank 0] step:1241/10000 train_time:106657ms step_avg:85.94ms +[2025-08-22 20:34:49] [Rank 0] step:1241/10000 train_time:106657ms step_avg:85.94ms +[2025-08-22 20:34:51] [Rank 0] step:1261/10000 train_time:108377ms step_avg:85.94ms +[2025-08-22 20:34:51] [Rank 0] step:1261/10000 train_time:108377ms step_avg:85.94ms +[2025-08-22 20:34:52] [Rank 0] step:1281/10000 train_time:110099ms step_avg:85.95ms +[2025-08-22 20:34:52] [Rank 0] step:1281/10000 train_time:110099ms step_avg:85.95ms +[2025-08-22 20:34:54] [Rank 0] step:1301/10000 train_time:111823ms step_avg:85.95ms +[2025-08-22 20:34:54] [Rank 0] step:1301/10000 train_time:111823ms step_avg:85.95ms +[2025-08-22 20:34:56] [Rank 0] step:1321/10000 train_time:113547ms step_avg:85.95ms +[2025-08-22 20:34:56] [Rank 0] step:1321/10000 train_time:113547ms step_avg:85.95ms +[2025-08-22 
20:34:58] [Rank 0] step:1341/10000 train_time:115270ms step_avg:85.96ms +[2025-08-22 20:34:58] [Rank 0] step:1341/10000 train_time:115270ms step_avg:85.96ms +[2025-08-22 20:34:59] [Rank 0] step:1361/10000 train_time:116993ms step_avg:85.96ms +[2025-08-22 20:34:59] [Rank 0] step:1361/10000 train_time:116993ms step_avg:85.96ms +[2025-08-22 20:35:01] [Rank 0] step:1381/10000 train_time:118717ms step_avg:85.96ms +[2025-08-22 20:35:01] [Rank 0] step:1381/10000 train_time:118717ms step_avg:85.96ms +[2025-08-22 20:35:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:35:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:35:17] [Rank 0] PRINT: step:1400/10000 val_loss:4.8210 svd_entropy: attn_qk:H=0.8583,top10E=0.12,eRank=301.5,q75/q25=58.08 attn_vo:H=0.7906,top10E=0.16,eRank=201.5,q75/q25=164.84 mlp_w1:H=0.7656,top10E=0.26,eRank=171.6,q75/q25=9.19 mlp_w2:H=0.8522,top10E=0.17,eRank=288.5,q75/q25=9.98 vo_prod:H=0.6782,top10E=0.30,eRank=99.4,q75/q25=31345.61 train_time:120456ms step_avg:86.04ms +[2025-08-22 20:35:17] [Rank 0] PRINT: step:1400/10000 val_loss:4.8210 svd_entropy: attn_qk:H=0.8583,top10E=0.12,eRank=301.5,q75/q25=58.08 attn_vo:H=0.7906,top10E=0.16,eRank=201.5,q75/q25=164.84 mlp_w1:H=0.7656,top10E=0.26,eRank=171.6,q75/q25=9.19 mlp_w2:H=0.8522,top10E=0.17,eRank=288.5,q75/q25=9.98 vo_prod:H=0.6782,top10E=0.30,eRank=99.4,q75/q25=31345.61 train_time:120456ms step_avg:86.04ms +[2025-08-22 20:35:17] [Rank 0] step:1401/10000 train_time:120475ms step_avg:85.99ms +[2025-08-22 20:35:17] [Rank 0] step:1401/10000 train_time:120475ms step_avg:85.99ms +[2025-08-22 20:35:18] [Rank 0] step:1421/10000 train_time:122202ms step_avg:86.00ms +[2025-08-22 20:35:18] [Rank 0] step:1421/10000 train_time:122202ms step_avg:86.00ms +[2025-08-22 20:35:20] [Rank 0] step:1441/10000 train_time:123920ms 
step_avg:86.00ms +[2025-08-22 20:35:20] [Rank 0] step:1441/10000 train_time:123920ms step_avg:86.00ms +[2025-08-22 20:35:22] [Rank 0] step:1461/10000 train_time:125638ms step_avg:85.99ms +[2025-08-22 20:35:22] [Rank 0] step:1461/10000 train_time:125638ms step_avg:85.99ms +[2025-08-22 20:35:24] [Rank 0] step:1481/10000 train_time:127359ms step_avg:86.00ms +[2025-08-22 20:35:24] [Rank 0] step:1481/10000 train_time:127359ms step_avg:86.00ms +[2025-08-22 20:35:25] [Rank 0] step:1501/10000 train_time:129090ms step_avg:86.00ms +[2025-08-22 20:35:25] [Rank 0] step:1501/10000 train_time:129090ms step_avg:86.00ms +[2025-08-22 20:35:27] [Rank 0] step:1521/10000 train_time:130822ms step_avg:86.01ms +[2025-08-22 20:35:27] [Rank 0] step:1521/10000 train_time:130822ms step_avg:86.01ms +[2025-08-22 20:35:29] [Rank 0] step:1541/10000 train_time:132555ms step_avg:86.02ms +[2025-08-22 20:35:29] [Rank 0] step:1541/10000 train_time:132555ms step_avg:86.02ms +[2025-08-22 20:35:30] [Rank 0] step:1561/10000 train_time:134288ms step_avg:86.03ms +[2025-08-22 20:35:30] [Rank 0] step:1561/10000 train_time:134288ms step_avg:86.03ms +[2025-08-22 20:35:32] [Rank 0] step:1581/10000 train_time:136021ms step_avg:86.03ms +[2025-08-22 20:35:32] [Rank 0] step:1581/10000 train_time:136021ms step_avg:86.03ms +[2025-08-22 20:35:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:35:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:35:48] [Rank 0] PRINT: step:1600/10000 val_loss:4.7228 svd_entropy: attn_qk:H=0.8643,top10E=0.11,eRank=313.2,q75/q25=49.35 attn_vo:H=0.8053,top10E=0.15,eRank=220.1,q75/q25=169.66 mlp_w1:H=0.7767,top10E=0.25,eRank=183.8,q75/q25=9.67 mlp_w2:H=0.8640,top10E=0.16,eRank=311.9,q75/q25=9.79 vo_prod:H=0.6961,top10E=0.29,eRank=110.5,q75/q25=35085.47 train_time:137758ms step_avg:86.10ms +[2025-08-22 20:35:48] [Rank 0] PRINT: step:1600/10000 val_loss:4.7228 svd_entropy: attn_qk:H=0.8643,top10E=0.11,eRank=313.2,q75/q25=49.35 attn_vo:H=0.8053,top10E=0.15,eRank=220.1,q75/q25=169.66 mlp_w1:H=0.7767,top10E=0.25,eRank=183.8,q75/q25=9.67 mlp_w2:H=0.8640,top10E=0.16,eRank=311.9,q75/q25=9.79 vo_prod:H=0.6961,top10E=0.29,eRank=110.5,q75/q25=35085.47 train_time:137758ms step_avg:86.10ms +[2025-08-22 20:35:48] [Rank 0] step:1601/10000 train_time:137776ms step_avg:86.06ms +[2025-08-22 20:35:48] [Rank 0] step:1601/10000 train_time:137776ms step_avg:86.06ms +[2025-08-22 20:35:50] [Rank 0] step:1621/10000 train_time:139501ms step_avg:86.06ms +[2025-08-22 20:35:50] [Rank 0] step:1621/10000 train_time:139501ms step_avg:86.06ms +[2025-08-22 20:35:51] [Rank 0] step:1641/10000 train_time:141230ms step_avg:86.06ms +[2025-08-22 20:35:51] [Rank 0] step:1641/10000 train_time:141230ms step_avg:86.06ms +[2025-08-22 20:35:53] [Rank 0] step:1661/10000 train_time:142963ms step_avg:86.07ms +[2025-08-22 20:35:53] [Rank 0] step:1661/10000 train_time:142963ms step_avg:86.07ms +[2025-08-22 20:35:55] [Rank 0] step:1681/10000 train_time:144696ms step_avg:86.08ms +[2025-08-22 20:35:55] [Rank 0] step:1681/10000 train_time:144696ms step_avg:86.08ms +[2025-08-22 20:35:57] [Rank 0] step:1701/10000 train_time:146429ms step_avg:86.08ms +[2025-08-22 20:35:57] [Rank 0] step:1701/10000 train_time:146429ms step_avg:86.08ms +[2025-08-22 20:35:58] [Rank 0] step:1721/10000 train_time:148162ms step_avg:86.09ms +[2025-08-22 20:35:58] [Rank 0] step:1721/10000 train_time:148162ms step_avg:86.09ms +[2025-08-22 
20:36:00] [Rank 0] step:1741/10000 train_time:149897ms step_avg:86.10ms +[2025-08-22 20:36:00] [Rank 0] step:1741/10000 train_time:149897ms step_avg:86.10ms +[2025-08-22 20:36:02] [Rank 0] step:1761/10000 train_time:151632ms step_avg:86.11ms +[2025-08-22 20:36:02] [Rank 0] step:1761/10000 train_time:151632ms step_avg:86.11ms +[2025-08-22 20:36:03] [Rank 0] step:1781/10000 train_time:153369ms step_avg:86.11ms +[2025-08-22 20:36:03] [Rank 0] step:1781/10000 train_time:153369ms step_avg:86.11ms +[2025-08-22 20:36:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:36:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:36:19] [Rank 0] PRINT: step:1800/10000 val_loss:4.6152 svd_entropy: attn_qk:H=0.8693,top10E=0.11,eRank=323.6,q75/q25=41.79 attn_vo:H=0.8178,top10E=0.14,eRank=237.6,q75/q25=161.08 mlp_w1:H=0.7863,top10E=0.24,eRank=194.9,q75/q25=10.05 mlp_w2:H=0.8738,top10E=0.14,eRank=332.8,q75/q25=9.34 vo_prod:H=0.7114,top10E=0.27,eRank=121.5,q75/q25=33147.04 train_time:155108ms step_avg:86.17ms +[2025-08-22 20:36:19] [Rank 0] PRINT: step:1800/10000 val_loss:4.6152 svd_entropy: attn_qk:H=0.8693,top10E=0.11,eRank=323.6,q75/q25=41.79 attn_vo:H=0.8178,top10E=0.14,eRank=237.6,q75/q25=161.08 mlp_w1:H=0.7863,top10E=0.24,eRank=194.9,q75/q25=10.05 mlp_w2:H=0.8738,top10E=0.14,eRank=332.8,q75/q25=9.34 vo_prod:H=0.7114,top10E=0.27,eRank=121.5,q75/q25=33147.04 train_time:155108ms step_avg:86.17ms +[2025-08-22 20:36:19] [Rank 0] step:1801/10000 train_time:155128ms step_avg:86.13ms +[2025-08-22 20:36:19] [Rank 0] step:1801/10000 train_time:155128ms step_avg:86.13ms +[2025-08-22 20:36:21] [Rank 0] step:1821/10000 train_time:156924ms step_avg:86.17ms +[2025-08-22 20:36:21] [Rank 0] step:1821/10000 train_time:156924ms step_avg:86.17ms +[2025-08-22 20:36:23] [Rank 0] step:1841/10000 train_time:158658ms 
step_avg:86.18ms +[2025-08-22 20:36:23] [Rank 0] step:1841/10000 train_time:158658ms step_avg:86.18ms +[2025-08-22 20:36:24] [Rank 0] step:1861/10000 train_time:160391ms step_avg:86.19ms +[2025-08-22 20:36:24] [Rank 0] step:1861/10000 train_time:160391ms step_avg:86.19ms +[2025-08-22 20:36:26] [Rank 0] step:1881/10000 train_time:162127ms step_avg:86.19ms +[2025-08-22 20:36:26] [Rank 0] step:1881/10000 train_time:162127ms step_avg:86.19ms +[2025-08-22 20:36:28] [Rank 0] step:1901/10000 train_time:163862ms step_avg:86.20ms +[2025-08-22 20:36:28] [Rank 0] step:1901/10000 train_time:163862ms step_avg:86.20ms +[2025-08-22 20:36:30] [Rank 0] step:1921/10000 train_time:165597ms step_avg:86.20ms +[2025-08-22 20:36:30] [Rank 0] step:1921/10000 train_time:165597ms step_avg:86.20ms +[2025-08-22 20:36:31] [Rank 0] step:1941/10000 train_time:167332ms step_avg:86.21ms +[2025-08-22 20:36:31] [Rank 0] step:1941/10000 train_time:167332ms step_avg:86.21ms +[2025-08-22 20:36:33] [Rank 0] step:1961/10000 train_time:169069ms step_avg:86.22ms +[2025-08-22 20:36:33] [Rank 0] step:1961/10000 train_time:169069ms step_avg:86.22ms +[2025-08-22 20:36:35] [Rank 0] step:1981/10000 train_time:170820ms step_avg:86.23ms +[2025-08-22 20:36:35] [Rank 0] step:1981/10000 train_time:170820ms step_avg:86.23ms +[2025-08-22 20:36:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:36:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:36:50] [Rank 0] PRINT: step:2000/10000 val_loss:4.5181 svd_entropy: attn_qk:H=0.8737,top10E=0.10,eRank=332.9,q75/q25=36.03 attn_vo:H=0.8282,top10E=0.13,eRank=253.6,q75/q25=144.52 mlp_w1:H=0.7951,top10E=0.23,eRank=205.7,q75/q25=10.28 mlp_w2:H=0.8820,top10E=0.14,eRank=351.2,q75/q25=8.88 vo_prod:H=0.7244,top10E=0.25,eRank=131.9,q75/q25=27330.50 train_time:172560ms step_avg:86.28ms +[2025-08-22 20:36:50] [Rank 0] PRINT: step:2000/10000 val_loss:4.5181 svd_entropy: attn_qk:H=0.8737,top10E=0.10,eRank=332.9,q75/q25=36.03 attn_vo:H=0.8282,top10E=0.13,eRank=253.6,q75/q25=144.52 mlp_w1:H=0.7951,top10E=0.23,eRank=205.7,q75/q25=10.28 mlp_w2:H=0.8820,top10E=0.14,eRank=351.2,q75/q25=8.88 vo_prod:H=0.7244,top10E=0.25,eRank=131.9,q75/q25=27330.50 train_time:172560ms step_avg:86.28ms +[2025-08-22 20:36:51] [Rank 0] step:2001/10000 train_time:172580ms step_avg:86.25ms +[2025-08-22 20:36:51] [Rank 0] step:2001/10000 train_time:172580ms step_avg:86.25ms +[2025-08-22 20:36:52] [Rank 0] step:2021/10000 train_time:174314ms step_avg:86.25ms +[2025-08-22 20:36:52] [Rank 0] step:2021/10000 train_time:174314ms step_avg:86.25ms +[2025-08-22 20:36:54] [Rank 0] step:2041/10000 train_time:176290ms step_avg:86.37ms +[2025-08-22 20:36:54] [Rank 0] step:2041/10000 train_time:176290ms step_avg:86.37ms +[2025-08-22 20:36:56] [Rank 0] step:2061/10000 train_time:178024ms step_avg:86.38ms +[2025-08-22 20:36:56] [Rank 0] step:2061/10000 train_time:178024ms step_avg:86.38ms +[2025-08-22 20:36:58] [Rank 0] step:2081/10000 train_time:179758ms step_avg:86.38ms +[2025-08-22 20:36:58] [Rank 0] step:2081/10000 train_time:179758ms step_avg:86.38ms +[2025-08-22 20:36:59] [Rank 0] step:2101/10000 train_time:181491ms step_avg:86.38ms +[2025-08-22 20:36:59] [Rank 0] step:2101/10000 train_time:181491ms step_avg:86.38ms +[2025-08-22 20:37:01] [Rank 0] step:2121/10000 train_time:183225ms step_avg:86.39ms +[2025-08-22 20:37:01] [Rank 0] step:2121/10000 train_time:183225ms step_avg:86.39ms +[2025-08-22 
20:37:03] [Rank 0] step:2141/10000 train_time:184960ms step_avg:86.39ms +[2025-08-22 20:37:03] [Rank 0] step:2141/10000 train_time:184960ms step_avg:86.39ms +[2025-08-22 20:37:05] [Rank 0] step:2161/10000 train_time:186695ms step_avg:86.39ms +[2025-08-22 20:37:05] [Rank 0] step:2161/10000 train_time:186695ms step_avg:86.39ms +[2025-08-22 20:37:06] [Rank 0] step:2181/10000 train_time:188431ms step_avg:86.40ms +[2025-08-22 20:37:06] [Rank 0] step:2181/10000 train_time:188431ms step_avg:86.40ms +[2025-08-22 20:37:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:37:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:37:22] [Rank 0] PRINT: step:2200/10000 val_loss:4.4036 svd_entropy: attn_qk:H=0.8772,top10E=0.10,eRank=340.7,q75/q25=31.91 attn_vo:H=0.8362,top10E=0.12,eRank=266.8,q75/q25=127.29 mlp_w1:H=0.8026,top10E=0.22,eRank=215.4,q75/q25=10.47 mlp_w2:H=0.8886,top10E=0.13,eRank=366.9,q75/q25=8.47 vo_prod:H=0.7345,top10E=0.24,eRank=140.7,q75/q25=21321.91 train_time:190168ms step_avg:86.44ms +[2025-08-22 20:37:22] [Rank 0] PRINT: step:2200/10000 val_loss:4.4036 svd_entropy: attn_qk:H=0.8772,top10E=0.10,eRank=340.7,q75/q25=31.91 attn_vo:H=0.8362,top10E=0.12,eRank=266.8,q75/q25=127.29 mlp_w1:H=0.8026,top10E=0.22,eRank=215.4,q75/q25=10.47 mlp_w2:H=0.8886,top10E=0.13,eRank=366.9,q75/q25=8.47 vo_prod:H=0.7345,top10E=0.24,eRank=140.7,q75/q25=21321.91 train_time:190168ms step_avg:86.44ms +[2025-08-22 20:37:22] [Rank 0] step:2201/10000 train_time:190188ms step_avg:86.41ms +[2025-08-22 20:37:22] [Rank 0] step:2201/10000 train_time:190188ms step_avg:86.41ms +[2025-08-22 20:37:24] [Rank 0] step:2221/10000 train_time:191990ms step_avg:86.44ms +[2025-08-22 20:37:24] [Rank 0] step:2221/10000 train_time:191990ms step_avg:86.44ms +[2025-08-22 20:37:26] [Rank 0] step:2241/10000 train_time:193757ms 
step_avg:86.46ms +[2025-08-22 20:37:26] [Rank 0] step:2241/10000 train_time:193757ms step_avg:86.46ms +[2025-08-22 20:37:27] [Rank 0] step:2261/10000 train_time:195534ms step_avg:86.48ms +[2025-08-22 20:37:27] [Rank 0] step:2261/10000 train_time:195534ms step_avg:86.48ms +[2025-08-22 20:37:29] [Rank 0] step:2281/10000 train_time:197309ms step_avg:86.50ms +[2025-08-22 20:37:29] [Rank 0] step:2281/10000 train_time:197309ms step_avg:86.50ms +[2025-08-22 20:37:31] [Rank 0] step:2301/10000 train_time:199084ms step_avg:86.52ms +[2025-08-22 20:37:31] [Rank 0] step:2301/10000 train_time:199084ms step_avg:86.52ms +[2025-08-22 20:37:33] [Rank 0] step:2321/10000 train_time:200862ms step_avg:86.54ms +[2025-08-22 20:37:33] [Rank 0] step:2321/10000 train_time:200862ms step_avg:86.54ms +[2025-08-22 20:37:35] [Rank 0] step:2341/10000 train_time:202641ms step_avg:86.56ms +[2025-08-22 20:37:35] [Rank 0] step:2341/10000 train_time:202641ms step_avg:86.56ms +[2025-08-22 20:37:36] [Rank 0] step:2361/10000 train_time:204419ms step_avg:86.58ms +[2025-08-22 20:37:36] [Rank 0] step:2361/10000 train_time:204419ms step_avg:86.58ms +[2025-08-22 20:37:38] [Rank 0] step:2381/10000 train_time:206197ms step_avg:86.60ms +[2025-08-22 20:37:38] [Rank 0] step:2381/10000 train_time:206197ms step_avg:86.60ms +[2025-08-22 20:37:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:37:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:37:54] [Rank 0] PRINT: step:2400/10000 val_loss:4.3214 svd_entropy: attn_qk:H=0.8799,top10E=0.10,eRank=346.6,q75/q25=28.81 attn_vo:H=0.8424,top10E=0.12,eRank=277.6,q75/q25=110.01 mlp_w1:H=0.8093,top10E=0.22,eRank=224.5,q75/q25=10.53 mlp_w2:H=0.8940,top10E=0.13,eRank=380.3,q75/q25=8.01 vo_prod:H=0.7418,top10E=0.24,eRank=147.5,q75/q25=15287.47 train_time:207979ms step_avg:86.66ms +[2025-08-22 20:37:54] [Rank 0] PRINT: step:2400/10000 val_loss:4.3214 svd_entropy: attn_qk:H=0.8799,top10E=0.10,eRank=346.6,q75/q25=28.81 attn_vo:H=0.8424,top10E=0.12,eRank=277.6,q75/q25=110.01 mlp_w1:H=0.8093,top10E=0.22,eRank=224.5,q75/q25=10.53 mlp_w2:H=0.8940,top10E=0.13,eRank=380.3,q75/q25=8.01 vo_prod:H=0.7418,top10E=0.24,eRank=147.5,q75/q25=15287.47 train_time:207979ms step_avg:86.66ms +[2025-08-22 20:37:54] [Rank 0] step:2401/10000 train_time:207998ms step_avg:86.63ms +[2025-08-22 20:37:54] [Rank 0] step:2401/10000 train_time:207998ms step_avg:86.63ms +[2025-08-22 20:37:56] [Rank 0] step:2421/10000 train_time:209786ms step_avg:86.65ms +[2025-08-22 20:37:56] [Rank 0] step:2421/10000 train_time:209786ms step_avg:86.65ms +[2025-08-22 20:37:57] [Rank 0] step:2441/10000 train_time:211564ms step_avg:86.67ms +[2025-08-22 20:37:57] [Rank 0] step:2441/10000 train_time:211564ms step_avg:86.67ms +[2025-08-22 20:37:59] [Rank 0] step:2461/10000 train_time:213341ms step_avg:86.69ms +[2025-08-22 20:37:59] [Rank 0] step:2461/10000 train_time:213341ms step_avg:86.69ms +[2025-08-22 20:38:01] [Rank 0] step:2481/10000 train_time:215120ms step_avg:86.71ms +[2025-08-22 20:38:01] [Rank 0] step:2481/10000 train_time:215120ms step_avg:86.71ms +[2025-08-22 20:38:03] [Rank 0] step:2501/10000 train_time:216900ms step_avg:86.73ms +[2025-08-22 20:38:03] [Rank 0] step:2501/10000 train_time:216900ms step_avg:86.73ms +[2025-08-22 20:38:04] [Rank 0] step:2521/10000 train_time:218681ms step_avg:86.74ms +[2025-08-22 20:38:04] [Rank 0] step:2521/10000 train_time:218681ms step_avg:86.74ms +[2025-08-22 
20:38:06] [Rank 0] step:2541/10000 train_time:220462ms step_avg:86.76ms +[2025-08-22 20:38:06] [Rank 0] step:2541/10000 train_time:220462ms step_avg:86.76ms +[2025-08-22 20:38:08] [Rank 0] step:2561/10000 train_time:222241ms step_avg:86.78ms +[2025-08-22 20:38:08] [Rank 0] step:2561/10000 train_time:222241ms step_avg:86.78ms +[2025-08-22 20:38:10] [Rank 0] step:2581/10000 train_time:224023ms step_avg:86.80ms +[2025-08-22 20:38:10] [Rank 0] step:2581/10000 train_time:224023ms step_avg:86.80ms +[2025-08-22 20:38:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:38:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:38:25] [Rank 0] PRINT: step:2600/10000 val_loss:4.2529 svd_entropy: attn_qk:H=0.8824,top10E=0.10,eRank=352.3,q75/q25=26.02 attn_vo:H=0.8476,top10E=0.12,eRank=286.9,q75/q25=97.20 mlp_w1:H=0.8153,top10E=0.21,eRank=232.9,q75/q25=10.56 mlp_w2:H=0.8986,top10E=0.12,eRank=392.0,q75/q25=7.66 vo_prod:H=0.7485,top10E=0.23,eRank=153.9,q75/q25=11513.79 train_time:225806ms step_avg:86.85ms +[2025-08-22 20:38:25] [Rank 0] PRINT: step:2600/10000 val_loss:4.2529 svd_entropy: attn_qk:H=0.8824,top10E=0.10,eRank=352.3,q75/q25=26.02 attn_vo:H=0.8476,top10E=0.12,eRank=286.9,q75/q25=97.20 mlp_w1:H=0.8153,top10E=0.21,eRank=232.9,q75/q25=10.56 mlp_w2:H=0.8986,top10E=0.12,eRank=392.0,q75/q25=7.66 vo_prod:H=0.7485,top10E=0.23,eRank=153.9,q75/q25=11513.79 train_time:225806ms step_avg:86.85ms +[2025-08-22 20:38:26] [Rank 0] step:2601/10000 train_time:225826ms step_avg:86.82ms +[2025-08-22 20:38:26] [Rank 0] step:2601/10000 train_time:225826ms step_avg:86.82ms +[2025-08-22 20:38:27] [Rank 0] step:2621/10000 train_time:227661ms step_avg:86.86ms +[2025-08-22 20:38:27] [Rank 0] step:2621/10000 train_time:227661ms step_avg:86.86ms +[2025-08-22 20:38:29] [Rank 0] step:2641/10000 train_time:229436ms 
step_avg:86.87ms +[2025-08-22 20:38:29] [Rank 0] step:2641/10000 train_time:229436ms step_avg:86.87ms +[2025-08-22 20:38:31] [Rank 0] step:2661/10000 train_time:231214ms step_avg:86.89ms +[2025-08-22 20:38:31] [Rank 0] step:2661/10000 train_time:231214ms step_avg:86.89ms +[2025-08-22 20:38:33] [Rank 0] step:2681/10000 train_time:232990ms step_avg:86.90ms +[2025-08-22 20:38:33] [Rank 0] step:2681/10000 train_time:232990ms step_avg:86.90ms +[2025-08-22 20:38:35] [Rank 0] step:2701/10000 train_time:234767ms step_avg:86.92ms +[2025-08-22 20:38:35] [Rank 0] step:2701/10000 train_time:234767ms step_avg:86.92ms +[2025-08-22 20:38:36] [Rank 0] step:2721/10000 train_time:236544ms step_avg:86.93ms +[2025-08-22 20:38:36] [Rank 0] step:2721/10000 train_time:236544ms step_avg:86.93ms +[2025-08-22 20:38:38] [Rank 0] step:2741/10000 train_time:238321ms step_avg:86.95ms +[2025-08-22 20:38:38] [Rank 0] step:2741/10000 train_time:238321ms step_avg:86.95ms +[2025-08-22 20:38:40] [Rank 0] step:2761/10000 train_time:240101ms step_avg:86.96ms +[2025-08-22 20:38:40] [Rank 0] step:2761/10000 train_time:240101ms step_avg:86.96ms +[2025-08-22 20:38:42] [Rank 0] step:2781/10000 train_time:241882ms step_avg:86.98ms +[2025-08-22 20:38:42] [Rank 0] step:2781/10000 train_time:241882ms step_avg:86.98ms +[2025-08-22 20:38:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:38:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:38:57] [Rank 0] PRINT: step:2800/10000 val_loss:4.2117 svd_entropy: attn_qk:H=0.8846,top10E=0.10,eRank=357.5,q75/q25=24.24 attn_vo:H=0.8520,top10E=0.11,eRank=295.0,q75/q25=85.26 mlp_w1:H=0.8205,top10E=0.20,eRank=240.6,q75/q25=10.58 mlp_w2:H=0.9026,top10E=0.12,eRank=402.4,q75/q25=7.30 vo_prod:H=0.7544,top10E=0.22,eRank=159.9,q75/q25=8484.40 train_time:243663ms step_avg:87.02ms +[2025-08-22 20:38:57] [Rank 0] PRINT: step:2800/10000 val_loss:4.2117 svd_entropy: attn_qk:H=0.8846,top10E=0.10,eRank=357.5,q75/q25=24.24 attn_vo:H=0.8520,top10E=0.11,eRank=295.0,q75/q25=85.26 mlp_w1:H=0.8205,top10E=0.20,eRank=240.6,q75/q25=10.58 mlp_w2:H=0.9026,top10E=0.12,eRank=402.4,q75/q25=7.30 vo_prod:H=0.7544,top10E=0.22,eRank=159.9,q75/q25=8484.40 train_time:243663ms step_avg:87.02ms +[2025-08-22 20:38:57] [Rank 0] step:2801/10000 train_time:243683ms step_avg:87.00ms +[2025-08-22 20:38:57] [Rank 0] step:2801/10000 train_time:243683ms step_avg:87.00ms +[2025-08-22 20:38:59] [Rank 0] step:2821/10000 train_time:245468ms step_avg:87.01ms +[2025-08-22 20:38:59] [Rank 0] step:2821/10000 train_time:245468ms step_avg:87.01ms +[2025-08-22 20:39:01] [Rank 0] step:2841/10000 train_time:247245ms step_avg:87.03ms +[2025-08-22 20:39:01] [Rank 0] step:2841/10000 train_time:247245ms step_avg:87.03ms +[2025-08-22 20:39:03] [Rank 0] step:2861/10000 train_time:249022ms step_avg:87.04ms +[2025-08-22 20:39:03] [Rank 0] step:2861/10000 train_time:249022ms step_avg:87.04ms +[2025-08-22 20:39:04] [Rank 0] step:2881/10000 train_time:250800ms step_avg:87.05ms +[2025-08-22 20:39:04] [Rank 0] step:2881/10000 train_time:250800ms step_avg:87.05ms +[2025-08-22 20:39:06] [Rank 0] step:2901/10000 train_time:252576ms step_avg:87.07ms +[2025-08-22 20:39:06] [Rank 0] step:2901/10000 train_time:252576ms step_avg:87.07ms +[2025-08-22 20:39:08] [Rank 0] step:2921/10000 train_time:254352ms step_avg:87.08ms +[2025-08-22 20:39:08] [Rank 0] step:2921/10000 train_time:254352ms step_avg:87.08ms +[2025-08-22 
20:39:10] [Rank 0] step:2941/10000 train_time:256131ms step_avg:87.09ms +[2025-08-22 20:39:10] [Rank 0] step:2941/10000 train_time:256131ms step_avg:87.09ms +[2025-08-22 20:39:12] [Rank 0] step:2961/10000 train_time:257910ms step_avg:87.10ms +[2025-08-22 20:39:12] [Rank 0] step:2961/10000 train_time:257910ms step_avg:87.10ms +[2025-08-22 20:39:13] [Rank 0] step:2981/10000 train_time:259696ms step_avg:87.12ms +[2025-08-22 20:39:13] [Rank 0] step:2981/10000 train_time:259696ms step_avg:87.12ms +[2025-08-22 20:39:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:39:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:39:29] [Rank 0] PRINT: step:3000/10000 val_loss:4.1646 svd_entropy: attn_qk:H=0.8865,top10E=0.09,eRank=362.1,q75/q25=22.68 attn_vo:H=0.8557,top10E=0.11,eRank=302.1,q75/q25=78.03 mlp_w1:H=0.8252,top10E=0.20,eRank=247.8,q75/q25=10.49 mlp_w2:H=0.9059,top10E=0.11,eRank=411.3,q75/q25=7.04 vo_prod:H=0.7597,top10E=0.22,eRank=165.3,q75/q25=6887.23 train_time:261485ms step_avg:87.16ms +[2025-08-22 20:39:29] [Rank 0] PRINT: step:3000/10000 val_loss:4.1646 svd_entropy: attn_qk:H=0.8865,top10E=0.09,eRank=362.1,q75/q25=22.68 attn_vo:H=0.8557,top10E=0.11,eRank=302.1,q75/q25=78.03 mlp_w1:H=0.8252,top10E=0.20,eRank=247.8,q75/q25=10.49 mlp_w2:H=0.9059,top10E=0.11,eRank=411.3,q75/q25=7.04 vo_prod:H=0.7597,top10E=0.22,eRank=165.3,q75/q25=6887.23 train_time:261485ms step_avg:87.16ms +[2025-08-22 20:39:29] [Rank 0] step:3001/10000 train_time:261504ms step_avg:87.14ms +[2025-08-22 20:39:29] [Rank 0] step:3001/10000 train_time:261504ms step_avg:87.14ms +[2025-08-22 20:39:31] [Rank 0] step:3021/10000 train_time:263290ms step_avg:87.15ms +[2025-08-22 20:39:31] [Rank 0] step:3021/10000 train_time:263290ms step_avg:87.15ms +[2025-08-22 20:39:33] [Rank 0] step:3041/10000 train_time:265071ms 
step_avg:87.17ms +[2025-08-22 20:39:33] [Rank 0] step:3041/10000 train_time:265071ms step_avg:87.17ms +[2025-08-22 20:39:35] [Rank 0] step:3061/10000 train_time:266853ms step_avg:87.18ms +[2025-08-22 20:39:35] [Rank 0] step:3061/10000 train_time:266853ms step_avg:87.18ms +[2025-08-22 20:39:36] [Rank 0] step:3081/10000 train_time:268636ms step_avg:87.19ms +[2025-08-22 20:39:36] [Rank 0] step:3081/10000 train_time:268636ms step_avg:87.19ms +[2025-08-22 20:39:38] [Rank 0] step:3101/10000 train_time:270419ms step_avg:87.20ms +[2025-08-22 20:39:38] [Rank 0] step:3101/10000 train_time:270419ms step_avg:87.20ms +[2025-08-22 20:39:40] [Rank 0] step:3121/10000 train_time:272205ms step_avg:87.22ms +[2025-08-22 20:39:40] [Rank 0] step:3121/10000 train_time:272205ms step_avg:87.22ms +[2025-08-22 20:39:42] [Rank 0] step:3141/10000 train_time:273990ms step_avg:87.23ms +[2025-08-22 20:39:42] [Rank 0] step:3141/10000 train_time:273990ms step_avg:87.23ms +[2025-08-22 20:39:43] [Rank 0] step:3161/10000 train_time:275778ms step_avg:87.24ms +[2025-08-22 20:39:43] [Rank 0] step:3161/10000 train_time:275778ms step_avg:87.24ms +[2025-08-22 20:39:45] [Rank 0] step:3181/10000 train_time:277565ms step_avg:87.26ms +[2025-08-22 20:39:45] [Rank 0] step:3181/10000 train_time:277565ms step_avg:87.26ms +[2025-08-22 20:39:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:39:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:40:01] [Rank 0] PRINT: step:3200/10000 val_loss:4.1280 svd_entropy: attn_qk:H=0.8882,top10E=0.09,eRank=366.0,q75/q25=21.44 attn_vo:H=0.8590,top10E=0.11,eRank=308.4,q75/q25=70.43 mlp_w1:H=0.8294,top10E=0.19,eRank=254.4,q75/q25=10.43 mlp_w2:H=0.9088,top10E=0.11,eRank=419.2,q75/q25=6.80 vo_prod:H=0.7646,top10E=0.21,eRank=170.4,q75/q25=5569.94 train_time:279357ms step_avg:87.30ms +[2025-08-22 20:40:01] [Rank 0] PRINT: step:3200/10000 val_loss:4.1280 svd_entropy: attn_qk:H=0.8882,top10E=0.09,eRank=366.0,q75/q25=21.44 attn_vo:H=0.8590,top10E=0.11,eRank=308.4,q75/q25=70.43 mlp_w1:H=0.8294,top10E=0.19,eRank=254.4,q75/q25=10.43 mlp_w2:H=0.9088,top10E=0.11,eRank=419.2,q75/q25=6.80 vo_prod:H=0.7646,top10E=0.21,eRank=170.4,q75/q25=5569.94 train_time:279357ms step_avg:87.30ms +[2025-08-22 20:40:01] [Rank 0] step:3201/10000 train_time:279376ms step_avg:87.28ms +[2025-08-22 20:40:01] [Rank 0] step:3201/10000 train_time:279376ms step_avg:87.28ms +[2025-08-22 20:40:03] [Rank 0] step:3221/10000 train_time:281162ms step_avg:87.29ms +[2025-08-22 20:40:03] [Rank 0] step:3221/10000 train_time:281162ms step_avg:87.29ms +[2025-08-22 20:40:05] [Rank 0] step:3241/10000 train_time:282948ms step_avg:87.30ms +[2025-08-22 20:40:05] [Rank 0] step:3241/10000 train_time:282948ms step_avg:87.30ms +[2025-08-22 20:40:06] [Rank 0] step:3261/10000 train_time:284734ms step_avg:87.31ms +[2025-08-22 20:40:06] [Rank 0] step:3261/10000 train_time:284734ms step_avg:87.31ms +[2025-08-22 20:40:08] [Rank 0] step:3281/10000 train_time:286523ms step_avg:87.33ms +[2025-08-22 20:40:08] [Rank 0] step:3281/10000 train_time:286523ms step_avg:87.33ms +[2025-08-22 20:40:10] [Rank 0] step:3301/10000 train_time:288310ms step_avg:87.34ms +[2025-08-22 20:40:10] [Rank 0] step:3301/10000 train_time:288310ms step_avg:87.34ms +[2025-08-22 20:40:12] [Rank 0] step:3321/10000 train_time:290100ms step_avg:87.35ms +[2025-08-22 20:40:12] [Rank 0] step:3321/10000 train_time:290100ms step_avg:87.35ms +[2025-08-22 
20:40:13] [Rank 0] step:3341/10000 train_time:291889ms step_avg:87.37ms +[2025-08-22 20:40:13] [Rank 0] step:3341/10000 train_time:291889ms step_avg:87.37ms +[2025-08-22 20:40:15] [Rank 0] step:3361/10000 train_time:293679ms step_avg:87.38ms +[2025-08-22 20:40:15] [Rank 0] step:3361/10000 train_time:293679ms step_avg:87.38ms +[2025-08-22 20:40:17] [Rank 0] step:3381/10000 train_time:295473ms step_avg:87.39ms +[2025-08-22 20:40:17] [Rank 0] step:3381/10000 train_time:295473ms step_avg:87.39ms +[2025-08-22 20:40:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:40:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:40:32] [Rank 0] PRINT: step:3400/10000 val_loss:4.0984 svd_entropy: attn_qk:H=0.8897,top10E=0.09,eRank=369.8,q75/q25=20.32 attn_vo:H=0.8620,top10E=0.11,eRank=314.4,q75/q25=64.43 mlp_w1:H=0.8333,top10E=0.19,eRank=260.7,q75/q25=10.35 mlp_w2:H=0.9113,top10E=0.11,eRank=426.3,q75/q25=6.56 vo_prod:H=0.7692,top10E=0.21,eRank=175.3,q75/q25=4436.76 train_time:297267ms step_avg:87.43ms +[2025-08-22 20:40:32] [Rank 0] PRINT: step:3400/10000 val_loss:4.0984 svd_entropy: attn_qk:H=0.8897,top10E=0.09,eRank=369.8,q75/q25=20.32 attn_vo:H=0.8620,top10E=0.11,eRank=314.4,q75/q25=64.43 mlp_w1:H=0.8333,top10E=0.19,eRank=260.7,q75/q25=10.35 mlp_w2:H=0.9113,top10E=0.11,eRank=426.3,q75/q25=6.56 vo_prod:H=0.7692,top10E=0.21,eRank=175.3,q75/q25=4436.76 train_time:297267ms step_avg:87.43ms +[2025-08-22 20:40:33] [Rank 0] step:3401/10000 train_time:297287ms step_avg:87.41ms +[2025-08-22 20:40:33] [Rank 0] step:3401/10000 train_time:297287ms step_avg:87.41ms +[2025-08-22 20:40:34] [Rank 0] step:3421/10000 train_time:299078ms step_avg:87.42ms +[2025-08-22 20:40:34] [Rank 0] step:3421/10000 train_time:299078ms step_avg:87.42ms +[2025-08-22 20:40:36] [Rank 0] step:3441/10000 train_time:300863ms 
step_avg:87.43ms +[2025-08-22 20:40:36] [Rank 0] step:3441/10000 train_time:300863ms step_avg:87.43ms +[2025-08-22 20:40:38] [Rank 0] step:3461/10000 train_time:302647ms step_avg:87.45ms +[2025-08-22 20:40:38] [Rank 0] step:3461/10000 train_time:302647ms step_avg:87.45ms +[2025-08-22 20:40:40] [Rank 0] step:3481/10000 train_time:304431ms step_avg:87.46ms +[2025-08-22 20:40:40] [Rank 0] step:3481/10000 train_time:304431ms step_avg:87.46ms +[2025-08-22 20:40:41] [Rank 0] step:3501/10000 train_time:306219ms step_avg:87.47ms +[2025-08-22 20:40:41] [Rank 0] step:3501/10000 train_time:306219ms step_avg:87.47ms +[2025-08-22 20:40:43] [Rank 0] step:3521/10000 train_time:308005ms step_avg:87.48ms +[2025-08-22 20:40:43] [Rank 0] step:3521/10000 train_time:308005ms step_avg:87.48ms +[2025-08-22 20:40:45] [Rank 0] step:3541/10000 train_time:309789ms step_avg:87.49ms +[2025-08-22 20:40:45] [Rank 0] step:3541/10000 train_time:309789ms step_avg:87.49ms +[2025-08-22 20:40:47] [Rank 0] step:3561/10000 train_time:311574ms step_avg:87.50ms +[2025-08-22 20:40:47] [Rank 0] step:3561/10000 train_time:311574ms step_avg:87.50ms +[2025-08-22 20:40:49] [Rank 0] step:3581/10000 train_time:313360ms step_avg:87.51ms +[2025-08-22 20:40:49] [Rank 0] step:3581/10000 train_time:313360ms step_avg:87.51ms +[2025-08-22 20:40:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:40:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:41:04] [Rank 0] PRINT: step:3600/10000 val_loss:4.0776 svd_entropy: attn_qk:H=0.8911,top10E=0.09,eRank=373.1,q75/q25=19.53 attn_vo:H=0.8646,top10E=0.10,eRank=319.6,q75/q25=59.14 mlp_w1:H=0.8367,top10E=0.19,eRank=266.3,q75/q25=10.27 mlp_w2:H=0.9135,top10E=0.11,eRank=432.5,q75/q25=6.38 vo_prod:H=0.7732,top10E=0.20,eRank=179.8,q75/q25=3601.94 train_time:315149ms step_avg:87.54ms +[2025-08-22 20:41:04] [Rank 0] PRINT: step:3600/10000 val_loss:4.0776 svd_entropy: attn_qk:H=0.8911,top10E=0.09,eRank=373.1,q75/q25=19.53 attn_vo:H=0.8646,top10E=0.10,eRank=319.6,q75/q25=59.14 mlp_w1:H=0.8367,top10E=0.19,eRank=266.3,q75/q25=10.27 mlp_w2:H=0.9135,top10E=0.11,eRank=432.5,q75/q25=6.38 vo_prod:H=0.7732,top10E=0.20,eRank=179.8,q75/q25=3601.94 train_time:315149ms step_avg:87.54ms +[2025-08-22 20:41:04] [Rank 0] step:3601/10000 train_time:315169ms step_avg:87.52ms +[2025-08-22 20:41:04] [Rank 0] step:3601/10000 train_time:315169ms step_avg:87.52ms +[2025-08-22 20:41:06] [Rank 0] step:3621/10000 train_time:316943ms step_avg:87.53ms +[2025-08-22 20:41:06] [Rank 0] step:3621/10000 train_time:316943ms step_avg:87.53ms +[2025-08-22 20:41:08] [Rank 0] step:3641/10000 train_time:318726ms step_avg:87.54ms +[2025-08-22 20:41:08] [Rank 0] step:3641/10000 train_time:318726ms step_avg:87.54ms +[2025-08-22 20:41:10] [Rank 0] step:3661/10000 train_time:320511ms step_avg:87.55ms +[2025-08-22 20:41:10] [Rank 0] step:3661/10000 train_time:320511ms step_avg:87.55ms +[2025-08-22 20:41:11] [Rank 0] step:3681/10000 train_time:322295ms step_avg:87.56ms +[2025-08-22 20:41:11] [Rank 0] step:3681/10000 train_time:322295ms step_avg:87.56ms +[2025-08-22 20:41:13] [Rank 0] step:3701/10000 train_time:324080ms step_avg:87.57ms +[2025-08-22 20:41:13] [Rank 0] step:3701/10000 train_time:324080ms step_avg:87.57ms +[2025-08-22 20:41:15] [Rank 0] step:3721/10000 train_time:325895ms step_avg:87.58ms +[2025-08-22 20:41:15] [Rank 0] step:3721/10000 train_time:325895ms step_avg:87.58ms +[2025-08-22 
20:41:17] [Rank 0] step:3741/10000 train_time:327718ms step_avg:87.60ms +[2025-08-22 20:41:17] [Rank 0] step:3741/10000 train_time:327718ms step_avg:87.60ms +[2025-08-22 20:41:19] [Rank 0] step:3761/10000 train_time:329543ms step_avg:87.62ms +[2025-08-22 20:41:19] [Rank 0] step:3761/10000 train_time:329543ms step_avg:87.62ms +[2025-08-22 20:41:20] [Rank 0] step:3781/10000 train_time:331368ms step_avg:87.64ms +[2025-08-22 20:41:20] [Rank 0] step:3781/10000 train_time:331368ms step_avg:87.64ms +[2025-08-22 20:41:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:41:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:41:36] [Rank 0] PRINT: step:3800/10000 val_loss:4.0269 svd_entropy: attn_qk:H=0.8922,top10E=0.09,eRank=375.8,q75/q25=18.76 attn_vo:H=0.8669,top10E=0.10,eRank=324.2,q75/q25=54.90 mlp_w1:H=0.8400,top10E=0.18,eRank=271.9,q75/q25=10.17 mlp_w2:H=0.9154,top10E=0.11,eRank=437.8,q75/q25=6.22 vo_prod:H=0.7767,top10E=0.20,eRank=183.7,q75/q25=3019.99 train_time:333200ms step_avg:87.68ms +[2025-08-22 20:41:36] [Rank 0] PRINT: step:3800/10000 val_loss:4.0269 svd_entropy: attn_qk:H=0.8922,top10E=0.09,eRank=375.8,q75/q25=18.76 attn_vo:H=0.8669,top10E=0.10,eRank=324.2,q75/q25=54.90 mlp_w1:H=0.8400,top10E=0.18,eRank=271.9,q75/q25=10.17 mlp_w2:H=0.9154,top10E=0.11,eRank=437.8,q75/q25=6.22 vo_prod:H=0.7767,top10E=0.20,eRank=183.7,q75/q25=3019.99 train_time:333200ms step_avg:87.68ms +[2025-08-22 20:41:36] [Rank 0] step:3801/10000 train_time:333218ms step_avg:87.67ms +[2025-08-22 20:41:36] [Rank 0] step:3801/10000 train_time:333218ms step_avg:87.67ms +[2025-08-22 20:41:38] [Rank 0] step:3821/10000 train_time:335033ms step_avg:87.68ms +[2025-08-22 20:41:38] [Rank 0] step:3821/10000 train_time:335033ms step_avg:87.68ms +[2025-08-22 20:41:40] [Rank 0] step:3841/10000 train_time:336860ms 
step_avg:87.70ms +[2025-08-22 20:41:40] [Rank 0] step:3841/10000 train_time:336860ms step_avg:87.70ms +[2025-08-22 20:41:42] [Rank 0] step:3861/10000 train_time:338686ms step_avg:87.72ms +[2025-08-22 20:41:42] [Rank 0] step:3861/10000 train_time:338686ms step_avg:87.72ms +[2025-08-22 20:41:44] [Rank 0] step:3881/10000 train_time:340511ms step_avg:87.74ms +[2025-08-22 20:41:44] [Rank 0] step:3881/10000 train_time:340511ms step_avg:87.74ms +[2025-08-22 20:41:45] [Rank 0] step:3901/10000 train_time:342337ms step_avg:87.76ms +[2025-08-22 20:41:45] [Rank 0] step:3901/10000 train_time:342337ms step_avg:87.76ms +[2025-08-22 20:41:47] [Rank 0] step:3921/10000 train_time:344163ms step_avg:87.77ms +[2025-08-22 20:41:47] [Rank 0] step:3921/10000 train_time:344163ms step_avg:87.77ms +[2025-08-22 20:41:49] [Rank 0] step:3941/10000 train_time:345990ms step_avg:87.79ms +[2025-08-22 20:41:49] [Rank 0] step:3941/10000 train_time:345990ms step_avg:87.79ms +[2025-08-22 20:41:51] [Rank 0] step:3961/10000 train_time:347815ms step_avg:87.81ms +[2025-08-22 20:41:51] [Rank 0] step:3961/10000 train_time:347815ms step_avg:87.81ms +[2025-08-22 20:41:53] [Rank 0] step:3981/10000 train_time:349640ms step_avg:87.83ms +[2025-08-22 20:41:53] [Rank 0] step:3981/10000 train_time:349640ms step_avg:87.83ms +[2025-08-22 20:41:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:41:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:42:08] [Rank 0] PRINT: step:4000/10000 val_loss:3.9976 svd_entropy: attn_qk:H=0.8931,top10E=0.09,eRank=378.1,q75/q25=18.17 attn_vo:H=0.8689,top10E=0.10,eRank=328.4,q75/q25=51.32 mlp_w1:H=0.8430,top10E=0.18,eRank=277.0,q75/q25=10.06 mlp_w2:H=0.9171,top10E=0.10,eRank=442.8,q75/q25=6.08 vo_prod:H=0.7800,top10E=0.19,eRank=187.4,q75/q25=2548.95 train_time:351469ms step_avg:87.87ms +[2025-08-22 20:42:08] [Rank 0] PRINT: step:4000/10000 val_loss:3.9976 svd_entropy: attn_qk:H=0.8931,top10E=0.09,eRank=378.1,q75/q25=18.17 attn_vo:H=0.8689,top10E=0.10,eRank=328.4,q75/q25=51.32 mlp_w1:H=0.8430,top10E=0.18,eRank=277.0,q75/q25=10.06 mlp_w2:H=0.9171,top10E=0.10,eRank=442.8,q75/q25=6.08 vo_prod:H=0.7800,top10E=0.19,eRank=187.4,q75/q25=2548.95 train_time:351469ms step_avg:87.87ms +[2025-08-22 20:42:09] [Rank 0] step:4001/10000 train_time:351488ms step_avg:87.85ms +[2025-08-22 20:42:09] [Rank 0] step:4001/10000 train_time:351488ms step_avg:87.85ms +[2025-08-22 20:42:10] [Rank 0] step:4021/10000 train_time:353300ms step_avg:87.86ms +[2025-08-22 20:42:10] [Rank 0] step:4021/10000 train_time:353300ms step_avg:87.86ms +[2025-08-22 20:42:12] [Rank 0] step:4041/10000 train_time:355118ms step_avg:87.88ms +[2025-08-22 20:42:12] [Rank 0] step:4041/10000 train_time:355118ms step_avg:87.88ms +[2025-08-22 20:42:14] [Rank 0] step:4061/10000 train_time:356935ms step_avg:87.89ms +[2025-08-22 20:42:14] [Rank 0] step:4061/10000 train_time:356935ms step_avg:87.89ms +[2025-08-22 20:42:16] [Rank 0] step:4081/10000 train_time:359004ms step_avg:87.97ms +[2025-08-22 20:42:16] [Rank 0] step:4081/10000 train_time:359004ms step_avg:87.97ms +[2025-08-22 20:42:18] [Rank 0] step:4101/10000 train_time:360824ms step_avg:87.98ms +[2025-08-22 20:42:18] [Rank 0] step:4101/10000 train_time:360824ms step_avg:87.98ms +[2025-08-22 20:42:20] [Rank 0] step:4121/10000 train_time:362643ms step_avg:88.00ms +[2025-08-22 20:42:20] [Rank 0] step:4121/10000 train_time:362643ms step_avg:88.00ms +[2025-08-22 
20:42:21] [Rank 0] step:4141/10000 train_time:364465ms step_avg:88.01ms +[2025-08-22 20:42:21] [Rank 0] step:4141/10000 train_time:364465ms step_avg:88.01ms +[2025-08-22 20:42:23] [Rank 0] step:4161/10000 train_time:366286ms step_avg:88.03ms +[2025-08-22 20:42:23] [Rank 0] step:4161/10000 train_time:366286ms step_avg:88.03ms +[2025-08-22 20:42:25] [Rank 0] step:4181/10000 train_time:368109ms step_avg:88.04ms +[2025-08-22 20:42:25] [Rank 0] step:4181/10000 train_time:368109ms step_avg:88.04ms +[2025-08-22 20:42:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:42:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:42:41] [Rank 0] PRINT: step:4200/10000 val_loss:3.9845 svd_entropy: attn_qk:H=0.8940,top10E=0.09,eRank=380.3,q75/q25=17.67 attn_vo:H=0.8709,top10E=0.10,eRank=332.4,q75/q25=48.80 mlp_w1:H=0.8456,top10E=0.18,eRank=281.7,q75/q25=9.94 mlp_w2:H=0.9185,top10E=0.10,eRank=447.2,q75/q25=5.98 vo_prod:H=0.7831,top10E=0.19,eRank=191.0,q75/q25=2207.34 train_time:369935ms step_avg:88.08ms +[2025-08-22 20:42:41] [Rank 0] PRINT: step:4200/10000 val_loss:3.9845 svd_entropy: attn_qk:H=0.8940,top10E=0.09,eRank=380.3,q75/q25=17.67 attn_vo:H=0.8709,top10E=0.10,eRank=332.4,q75/q25=48.80 mlp_w1:H=0.8456,top10E=0.18,eRank=281.7,q75/q25=9.94 mlp_w2:H=0.9185,top10E=0.10,eRank=447.2,q75/q25=5.98 vo_prod:H=0.7831,top10E=0.19,eRank=191.0,q75/q25=2207.34 train_time:369935ms step_avg:88.08ms +[2025-08-22 20:42:41] [Rank 0] step:4201/10000 train_time:369954ms step_avg:88.06ms +[2025-08-22 20:42:41] [Rank 0] step:4201/10000 train_time:369954ms step_avg:88.06ms +[2025-08-22 20:42:43] [Rank 0] step:4221/10000 train_time:371764ms step_avg:88.07ms +[2025-08-22 20:42:43] [Rank 0] step:4221/10000 train_time:371764ms step_avg:88.07ms +[2025-08-22 20:42:45] [Rank 0] step:4241/10000 train_time:373584ms 
step_avg:88.09ms +[2025-08-22 20:42:45] [Rank 0] step:4241/10000 train_time:373584ms step_avg:88.09ms +[2025-08-22 20:42:46] [Rank 0] step:4261/10000 train_time:375402ms step_avg:88.10ms +[2025-08-22 20:42:46] [Rank 0] step:4261/10000 train_time:375402ms step_avg:88.10ms +[2025-08-22 20:42:48] [Rank 0] step:4281/10000 train_time:377225ms step_avg:88.12ms +[2025-08-22 20:42:48] [Rank 0] step:4281/10000 train_time:377225ms step_avg:88.12ms +[2025-08-22 20:42:50] [Rank 0] step:4301/10000 train_time:379046ms step_avg:88.13ms +[2025-08-22 20:42:50] [Rank 0] step:4301/10000 train_time:379046ms step_avg:88.13ms +[2025-08-22 20:42:52] [Rank 0] step:4321/10000 train_time:380868ms step_avg:88.14ms +[2025-08-22 20:42:52] [Rank 0] step:4321/10000 train_time:380868ms step_avg:88.14ms +[2025-08-22 20:42:54] [Rank 0] step:4341/10000 train_time:382687ms step_avg:88.16ms +[2025-08-22 20:42:54] [Rank 0] step:4341/10000 train_time:382687ms step_avg:88.16ms +[2025-08-22 20:42:56] [Rank 0] step:4361/10000 train_time:384509ms step_avg:88.17ms +[2025-08-22 20:42:56] [Rank 0] step:4361/10000 train_time:384509ms step_avg:88.17ms +[2025-08-22 20:42:57] [Rank 0] step:4381/10000 train_time:386333ms step_avg:88.18ms +[2025-08-22 20:42:57] [Rank 0] step:4381/10000 train_time:386333ms step_avg:88.18ms +[2025-08-22 20:42:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:42:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:43:13] [Rank 0] PRINT: step:4400/10000 val_loss:3.9558 svd_entropy: attn_qk:H=0.8948,top10E=0.09,eRank=382.4,q75/q25=17.17 attn_vo:H=0.8726,top10E=0.10,eRank=336.0,q75/q25=45.75 mlp_w1:H=0.8481,top10E=0.18,eRank=286.2,q75/q25=9.83 mlp_w2:H=0.9199,top10E=0.10,eRank=451.1,q75/q25=5.86 vo_prod:H=0.7860,top10E=0.19,eRank=194.3,q75/q25=1912.75 train_time:388159ms step_avg:88.22ms +[2025-08-22 20:43:13] [Rank 0] PRINT: step:4400/10000 val_loss:3.9558 svd_entropy: attn_qk:H=0.8948,top10E=0.09,eRank=382.4,q75/q25=17.17 attn_vo:H=0.8726,top10E=0.10,eRank=336.0,q75/q25=45.75 mlp_w1:H=0.8481,top10E=0.18,eRank=286.2,q75/q25=9.83 mlp_w2:H=0.9199,top10E=0.10,eRank=451.1,q75/q25=5.86 vo_prod:H=0.7860,top10E=0.19,eRank=194.3,q75/q25=1912.75 train_time:388159ms step_avg:88.22ms +[2025-08-22 20:43:13] [Rank 0] step:4401/10000 train_time:388181ms step_avg:88.20ms +[2025-08-22 20:43:13] [Rank 0] step:4401/10000 train_time:388181ms step_avg:88.20ms +[2025-08-22 20:43:15] [Rank 0] step:4421/10000 train_time:389997ms step_avg:88.21ms +[2025-08-22 20:43:15] [Rank 0] step:4421/10000 train_time:389997ms step_avg:88.21ms +[2025-08-22 20:43:17] [Rank 0] step:4441/10000 train_time:391813ms step_avg:88.23ms +[2025-08-22 20:43:17] [Rank 0] step:4441/10000 train_time:391813ms step_avg:88.23ms +[2025-08-22 20:43:18] [Rank 0] step:4461/10000 train_time:393637ms step_avg:88.24ms +[2025-08-22 20:43:18] [Rank 0] step:4461/10000 train_time:393637ms step_avg:88.24ms +[2025-08-22 20:43:20] [Rank 0] step:4481/10000 train_time:395465ms step_avg:88.25ms +[2025-08-22 20:43:20] [Rank 0] step:4481/10000 train_time:395465ms step_avg:88.25ms +[2025-08-22 20:43:22] [Rank 0] step:4501/10000 train_time:397289ms step_avg:88.27ms +[2025-08-22 20:43:22] [Rank 0] step:4501/10000 train_time:397289ms step_avg:88.27ms +[2025-08-22 20:43:24] [Rank 0] step:4521/10000 train_time:399116ms step_avg:88.28ms +[2025-08-22 20:43:24] [Rank 0] step:4521/10000 train_time:399116ms step_avg:88.28ms +[2025-08-22 
20:43:26] [Rank 0] step:4541/10000 train_time:400946ms step_avg:88.29ms +[2025-08-22 20:43:26] [Rank 0] step:4541/10000 train_time:400946ms step_avg:88.29ms +[2025-08-22 20:43:28] [Rank 0] step:4561/10000 train_time:402774ms step_avg:88.31ms +[2025-08-22 20:43:28] [Rank 0] step:4561/10000 train_time:402774ms step_avg:88.31ms +[2025-08-22 20:43:29] [Rank 0] step:4581/10000 train_time:404606ms step_avg:88.32ms +[2025-08-22 20:43:29] [Rank 0] step:4581/10000 train_time:404606ms step_avg:88.32ms +[2025-08-22 20:43:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:43:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:43:45] [Rank 0] PRINT: step:4600/10000 val_loss:3.9334 svd_entropy: attn_qk:H=0.8956,top10E=0.09,eRank=384.4,q75/q25=16.71 attn_vo:H=0.8743,top10E=0.10,eRank=339.4,q75/q25=43.12 mlp_w1:H=0.8504,top10E=0.17,eRank=290.5,q75/q25=9.71 mlp_w2:H=0.9211,top10E=0.10,eRank=454.8,q75/q25=5.76 vo_prod:H=0.7886,top10E=0.18,eRank=197.5,q75/q25=1623.73 train_time:406440ms step_avg:88.36ms +[2025-08-22 20:43:45] [Rank 0] PRINT: step:4600/10000 val_loss:3.9334 svd_entropy: attn_qk:H=0.8956,top10E=0.09,eRank=384.4,q75/q25=16.71 attn_vo:H=0.8743,top10E=0.10,eRank=339.4,q75/q25=43.12 mlp_w1:H=0.8504,top10E=0.17,eRank=290.5,q75/q25=9.71 mlp_w2:H=0.9211,top10E=0.10,eRank=454.8,q75/q25=5.76 vo_prod:H=0.7886,top10E=0.18,eRank=197.5,q75/q25=1623.73 train_time:406440ms step_avg:88.36ms +[2025-08-22 20:43:45] [Rank 0] step:4601/10000 train_time:406459ms step_avg:88.34ms +[2025-08-22 20:43:45] [Rank 0] step:4601/10000 train_time:406459ms step_avg:88.34ms +[2025-08-22 20:43:47] [Rank 0] step:4621/10000 train_time:408292ms step_avg:88.36ms +[2025-08-22 20:43:47] [Rank 0] step:4621/10000 train_time:408292ms step_avg:88.36ms +[2025-08-22 20:43:49] [Rank 0] step:4641/10000 train_time:410124ms 
step_avg:88.37ms +[2025-08-22 20:43:49] [Rank 0] step:4641/10000 train_time:410124ms step_avg:88.37ms +[2025-08-22 20:43:51] [Rank 0] step:4661/10000 train_time:411952ms step_avg:88.38ms +[2025-08-22 20:43:51] [Rank 0] step:4661/10000 train_time:411952ms step_avg:88.38ms +[2025-08-22 20:43:52] [Rank 0] step:4681/10000 train_time:413782ms step_avg:88.40ms +[2025-08-22 20:43:52] [Rank 0] step:4681/10000 train_time:413782ms step_avg:88.40ms +[2025-08-22 20:43:54] [Rank 0] step:4701/10000 train_time:415612ms step_avg:88.41ms +[2025-08-22 20:43:54] [Rank 0] step:4701/10000 train_time:415612ms step_avg:88.41ms +[2025-08-22 20:43:56] [Rank 0] step:4721/10000 train_time:417446ms step_avg:88.42ms +[2025-08-22 20:43:56] [Rank 0] step:4721/10000 train_time:417446ms step_avg:88.42ms +[2025-08-22 20:43:58] [Rank 0] step:4741/10000 train_time:419279ms step_avg:88.44ms +[2025-08-22 20:43:58] [Rank 0] step:4741/10000 train_time:419279ms step_avg:88.44ms +[2025-08-22 20:44:00] [Rank 0] step:4761/10000 train_time:421112ms step_avg:88.45ms +[2025-08-22 20:44:00] [Rank 0] step:4761/10000 train_time:421112ms step_avg:88.45ms +[2025-08-22 20:44:02] [Rank 0] step:4781/10000 train_time:422946ms step_avg:88.46ms +[2025-08-22 20:44:02] [Rank 0] step:4781/10000 train_time:422946ms step_avg:88.46ms +[2025-08-22 20:44:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:44:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:44:17] [Rank 0] PRINT: step:4800/10000 val_loss:3.9165 svd_entropy: attn_qk:H=0.8964,top10E=0.09,eRank=386.3,q75/q25=16.31 attn_vo:H=0.8758,top10E=0.10,eRank=342.6,q75/q25=41.02 mlp_w1:H=0.8526,top10E=0.17,eRank=294.6,q75/q25=9.61 mlp_w2:H=0.9222,top10E=0.10,eRank=458.1,q75/q25=5.69 vo_prod:H=0.7912,top10E=0.18,eRank=200.6,q75/q25=1454.80 train_time:424782ms step_avg:88.50ms +[2025-08-22 20:44:17] [Rank 0] PRINT: step:4800/10000 val_loss:3.9165 svd_entropy: attn_qk:H=0.8964,top10E=0.09,eRank=386.3,q75/q25=16.31 attn_vo:H=0.8758,top10E=0.10,eRank=342.6,q75/q25=41.02 mlp_w1:H=0.8526,top10E=0.17,eRank=294.6,q75/q25=9.61 mlp_w2:H=0.9222,top10E=0.10,eRank=458.1,q75/q25=5.69 vo_prod:H=0.7912,top10E=0.18,eRank=200.6,q75/q25=1454.80 train_time:424782ms step_avg:88.50ms +[2025-08-22 20:44:17] [Rank 0] step:4801/10000 train_time:424800ms step_avg:88.48ms +[2025-08-22 20:44:17] [Rank 0] step:4801/10000 train_time:424800ms step_avg:88.48ms +[2025-08-22 20:44:19] [Rank 0] step:4821/10000 train_time:426636ms step_avg:88.50ms +[2025-08-22 20:44:19] [Rank 0] step:4821/10000 train_time:426636ms step_avg:88.50ms +[2025-08-22 20:44:21] [Rank 0] step:4841/10000 train_time:428459ms step_avg:88.51ms +[2025-08-22 20:44:21] [Rank 0] step:4841/10000 train_time:428459ms step_avg:88.51ms +[2025-08-22 20:44:23] [Rank 0] step:4861/10000 train_time:430287ms step_avg:88.52ms +[2025-08-22 20:44:23] [Rank 0] step:4861/10000 train_time:430287ms step_avg:88.52ms +[2025-08-22 20:44:24] [Rank 0] step:4881/10000 train_time:432113ms step_avg:88.53ms +[2025-08-22 20:44:24] [Rank 0] step:4881/10000 train_time:432113ms step_avg:88.53ms +[2025-08-22 20:44:26] [Rank 0] step:4901/10000 train_time:433938ms step_avg:88.54ms +[2025-08-22 20:44:26] [Rank 0] step:4901/10000 train_time:433938ms step_avg:88.54ms +[2025-08-22 20:44:28] [Rank 0] step:4921/10000 train_time:435765ms step_avg:88.55ms +[2025-08-22 20:44:28] [Rank 0] step:4921/10000 train_time:435765ms step_avg:88.55ms +[2025-08-22 
20:44:30] [Rank 0] step:4941/10000 train_time:437595ms step_avg:88.56ms +[2025-08-22 20:44:30] [Rank 0] step:4941/10000 train_time:437595ms step_avg:88.56ms +[2025-08-22 20:44:32] [Rank 0] step:4961/10000 train_time:439425ms step_avg:88.58ms +[2025-08-22 20:44:32] [Rank 0] step:4961/10000 train_time:439425ms step_avg:88.58ms +[2025-08-22 20:44:34] [Rank 0] step:4981/10000 train_time:441257ms step_avg:88.59ms +[2025-08-22 20:44:34] [Rank 0] step:4981/10000 train_time:441257ms step_avg:88.59ms +[2025-08-22 20:44:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:44:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:44:49] [Rank 0] PRINT: step:5000/10000 val_loss:3.8977 svd_entropy: attn_qk:H=0.8970,top10E=0.09,eRank=387.9,q75/q25=15.96 attn_vo:H=0.8772,top10E=0.09,eRank=345.6,q75/q25=39.17 mlp_w1:H=0.8547,top10E=0.17,eRank=298.5,q75/q25=9.51 mlp_w2:H=0.9232,top10E=0.10,eRank=461.2,q75/q25=5.61 vo_prod:H=0.7935,top10E=0.18,eRank=203.3,q75/q25=1280.33 train_time:443090ms step_avg:88.62ms +[2025-08-22 20:44:49] [Rank 0] PRINT: step:5000/10000 val_loss:3.8977 svd_entropy: attn_qk:H=0.8970,top10E=0.09,eRank=387.9,q75/q25=15.96 attn_vo:H=0.8772,top10E=0.09,eRank=345.6,q75/q25=39.17 mlp_w1:H=0.8547,top10E=0.17,eRank=298.5,q75/q25=9.51 mlp_w2:H=0.9232,top10E=0.10,eRank=461.2,q75/q25=5.61 vo_prod:H=0.7935,top10E=0.18,eRank=203.3,q75/q25=1280.33 train_time:443090ms step_avg:88.62ms +[2025-08-22 20:44:49] [Rank 0] step:5001/10000 train_time:443109ms step_avg:88.60ms +[2025-08-22 20:44:49] [Rank 0] step:5001/10000 train_time:443109ms step_avg:88.60ms +[2025-08-22 20:44:51] [Rank 0] step:5021/10000 train_time:444942ms step_avg:88.62ms +[2025-08-22 20:44:51] [Rank 0] step:5021/10000 train_time:444942ms step_avg:88.62ms +[2025-08-22 20:44:53] [Rank 0] step:5041/10000 train_time:446771ms 
step_avg:88.63ms +[2025-08-22 20:44:53] [Rank 0] step:5041/10000 train_time:446771ms step_avg:88.63ms +[2025-08-22 20:44:55] [Rank 0] step:5061/10000 train_time:448594ms step_avg:88.64ms +[2025-08-22 20:44:55] [Rank 0] step:5061/10000 train_time:448594ms step_avg:88.64ms +[2025-08-22 20:44:57] [Rank 0] step:5081/10000 train_time:450419ms step_avg:88.65ms +[2025-08-22 20:44:57] [Rank 0] step:5081/10000 train_time:450419ms step_avg:88.65ms +[2025-08-22 20:44:58] [Rank 0] step:5101/10000 train_time:452249ms step_avg:88.66ms +[2025-08-22 20:44:58] [Rank 0] step:5101/10000 train_time:452249ms step_avg:88.66ms +[2025-08-22 20:45:00] [Rank 0] step:5121/10000 train_time:454078ms step_avg:88.67ms +[2025-08-22 20:45:00] [Rank 0] step:5121/10000 train_time:454078ms step_avg:88.67ms +[2025-08-22 20:45:02] [Rank 0] step:5141/10000 train_time:455910ms step_avg:88.68ms +[2025-08-22 20:45:02] [Rank 0] step:5141/10000 train_time:455910ms step_avg:88.68ms +[2025-08-22 20:45:04] [Rank 0] step:5161/10000 train_time:457738ms step_avg:88.69ms +[2025-08-22 20:45:04] [Rank 0] step:5161/10000 train_time:457738ms step_avg:88.69ms +[2025-08-22 20:45:06] [Rank 0] step:5181/10000 train_time:459571ms step_avg:88.70ms +[2025-08-22 20:45:06] [Rank 0] step:5181/10000 train_time:459571ms step_avg:88.70ms +[2025-08-22 20:45:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:45:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:45:21] [Rank 0] PRINT: step:5200/10000 val_loss:3.8847 svd_entropy: attn_qk:H=0.8976,top10E=0.09,eRank=389.4,q75/q25=15.73 attn_vo:H=0.8785,top10E=0.09,eRank=348.3,q75/q25=37.56 mlp_w1:H=0.8565,top10E=0.17,eRank=302.2,q75/q25=9.38 mlp_w2:H=0.9241,top10E=0.10,eRank=464.0,q75/q25=5.54 vo_prod:H=0.7957,top10E=0.18,eRank=206.0,q75/q25=1149.80 train_time:461431ms step_avg:88.74ms +[2025-08-22 20:45:21] [Rank 0] PRINT: step:5200/10000 val_loss:3.8847 svd_entropy: attn_qk:H=0.8976,top10E=0.09,eRank=389.4,q75/q25=15.73 attn_vo:H=0.8785,top10E=0.09,eRank=348.3,q75/q25=37.56 mlp_w1:H=0.8565,top10E=0.17,eRank=302.2,q75/q25=9.38 mlp_w2:H=0.9241,top10E=0.10,eRank=464.0,q75/q25=5.54 vo_prod:H=0.7957,top10E=0.18,eRank=206.0,q75/q25=1149.80 train_time:461431ms step_avg:88.74ms +[2025-08-22 20:45:22] [Rank 0] step:5201/10000 train_time:461451ms step_avg:88.72ms +[2025-08-22 20:45:22] [Rank 0] step:5201/10000 train_time:461451ms step_avg:88.72ms +[2025-08-22 20:45:23] [Rank 0] step:5221/10000 train_time:463294ms step_avg:88.74ms +[2025-08-22 20:45:23] [Rank 0] step:5221/10000 train_time:463294ms step_avg:88.74ms +[2025-08-22 20:45:25] [Rank 0] step:5241/10000 train_time:465150ms step_avg:88.75ms +[2025-08-22 20:45:25] [Rank 0] step:5241/10000 train_time:465150ms step_avg:88.75ms +[2025-08-22 20:45:27] [Rank 0] step:5261/10000 train_time:467009ms step_avg:88.77ms +[2025-08-22 20:45:27] [Rank 0] step:5261/10000 train_time:467009ms step_avg:88.77ms +[2025-08-22 20:45:29] [Rank 0] step:5281/10000 train_time:468867ms step_avg:88.78ms +[2025-08-22 20:45:29] [Rank 0] step:5281/10000 train_time:468867ms step_avg:88.78ms +[2025-08-22 20:45:31] [Rank 0] step:5301/10000 train_time:470735ms step_avg:88.80ms +[2025-08-22 20:45:31] [Rank 0] step:5301/10000 train_time:470735ms step_avg:88.80ms +[2025-08-22 20:45:33] [Rank 0] step:5321/10000 train_time:472596ms step_avg:88.82ms +[2025-08-22 20:45:33] [Rank 0] step:5321/10000 train_time:472596ms step_avg:88.82ms +[2025-08-22 
20:45:35] [Rank 0] step:5341/10000 train_time:474457ms step_avg:88.83ms +[2025-08-22 20:45:35] [Rank 0] step:5341/10000 train_time:474457ms step_avg:88.83ms +[2025-08-22 20:45:36] [Rank 0] step:5361/10000 train_time:476321ms step_avg:88.85ms +[2025-08-22 20:45:36] [Rank 0] step:5361/10000 train_time:476321ms step_avg:88.85ms +[2025-08-22 20:45:38] [Rank 0] step:5381/10000 train_time:478186ms step_avg:88.87ms +[2025-08-22 20:45:38] [Rank 0] step:5381/10000 train_time:478186ms step_avg:88.87ms +[2025-08-22 20:45:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:45:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:45:54] [Rank 0] PRINT: step:5400/10000 val_loss:3.8705 svd_entropy: attn_qk:H=0.8981,top10E=0.09,eRank=390.7,q75/q25=15.47 attn_vo:H=0.8796,top10E=0.09,eRank=350.9,q75/q25=36.08 mlp_w1:H=0.8583,top10E=0.17,eRank=305.7,q75/q25=9.29 mlp_w2:H=0.9249,top10E=0.10,eRank=466.4,q75/q25=5.47 vo_prod:H=0.7977,top10E=0.18,eRank=208.4,q75/q25=1018.02 train_time:480048ms step_avg:88.90ms +[2025-08-22 20:45:54] [Rank 0] PRINT: step:5400/10000 val_loss:3.8705 svd_entropy: attn_qk:H=0.8981,top10E=0.09,eRank=390.7,q75/q25=15.47 attn_vo:H=0.8796,top10E=0.09,eRank=350.9,q75/q25=36.08 mlp_w1:H=0.8583,top10E=0.17,eRank=305.7,q75/q25=9.29 mlp_w2:H=0.9249,top10E=0.10,eRank=466.4,q75/q25=5.47 vo_prod:H=0.7977,top10E=0.18,eRank=208.4,q75/q25=1018.02 train_time:480048ms step_avg:88.90ms +[2025-08-22 20:45:54] [Rank 0] step:5401/10000 train_time:480067ms step_avg:88.88ms +[2025-08-22 20:45:54] [Rank 0] step:5401/10000 train_time:480067ms step_avg:88.88ms +[2025-08-22 20:45:56] [Rank 0] step:5421/10000 train_time:481922ms step_avg:88.90ms +[2025-08-22 20:45:56] [Rank 0] step:5421/10000 train_time:481922ms step_avg:88.90ms +[2025-08-22 20:45:58] [Rank 0] step:5441/10000 train_time:483780ms 
step_avg:88.91ms +[2025-08-22 20:45:58] [Rank 0] step:5441/10000 train_time:483780ms step_avg:88.91ms +[2025-08-22 20:46:00] [Rank 0] step:5461/10000 train_time:485647ms step_avg:88.93ms +[2025-08-22 20:46:00] [Rank 0] step:5461/10000 train_time:485647ms step_avg:88.93ms +[2025-08-22 20:46:02] [Rank 0] step:5481/10000 train_time:487509ms step_avg:88.95ms +[2025-08-22 20:46:02] [Rank 0] step:5481/10000 train_time:487509ms step_avg:88.95ms +[2025-08-22 20:46:04] [Rank 0] step:5501/10000 train_time:489380ms step_avg:88.96ms +[2025-08-22 20:46:04] [Rank 0] step:5501/10000 train_time:489380ms step_avg:88.96ms +[2025-08-22 20:46:05] [Rank 0] step:5521/10000 train_time:491249ms step_avg:88.98ms +[2025-08-22 20:46:05] [Rank 0] step:5521/10000 train_time:491249ms step_avg:88.98ms +[2025-08-22 20:46:07] [Rank 0] step:5541/10000 train_time:493115ms step_avg:88.99ms +[2025-08-22 20:46:07] [Rank 0] step:5541/10000 train_time:493115ms step_avg:88.99ms +[2025-08-22 20:46:09] [Rank 0] step:5561/10000 train_time:494979ms step_avg:89.01ms +[2025-08-22 20:46:09] [Rank 0] step:5561/10000 train_time:494979ms step_avg:89.01ms +[2025-08-22 20:46:11] [Rank 0] step:5581/10000 train_time:496844ms step_avg:89.02ms +[2025-08-22 20:46:11] [Rank 0] step:5581/10000 train_time:496844ms step_avg:89.02ms +[2025-08-22 20:46:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:46:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:46:27] [Rank 0] PRINT: step:5600/10000 val_loss:3.8522 svd_entropy: attn_qk:H=0.8985,top10E=0.09,eRank=391.9,q75/q25=15.26 attn_vo:H=0.8807,top10E=0.09,eRank=353.2,q75/q25=34.51 mlp_w1:H=0.8600,top10E=0.16,eRank=309.0,q75/q25=9.18 mlp_w2:H=0.9255,top10E=0.10,eRank=468.4,q75/q25=5.43 vo_prod:H=0.7995,top10E=0.17,eRank=210.6,q75/q25=931.37 train_time:498713ms step_avg:89.06ms +[2025-08-22 20:46:27] [Rank 0] PRINT: step:5600/10000 val_loss:3.8522 svd_entropy: attn_qk:H=0.8985,top10E=0.09,eRank=391.9,q75/q25=15.26 attn_vo:H=0.8807,top10E=0.09,eRank=353.2,q75/q25=34.51 mlp_w1:H=0.8600,top10E=0.16,eRank=309.0,q75/q25=9.18 mlp_w2:H=0.9255,top10E=0.10,eRank=468.4,q75/q25=5.43 vo_prod:H=0.7995,top10E=0.17,eRank=210.6,q75/q25=931.37 train_time:498713ms step_avg:89.06ms +[2025-08-22 20:46:27] [Rank 0] step:5601/10000 train_time:498731ms step_avg:89.04ms +[2025-08-22 20:46:27] [Rank 0] step:5601/10000 train_time:498731ms step_avg:89.04ms +[2025-08-22 20:46:29] [Rank 0] step:5621/10000 train_time:500599ms step_avg:89.06ms +[2025-08-22 20:46:29] [Rank 0] step:5621/10000 train_time:500599ms step_avg:89.06ms +[2025-08-22 20:46:31] [Rank 0] step:5641/10000 train_time:502458ms step_avg:89.07ms +[2025-08-22 20:46:31] [Rank 0] step:5641/10000 train_time:502458ms step_avg:89.07ms +[2025-08-22 20:46:33] [Rank 0] step:5661/10000 train_time:504315ms step_avg:89.09ms +[2025-08-22 20:46:33] [Rank 0] step:5661/10000 train_time:504315ms step_avg:89.09ms +[2025-08-22 20:46:34] [Rank 0] step:5681/10000 train_time:506177ms step_avg:89.10ms +[2025-08-22 20:46:34] [Rank 0] step:5681/10000 train_time:506177ms step_avg:89.10ms +[2025-08-22 20:46:36] [Rank 0] step:5701/10000 train_time:508036ms step_avg:89.11ms +[2025-08-22 20:46:36] [Rank 0] step:5701/10000 train_time:508036ms step_avg:89.11ms +[2025-08-22 20:46:38] [Rank 0] step:5721/10000 train_time:509900ms step_avg:89.13ms +[2025-08-22 20:46:38] [Rank 0] step:5721/10000 train_time:509900ms step_avg:89.13ms +[2025-08-22 20:46:40] 
[Rank 0] step:5741/10000 train_time:511760ms step_avg:89.14ms +[2025-08-22 20:46:40] [Rank 0] step:5741/10000 train_time:511760ms step_avg:89.14ms +[2025-08-22 20:46:42] [Rank 0] step:5761/10000 train_time:513624ms step_avg:89.16ms +[2025-08-22 20:46:42] [Rank 0] step:5761/10000 train_time:513624ms step_avg:89.16ms +[2025-08-22 20:46:44] [Rank 0] step:5781/10000 train_time:515487ms step_avg:89.17ms +[2025-08-22 20:46:44] [Rank 0] step:5781/10000 train_time:515487ms step_avg:89.17ms +[2025-08-22 20:46:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:46:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:47:00] [Rank 0] PRINT: step:5800/10000 val_loss:3.8440 svd_entropy: attn_qk:H=0.8989,top10E=0.08,eRank=393.0,q75/q25=15.04 attn_vo:H=0.8818,top10E=0.09,eRank=355.4,q75/q25=33.26 mlp_w1:H=0.8615,top10E=0.16,eRank=312.2,q75/q25=9.10 mlp_w2:H=0.9262,top10E=0.10,eRank=470.6,q75/q25=5.38 vo_prod:H=0.8012,top10E=0.17,eRank=212.8,q75/q25=834.33 train_time:517354ms step_avg:89.20ms +[2025-08-22 20:47:00] [Rank 0] PRINT: step:5800/10000 val_loss:3.8440 svd_entropy: attn_qk:H=0.8989,top10E=0.08,eRank=393.0,q75/q25=15.04 attn_vo:H=0.8818,top10E=0.09,eRank=355.4,q75/q25=33.26 mlp_w1:H=0.8615,top10E=0.16,eRank=312.2,q75/q25=9.10 mlp_w2:H=0.9262,top10E=0.10,eRank=470.6,q75/q25=5.38 vo_prod:H=0.8012,top10E=0.17,eRank=212.8,q75/q25=834.33 train_time:517354ms step_avg:89.20ms +[2025-08-22 20:47:00] [Rank 0] step:5801/10000 train_time:517373ms step_avg:89.19ms +[2025-08-22 20:47:00] [Rank 0] step:5801/10000 train_time:517373ms step_avg:89.19ms +[2025-08-22 20:47:02] [Rank 0] step:5821/10000 train_time:519231ms step_avg:89.20ms +[2025-08-22 20:47:02] [Rank 0] step:5821/10000 train_time:519231ms step_avg:89.20ms +[2025-08-22 20:47:03] [Rank 0] step:5841/10000 train_time:521085ms step_avg:89.21ms 
+[2025-08-22 20:47:03] [Rank 0] step:5841/10000 train_time:521085ms step_avg:89.21ms +[2025-08-22 20:47:05] [Rank 0] step:5861/10000 train_time:522949ms step_avg:89.23ms +[2025-08-22 20:47:05] [Rank 0] step:5861/10000 train_time:522949ms step_avg:89.23ms +[2025-08-22 20:47:07] [Rank 0] step:5881/10000 train_time:524810ms step_avg:89.24ms +[2025-08-22 20:47:07] [Rank 0] step:5881/10000 train_time:524810ms step_avg:89.24ms +[2025-08-22 20:47:09] [Rank 0] step:5901/10000 train_time:526669ms step_avg:89.25ms +[2025-08-22 20:47:09] [Rank 0] step:5901/10000 train_time:526669ms step_avg:89.25ms +[2025-08-22 20:47:11] [Rank 0] step:5921/10000 train_time:528527ms step_avg:89.26ms +[2025-08-22 20:47:11] [Rank 0] step:5921/10000 train_time:528527ms step_avg:89.26ms +[2025-08-22 20:47:13] [Rank 0] step:5941/10000 train_time:530391ms step_avg:89.28ms +[2025-08-22 20:47:13] [Rank 0] step:5941/10000 train_time:530391ms step_avg:89.28ms +[2025-08-22 20:47:15] [Rank 0] step:5961/10000 train_time:532252ms step_avg:89.29ms +[2025-08-22 20:47:15] [Rank 0] step:5961/10000 train_time:532252ms step_avg:89.29ms +[2025-08-22 20:47:17] [Rank 0] step:5981/10000 train_time:534115ms step_avg:89.30ms +[2025-08-22 20:47:17] [Rank 0] step:5981/10000 train_time:534115ms step_avg:89.30ms +[2025-08-22 20:47:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:47:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:47:32] [Rank 0] PRINT: step:6000/10000 val_loss:3.8211 svd_entropy: attn_qk:H=0.8993,top10E=0.08,eRank=394.0,q75/q25=14.88 attn_vo:H=0.8827,top10E=0.09,eRank=357.6,q75/q25=32.18 mlp_w1:H=0.8631,top10E=0.16,eRank=315.4,q75/q25=9.01 mlp_w2:H=0.9268,top10E=0.10,eRank=472.5,q75/q25=5.34 vo_prod:H=0.8029,top10E=0.17,eRank=214.9,q75/q25=759.60 train_time:535980ms step_avg:89.33ms +[2025-08-22 20:47:32] [Rank 0] PRINT: step:6000/10000 val_loss:3.8211 svd_entropy: attn_qk:H=0.8993,top10E=0.08,eRank=394.0,q75/q25=14.88 attn_vo:H=0.8827,top10E=0.09,eRank=357.6,q75/q25=32.18 mlp_w1:H=0.8631,top10E=0.16,eRank=315.4,q75/q25=9.01 mlp_w2:H=0.9268,top10E=0.10,eRank=472.5,q75/q25=5.34 vo_prod:H=0.8029,top10E=0.17,eRank=214.9,q75/q25=759.60 train_time:535980ms step_avg:89.33ms +[2025-08-22 20:47:32] [Rank 0] step:6001/10000 train_time:535998ms step_avg:89.32ms +[2025-08-22 20:47:32] [Rank 0] step:6001/10000 train_time:535998ms step_avg:89.32ms +[2025-08-22 20:47:34] [Rank 0] step:6021/10000 train_time:537860ms step_avg:89.33ms +[2025-08-22 20:47:34] [Rank 0] step:6021/10000 train_time:537860ms step_avg:89.33ms +[2025-08-22 20:47:36] [Rank 0] step:6041/10000 train_time:539723ms step_avg:89.34ms +[2025-08-22 20:47:36] [Rank 0] step:6041/10000 train_time:539723ms step_avg:89.34ms +[2025-08-22 20:47:38] [Rank 0] step:6061/10000 train_time:541590ms step_avg:89.36ms +[2025-08-22 20:47:38] [Rank 0] step:6061/10000 train_time:541590ms step_avg:89.36ms +[2025-08-22 20:47:40] [Rank 0] step:6081/10000 train_time:543450ms step_avg:89.37ms +[2025-08-22 20:47:40] [Rank 0] step:6081/10000 train_time:543450ms step_avg:89.37ms +[2025-08-22 20:47:41] [Rank 0] step:6101/10000 train_time:545320ms step_avg:89.38ms +[2025-08-22 20:47:41] [Rank 0] step:6101/10000 train_time:545320ms step_avg:89.38ms +[2025-08-22 20:47:43] [Rank 0] step:6121/10000 train_time:547256ms step_avg:89.41ms +[2025-08-22 20:47:43] [Rank 0] step:6121/10000 train_time:547256ms step_avg:89.41ms +[2025-08-22 20:47:45] 
[Rank 0] step:6141/10000 train_time:549134ms step_avg:89.42ms +[2025-08-22 20:47:45] [Rank 0] step:6141/10000 train_time:549134ms step_avg:89.42ms +[2025-08-22 20:47:47] [Rank 0] step:6161/10000 train_time:551001ms step_avg:89.43ms +[2025-08-22 20:47:47] [Rank 0] step:6161/10000 train_time:551001ms step_avg:89.43ms +[2025-08-22 20:47:49] [Rank 0] step:6181/10000 train_time:552870ms step_avg:89.45ms +[2025-08-22 20:47:49] [Rank 0] step:6181/10000 train_time:552870ms step_avg:89.45ms +[2025-08-22 20:47:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:47:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:48:05] [Rank 0] PRINT: step:6200/10000 val_loss:3.8038 svd_entropy: attn_qk:H=0.8997,top10E=0.08,eRank=394.9,q75/q25=14.74 attn_vo:H=0.8837,top10E=0.09,eRank=359.6,q75/q25=31.11 mlp_w1:H=0.8645,top10E=0.16,eRank=318.3,q75/q25=8.93 mlp_w2:H=0.9273,top10E=0.10,eRank=474.0,q75/q25=5.30 vo_prod:H=0.8045,top10E=0.17,eRank=216.8,q75/q25=696.35 train_time:554740ms step_avg:89.47ms +[2025-08-22 20:48:05] [Rank 0] PRINT: step:6200/10000 val_loss:3.8038 svd_entropy: attn_qk:H=0.8997,top10E=0.08,eRank=394.9,q75/q25=14.74 attn_vo:H=0.8837,top10E=0.09,eRank=359.6,q75/q25=31.11 mlp_w1:H=0.8645,top10E=0.16,eRank=318.3,q75/q25=8.93 mlp_w2:H=0.9273,top10E=0.10,eRank=474.0,q75/q25=5.30 vo_prod:H=0.8045,top10E=0.17,eRank=216.8,q75/q25=696.35 train_time:554740ms step_avg:89.47ms +[2025-08-22 20:48:05] [Rank 0] step:6201/10000 train_time:554758ms step_avg:89.46ms +[2025-08-22 20:48:05] [Rank 0] step:6201/10000 train_time:554758ms step_avg:89.46ms +[2025-08-22 20:48:07] [Rank 0] step:6221/10000 train_time:556625ms step_avg:89.48ms +[2025-08-22 20:48:07] [Rank 0] step:6221/10000 train_time:556625ms step_avg:89.48ms +[2025-08-22 20:48:09] [Rank 0] step:6241/10000 train_time:558488ms step_avg:89.49ms 
+[2025-08-22 20:48:09] [Rank 0] step:6241/10000 train_time:558488ms step_avg:89.49ms +[2025-08-22 20:48:11] [Rank 0] step:6261/10000 train_time:560354ms step_avg:89.50ms +[2025-08-22 20:48:11] [Rank 0] step:6261/10000 train_time:560354ms step_avg:89.50ms +[2025-08-22 20:48:12] [Rank 0] step:6281/10000 train_time:562224ms step_avg:89.51ms +[2025-08-22 20:48:12] [Rank 0] step:6281/10000 train_time:562224ms step_avg:89.51ms +[2025-08-22 20:48:14] [Rank 0] step:6301/10000 train_time:564093ms step_avg:89.52ms +[2025-08-22 20:48:14] [Rank 0] step:6301/10000 train_time:564093ms step_avg:89.52ms +[2025-08-22 20:48:16] [Rank 0] step:6321/10000 train_time:565963ms step_avg:89.54ms +[2025-08-22 20:48:16] [Rank 0] step:6321/10000 train_time:565963ms step_avg:89.54ms +[2025-08-22 20:48:18] [Rank 0] step:6341/10000 train_time:567834ms step_avg:89.55ms +[2025-08-22 20:48:18] [Rank 0] step:6341/10000 train_time:567834ms step_avg:89.55ms +[2025-08-22 20:48:20] [Rank 0] step:6361/10000 train_time:569711ms step_avg:89.56ms +[2025-08-22 20:48:20] [Rank 0] step:6361/10000 train_time:569711ms step_avg:89.56ms +[2025-08-22 20:48:22] [Rank 0] step:6381/10000 train_time:571584ms step_avg:89.58ms +[2025-08-22 20:48:22] [Rank 0] step:6381/10000 train_time:571584ms step_avg:89.58ms +[2025-08-22 20:48:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:48:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:48:38] [Rank 0] PRINT: step:6400/10000 val_loss:3.7937 svd_entropy: attn_qk:H=0.8999,top10E=0.08,eRank=395.5,q75/q25=14.60 attn_vo:H=0.8845,top10E=0.09,eRank=361.4,q75/q25=30.22 mlp_w1:H=0.8658,top10E=0.16,eRank=321.0,q75/q25=8.85 mlp_w2:H=0.9277,top10E=0.09,eRank=475.4,q75/q25=5.27 vo_prod:H=0.8058,top10E=0.17,eRank=218.5,q75/q25=637.42 train_time:573457ms step_avg:89.60ms +[2025-08-22 20:48:38] [Rank 0] PRINT: step:6400/10000 val_loss:3.7937 svd_entropy: attn_qk:H=0.8999,top10E=0.08,eRank=395.5,q75/q25=14.60 attn_vo:H=0.8845,top10E=0.09,eRank=361.4,q75/q25=30.22 mlp_w1:H=0.8658,top10E=0.16,eRank=321.0,q75/q25=8.85 mlp_w2:H=0.9277,top10E=0.09,eRank=475.4,q75/q25=5.27 vo_prod:H=0.8058,top10E=0.17,eRank=218.5,q75/q25=637.42 train_time:573457ms step_avg:89.60ms +[2025-08-22 20:48:38] [Rank 0] step:6401/10000 train_time:573475ms step_avg:89.59ms +[2025-08-22 20:48:38] [Rank 0] step:6401/10000 train_time:573475ms step_avg:89.59ms +[2025-08-22 20:48:40] [Rank 0] step:6421/10000 train_time:575350ms step_avg:89.60ms +[2025-08-22 20:48:40] [Rank 0] step:6421/10000 train_time:575350ms step_avg:89.60ms +[2025-08-22 20:48:41] [Rank 0] step:6441/10000 train_time:577214ms step_avg:89.62ms +[2025-08-22 20:48:41] [Rank 0] step:6441/10000 train_time:577214ms step_avg:89.62ms +[2025-08-22 20:48:43] [Rank 0] step:6461/10000 train_time:579082ms step_avg:89.63ms +[2025-08-22 20:48:43] [Rank 0] step:6461/10000 train_time:579082ms step_avg:89.63ms +[2025-08-22 20:48:45] [Rank 0] step:6481/10000 train_time:580954ms step_avg:89.64ms +[2025-08-22 20:48:45] [Rank 0] step:6481/10000 train_time:580954ms step_avg:89.64ms +[2025-08-22 20:48:47] [Rank 0] step:6501/10000 train_time:582818ms step_avg:89.65ms +[2025-08-22 20:48:47] [Rank 0] step:6501/10000 train_time:582818ms step_avg:89.65ms +[2025-08-22 20:48:49] [Rank 0] step:6521/10000 train_time:584681ms step_avg:89.66ms +[2025-08-22 20:48:49] [Rank 0] step:6521/10000 train_time:584681ms step_avg:89.66ms +[2025-08-22 20:48:51] 
[Rank 0] step:6541/10000 train_time:586550ms step_avg:89.67ms +[2025-08-22 20:48:51] [Rank 0] step:6541/10000 train_time:586550ms step_avg:89.67ms +[2025-08-22 20:48:53] [Rank 0] step:6561/10000 train_time:588419ms step_avg:89.68ms +[2025-08-22 20:48:53] [Rank 0] step:6561/10000 train_time:588419ms step_avg:89.68ms +[2025-08-22 20:48:55] [Rank 0] step:6581/10000 train_time:590282ms step_avg:89.69ms +[2025-08-22 20:48:55] [Rank 0] step:6581/10000 train_time:590282ms step_avg:89.69ms +[2025-08-22 20:48:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:48:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:49:10] [Rank 0] PRINT: step:6600/10000 val_loss:3.7750 svd_entropy: attn_qk:H=0.9001,top10E=0.08,eRank=396.1,q75/q25=14.51 attn_vo:H=0.8852,top10E=0.09,eRank=363.0,q75/q25=29.39 mlp_w1:H=0.8669,top10E=0.16,eRank=323.4,q75/q25=8.78 mlp_w2:H=0.9281,top10E=0.09,eRank=476.7,q75/q25=5.25 vo_prod:H=0.8070,top10E=0.17,eRank=220.1,q75/q25=590.74 train_time:592153ms step_avg:89.72ms +[2025-08-22 20:49:10] [Rank 0] PRINT: step:6600/10000 val_loss:3.7750 svd_entropy: attn_qk:H=0.9001,top10E=0.08,eRank=396.1,q75/q25=14.51 attn_vo:H=0.8852,top10E=0.09,eRank=363.0,q75/q25=29.39 mlp_w1:H=0.8669,top10E=0.16,eRank=323.4,q75/q25=8.78 mlp_w2:H=0.9281,top10E=0.09,eRank=476.7,q75/q25=5.25 vo_prod:H=0.8070,top10E=0.17,eRank=220.1,q75/q25=590.74 train_time:592153ms step_avg:89.72ms +[2025-08-22 20:49:10] [Rank 0] step:6601/10000 train_time:592173ms step_avg:89.71ms +[2025-08-22 20:49:10] [Rank 0] step:6601/10000 train_time:592173ms step_avg:89.71ms +[2025-08-22 20:49:12] [Rank 0] step:6621/10000 train_time:594031ms step_avg:89.72ms +[2025-08-22 20:49:12] [Rank 0] step:6621/10000 train_time:594031ms step_avg:89.72ms +[2025-08-22 20:49:14] [Rank 0] step:6641/10000 train_time:595902ms step_avg:89.73ms 
+[2025-08-22 20:49:14] [Rank 0] step:6641/10000 train_time:595902ms step_avg:89.73ms +[2025-08-22 20:49:16] [Rank 0] step:6661/10000 train_time:597764ms step_avg:89.74ms +[2025-08-22 20:49:16] [Rank 0] step:6661/10000 train_time:597764ms step_avg:89.74ms +[2025-08-22 20:49:18] [Rank 0] step:6681/10000 train_time:599649ms step_avg:89.75ms +[2025-08-22 20:49:18] [Rank 0] step:6681/10000 train_time:599649ms step_avg:89.75ms +[2025-08-22 20:49:20] [Rank 0] step:6701/10000 train_time:601551ms step_avg:89.77ms +[2025-08-22 20:49:20] [Rank 0] step:6701/10000 train_time:601551ms step_avg:89.77ms +[2025-08-22 20:49:22] [Rank 0] step:6721/10000 train_time:603447ms step_avg:89.79ms +[2025-08-22 20:49:22] [Rank 0] step:6721/10000 train_time:603447ms step_avg:89.79ms +[2025-08-22 20:49:24] [Rank 0] step:6741/10000 train_time:605339ms step_avg:89.80ms +[2025-08-22 20:49:24] [Rank 0] step:6741/10000 train_time:605339ms step_avg:89.80ms +[2025-08-22 20:49:25] [Rank 0] step:6761/10000 train_time:607232ms step_avg:89.81ms +[2025-08-22 20:49:25] [Rank 0] step:6761/10000 train_time:607232ms step_avg:89.81ms +[2025-08-22 20:49:27] [Rank 0] step:6781/10000 train_time:609129ms step_avg:89.83ms +[2025-08-22 20:49:27] [Rank 0] step:6781/10000 train_time:609129ms step_avg:89.83ms +[2025-08-22 20:49:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:49:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:49:43] [Rank 0] PRINT: step:6800/10000 val_loss:3.7569 svd_entropy: attn_qk:H=0.9003,top10E=0.08,eRank=396.5,q75/q25=14.47 attn_vo:H=0.8859,top10E=0.09,eRank=364.5,q75/q25=28.70 mlp_w1:H=0.8680,top10E=0.16,eRank=325.6,q75/q25=8.71 mlp_w2:H=0.9285,top10E=0.09,eRank=477.8,q75/q25=5.22 vo_prod:H=0.8082,top10E=0.17,eRank=221.6,q75/q25=559.43 train_time:611032ms step_avg:89.86ms +[2025-08-22 20:49:43] [Rank 0] PRINT: step:6800/10000 val_loss:3.7569 svd_entropy: attn_qk:H=0.9003,top10E=0.08,eRank=396.5,q75/q25=14.47 attn_vo:H=0.8859,top10E=0.09,eRank=364.5,q75/q25=28.70 mlp_w1:H=0.8680,top10E=0.16,eRank=325.6,q75/q25=8.71 mlp_w2:H=0.9285,top10E=0.09,eRank=477.8,q75/q25=5.22 vo_prod:H=0.8082,top10E=0.17,eRank=221.6,q75/q25=559.43 train_time:611032ms step_avg:89.86ms +[2025-08-22 20:49:43] [Rank 0] step:6801/10000 train_time:611050ms step_avg:89.85ms +[2025-08-22 20:49:43] [Rank 0] step:6801/10000 train_time:611050ms step_avg:89.85ms +[2025-08-22 20:49:45] [Rank 0] step:6821/10000 train_time:612952ms step_avg:89.86ms +[2025-08-22 20:49:45] [Rank 0] step:6821/10000 train_time:612952ms step_avg:89.86ms +[2025-08-22 20:49:47] [Rank 0] step:6841/10000 train_time:614843ms step_avg:89.88ms +[2025-08-22 20:49:47] [Rank 0] step:6841/10000 train_time:614843ms step_avg:89.88ms +[2025-08-22 20:49:49] [Rank 0] step:6861/10000 train_time:616732ms step_avg:89.89ms +[2025-08-22 20:49:49] [Rank 0] step:6861/10000 train_time:616732ms step_avg:89.89ms +[2025-08-22 20:49:51] [Rank 0] step:6881/10000 train_time:618628ms step_avg:89.90ms +[2025-08-22 20:49:51] [Rank 0] step:6881/10000 train_time:618628ms step_avg:89.90ms +[2025-08-22 20:49:53] [Rank 0] step:6901/10000 train_time:620519ms step_avg:89.92ms +[2025-08-22 20:49:53] [Rank 0] step:6901/10000 train_time:620519ms step_avg:89.92ms +[2025-08-22 20:49:55] [Rank 0] step:6921/10000 train_time:622410ms step_avg:89.93ms +[2025-08-22 20:49:55] [Rank 0] step:6921/10000 train_time:622410ms step_avg:89.93ms +[2025-08-22 20:49:57] 
[Rank 0] step:6941/10000 train_time:624314ms step_avg:89.95ms +[2025-08-22 20:49:57] [Rank 0] step:6941/10000 train_time:624314ms step_avg:89.95ms +[2025-08-22 20:49:58] [Rank 0] step:6961/10000 train_time:626224ms step_avg:89.96ms +[2025-08-22 20:49:58] [Rank 0] step:6961/10000 train_time:626224ms step_avg:89.96ms +[2025-08-22 20:50:00] [Rank 0] step:6981/10000 train_time:628217ms step_avg:89.99ms +[2025-08-22 20:50:00] [Rank 0] step:6981/10000 train_time:628217ms step_avg:89.99ms +[2025-08-22 20:50:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:50:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:50:16] [Rank 0] PRINT: step:7000/10000 val_loss:3.7433 svd_entropy: attn_qk:H=0.9004,top10E=0.08,eRank=396.9,q75/q25=14.37 attn_vo:H=0.8865,top10E=0.09,eRank=365.9,q75/q25=28.17 mlp_w1:H=0.8689,top10E=0.16,eRank=327.8,q75/q25=8.64 mlp_w2:H=0.9288,top10E=0.09,eRank=478.8,q75/q25=5.19 vo_prod:H=0.8093,top10E=0.17,eRank=223.1,q75/q25=523.41 train_time:630176ms step_avg:90.03ms +[2025-08-22 20:50:16] [Rank 0] PRINT: step:7000/10000 val_loss:3.7433 svd_entropy: attn_qk:H=0.9004,top10E=0.08,eRank=396.9,q75/q25=14.37 attn_vo:H=0.8865,top10E=0.09,eRank=365.9,q75/q25=28.17 mlp_w1:H=0.8689,top10E=0.16,eRank=327.8,q75/q25=8.64 mlp_w2:H=0.9288,top10E=0.09,eRank=478.8,q75/q25=5.19 vo_prod:H=0.8093,top10E=0.17,eRank=223.1,q75/q25=523.41 train_time:630176ms step_avg:90.03ms +[2025-08-22 20:50:17] [Rank 0] step:7001/10000 train_time:630195ms step_avg:90.01ms +[2025-08-22 20:50:17] [Rank 0] step:7001/10000 train_time:630195ms step_avg:90.01ms +[2025-08-22 20:50:18] [Rank 0] step:7021/10000 train_time:632082ms step_avg:90.03ms +[2025-08-22 20:50:18] [Rank 0] step:7021/10000 train_time:632082ms step_avg:90.03ms +[2025-08-22 20:50:20] [Rank 0] step:7041/10000 train_time:633976ms step_avg:90.04ms 
+[2025-08-22 20:50:20] [Rank 0] step:7041/10000 train_time:633976ms step_avg:90.04ms +[2025-08-22 20:50:22] [Rank 0] step:7061/10000 train_time:635871ms step_avg:90.05ms +[2025-08-22 20:50:22] [Rank 0] step:7061/10000 train_time:635871ms step_avg:90.05ms +[2025-08-22 20:50:24] [Rank 0] step:7081/10000 train_time:637766ms step_avg:90.07ms +[2025-08-22 20:50:24] [Rank 0] step:7081/10000 train_time:637766ms step_avg:90.07ms +[2025-08-22 20:50:26] [Rank 0] step:7101/10000 train_time:639671ms step_avg:90.08ms +[2025-08-22 20:50:26] [Rank 0] step:7101/10000 train_time:639671ms step_avg:90.08ms +[2025-08-22 20:50:28] [Rank 0] step:7121/10000 train_time:641567ms step_avg:90.10ms +[2025-08-22 20:50:28] [Rank 0] step:7121/10000 train_time:641567ms step_avg:90.10ms +[2025-08-22 20:50:30] [Rank 0] step:7141/10000 train_time:643465ms step_avg:90.11ms +[2025-08-22 20:50:30] [Rank 0] step:7141/10000 train_time:643465ms step_avg:90.11ms +[2025-08-22 20:50:32] [Rank 0] step:7161/10000 train_time:645368ms step_avg:90.12ms +[2025-08-22 20:50:32] [Rank 0] step:7161/10000 train_time:645368ms step_avg:90.12ms +[2025-08-22 20:50:34] [Rank 0] step:7181/10000 train_time:647268ms step_avg:90.14ms +[2025-08-22 20:50:34] [Rank 0] step:7181/10000 train_time:647268ms step_avg:90.14ms +[2025-08-22 20:50:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:50:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:50:49] [Rank 0] PRINT: step:7200/10000 val_loss:3.7283 svd_entropy: attn_qk:H=0.9006,top10E=0.08,eRank=397.3,q75/q25=14.32 attn_vo:H=0.8871,top10E=0.09,eRank=367.2,q75/q25=27.67 mlp_w1:H=0.8698,top10E=0.15,eRank=329.6,q75/q25=8.57 mlp_w2:H=0.9290,top10E=0.09,eRank=479.7,q75/q25=5.17 vo_prod:H=0.8104,top10E=0.16,eRank=224.5,q75/q25=496.60 train_time:649175ms step_avg:90.16ms +[2025-08-22 20:50:49] [Rank 0] PRINT: step:7200/10000 val_loss:3.7283 svd_entropy: attn_qk:H=0.9006,top10E=0.08,eRank=397.3,q75/q25=14.32 attn_vo:H=0.8871,top10E=0.09,eRank=367.2,q75/q25=27.67 mlp_w1:H=0.8698,top10E=0.15,eRank=329.6,q75/q25=8.57 mlp_w2:H=0.9290,top10E=0.09,eRank=479.7,q75/q25=5.17 vo_prod:H=0.8104,top10E=0.16,eRank=224.5,q75/q25=496.60 train_time:649175ms step_avg:90.16ms +[2025-08-22 20:50:49] [Rank 0] step:7201/10000 train_time:649194ms step_avg:90.15ms +[2025-08-22 20:50:49] [Rank 0] step:7201/10000 train_time:649194ms step_avg:90.15ms +[2025-08-22 20:50:51] [Rank 0] step:7221/10000 train_time:651098ms step_avg:90.17ms +[2025-08-22 20:50:51] [Rank 0] step:7221/10000 train_time:651098ms step_avg:90.17ms +[2025-08-22 20:50:53] [Rank 0] step:7241/10000 train_time:652989ms step_avg:90.18ms +[2025-08-22 20:50:53] [Rank 0] step:7241/10000 train_time:652989ms step_avg:90.18ms +[2025-08-22 20:50:55] [Rank 0] step:7261/10000 train_time:654879ms step_avg:90.19ms +[2025-08-22 20:50:55] [Rank 0] step:7261/10000 train_time:654879ms step_avg:90.19ms +[2025-08-22 20:50:57] [Rank 0] step:7281/10000 train_time:656782ms step_avg:90.20ms +[2025-08-22 20:50:57] [Rank 0] step:7281/10000 train_time:656782ms step_avg:90.20ms +[2025-08-22 20:50:59] [Rank 0] step:7301/10000 train_time:658673ms step_avg:90.22ms +[2025-08-22 20:50:59] [Rank 0] step:7301/10000 train_time:658673ms step_avg:90.22ms +[2025-08-22 20:51:01] [Rank 0] step:7321/10000 train_time:660581ms step_avg:90.23ms +[2025-08-22 20:51:01] [Rank 0] step:7321/10000 train_time:660581ms step_avg:90.23ms +[2025-08-22 20:51:03] 
[Rank 0] step:7341/10000 train_time:662475ms step_avg:90.24ms +[2025-08-22 20:51:03] [Rank 0] step:7341/10000 train_time:662475ms step_avg:90.24ms +[2025-08-22 20:51:05] [Rank 0] step:7361/10000 train_time:664446ms step_avg:90.27ms +[2025-08-22 20:51:05] [Rank 0] step:7361/10000 train_time:664446ms step_avg:90.27ms +[2025-08-22 20:51:07] [Rank 0] step:7381/10000 train_time:666420ms step_avg:90.29ms +[2025-08-22 20:51:07] [Rank 0] step:7381/10000 train_time:666420ms step_avg:90.29ms +[2025-08-22 20:51:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:51:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:51:22] [Rank 0] PRINT: step:7400/10000 val_loss:3.7092 svd_entropy: attn_qk:H=0.9007,top10E=0.08,eRank=397.6,q75/q25=14.28 attn_vo:H=0.8876,top10E=0.09,eRank=368.3,q75/q25=27.12 mlp_w1:H=0.8706,top10E=0.15,eRank=331.2,q75/q25=8.54 mlp_w2:H=0.9293,top10E=0.09,eRank=480.5,q75/q25=5.16 vo_prod:H=0.8113,top10E=0.16,eRank=225.7,q75/q25=474.84 train_time:668305ms step_avg:90.31ms +[2025-08-22 20:51:22] [Rank 0] PRINT: step:7400/10000 val_loss:3.7092 svd_entropy: attn_qk:H=0.9007,top10E=0.08,eRank=397.6,q75/q25=14.28 attn_vo:H=0.8876,top10E=0.09,eRank=368.3,q75/q25=27.12 mlp_w1:H=0.8706,top10E=0.15,eRank=331.2,q75/q25=8.54 mlp_w2:H=0.9293,top10E=0.09,eRank=480.5,q75/q25=5.16 vo_prod:H=0.8113,top10E=0.16,eRank=225.7,q75/q25=474.84 train_time:668305ms step_avg:90.31ms +[2025-08-22 20:51:22] [Rank 0] step:7401/10000 train_time:668325ms step_avg:90.30ms +[2025-08-22 20:51:22] [Rank 0] step:7401/10000 train_time:668325ms step_avg:90.30ms +[2025-08-22 20:51:24] [Rank 0] step:7421/10000 train_time:670231ms step_avg:90.32ms +[2025-08-22 20:51:24] [Rank 0] step:7421/10000 train_time:670231ms step_avg:90.32ms +[2025-08-22 20:51:26] [Rank 0] step:7441/10000 train_time:672123ms step_avg:90.33ms 
+[2025-08-22 20:51:26] [Rank 0] step:7441/10000 train_time:672123ms step_avg:90.33ms +[2025-08-22 20:51:28] [Rank 0] step:7461/10000 train_time:674017ms step_avg:90.34ms +[2025-08-22 20:51:28] [Rank 0] step:7461/10000 train_time:674017ms step_avg:90.34ms +[2025-08-22 20:51:30] [Rank 0] step:7481/10000 train_time:675918ms step_avg:90.35ms +[2025-08-22 20:51:30] [Rank 0] step:7481/10000 train_time:675918ms step_avg:90.35ms +[2025-08-22 20:51:32] [Rank 0] step:7501/10000 train_time:677817ms step_avg:90.36ms +[2025-08-22 20:51:32] [Rank 0] step:7501/10000 train_time:677817ms step_avg:90.36ms +[2025-08-22 20:51:34] [Rank 0] step:7521/10000 train_time:679718ms step_avg:90.38ms +[2025-08-22 20:51:34] [Rank 0] step:7521/10000 train_time:679718ms step_avg:90.38ms +[2025-08-22 20:51:35] [Rank 0] step:7541/10000 train_time:681626ms step_avg:90.39ms +[2025-08-22 20:51:35] [Rank 0] step:7541/10000 train_time:681626ms step_avg:90.39ms +[2025-08-22 20:51:37] [Rank 0] step:7561/10000 train_time:683516ms step_avg:90.40ms +[2025-08-22 20:51:37] [Rank 0] step:7561/10000 train_time:683516ms step_avg:90.40ms +[2025-08-22 20:51:39] [Rank 0] step:7581/10000 train_time:685424ms step_avg:90.41ms +[2025-08-22 20:51:39] [Rank 0] step:7581/10000 train_time:685424ms step_avg:90.41ms +[2025-08-22 20:51:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:51:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:51:55] [Rank 0] PRINT: step:7600/10000 val_loss:3.6992 svd_entropy: attn_qk:H=0.9007,top10E=0.08,eRank=397.8,q75/q25=14.23 attn_vo:H=0.8880,top10E=0.08,eRank=369.2,q75/q25=26.78 mlp_w1:H=0.8712,top10E=0.15,eRank=332.7,q75/q25=8.49 mlp_w2:H=0.9295,top10E=0.09,eRank=481.2,q75/q25=5.13 vo_prod:H=0.8121,top10E=0.16,eRank=226.7,q75/q25=453.09 train_time:687334ms step_avg:90.44ms +[2025-08-22 20:51:55] [Rank 0] PRINT: step:7600/10000 val_loss:3.6992 svd_entropy: attn_qk:H=0.9007,top10E=0.08,eRank=397.8,q75/q25=14.23 attn_vo:H=0.8880,top10E=0.08,eRank=369.2,q75/q25=26.78 mlp_w1:H=0.8712,top10E=0.15,eRank=332.7,q75/q25=8.49 mlp_w2:H=0.9295,top10E=0.09,eRank=481.2,q75/q25=5.13 vo_prod:H=0.8121,top10E=0.16,eRank=226.7,q75/q25=453.09 train_time:687334ms step_avg:90.44ms +[2025-08-22 20:51:55] [Rank 0] step:7601/10000 train_time:687352ms step_avg:90.43ms +[2025-08-22 20:51:55] [Rank 0] step:7601/10000 train_time:687352ms step_avg:90.43ms +[2025-08-22 20:51:57] [Rank 0] step:7621/10000 train_time:689257ms step_avg:90.44ms +[2025-08-22 20:51:57] [Rank 0] step:7621/10000 train_time:689257ms step_avg:90.44ms +[2025-08-22 20:51:59] [Rank 0] step:7641/10000 train_time:691150ms step_avg:90.45ms +[2025-08-22 20:51:59] [Rank 0] step:7641/10000 train_time:691150ms step_avg:90.45ms +[2025-08-22 20:52:01] [Rank 0] step:7661/10000 train_time:693052ms step_avg:90.46ms +[2025-08-22 20:52:01] [Rank 0] step:7661/10000 train_time:693052ms step_avg:90.46ms +[2025-08-22 20:52:03] [Rank 0] step:7681/10000 train_time:694945ms step_avg:90.48ms +[2025-08-22 20:52:03] [Rank 0] step:7681/10000 train_time:694945ms step_avg:90.48ms +[2025-08-22 20:52:04] [Rank 0] step:7701/10000 train_time:696840ms step_avg:90.49ms +[2025-08-22 20:52:04] [Rank 0] step:7701/10000 train_time:696840ms step_avg:90.49ms +[2025-08-22 20:52:06] [Rank 0] step:7721/10000 train_time:698753ms step_avg:90.50ms +[2025-08-22 20:52:06] [Rank 0] step:7721/10000 train_time:698753ms step_avg:90.50ms +[2025-08-22 20:52:08] 
[Rank 0] step:7741/10000 train_time:700736ms step_avg:90.52ms +[2025-08-22 20:52:08] [Rank 0] step:7741/10000 train_time:700736ms step_avg:90.52ms +[2025-08-22 20:52:10] [Rank 0] step:7761/10000 train_time:702756ms step_avg:90.55ms +[2025-08-22 20:52:10] [Rank 0] step:7761/10000 train_time:702756ms step_avg:90.55ms +[2025-08-22 20:52:12] [Rank 0] step:7781/10000 train_time:704658ms step_avg:90.56ms +[2025-08-22 20:52:12] [Rank 0] step:7781/10000 train_time:704658ms step_avg:90.56ms +[2025-08-22 20:52:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:52:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:52:28] [Rank 0] PRINT: step:7800/10000 val_loss:3.6850 svd_entropy: attn_qk:H=0.9008,top10E=0.08,eRank=398.0,q75/q25=14.18 attn_vo:H=0.8884,top10E=0.08,eRank=370.1,q75/q25=26.47 mlp_w1:H=0.8719,top10E=0.15,eRank=334.2,q75/q25=8.45 mlp_w2:H=0.9297,top10E=0.09,eRank=481.9,q75/q25=5.13 vo_prod:H=0.8128,top10E=0.16,eRank=227.7,q75/q25=438.43 train_time:706574ms step_avg:90.59ms +[2025-08-22 20:52:28] [Rank 0] PRINT: step:7800/10000 val_loss:3.6850 svd_entropy: attn_qk:H=0.9008,top10E=0.08,eRank=398.0,q75/q25=14.18 attn_vo:H=0.8884,top10E=0.08,eRank=370.1,q75/q25=26.47 mlp_w1:H=0.8719,top10E=0.15,eRank=334.2,q75/q25=8.45 mlp_w2:H=0.9297,top10E=0.09,eRank=481.9,q75/q25=5.13 vo_prod:H=0.8128,top10E=0.16,eRank=227.7,q75/q25=438.43 train_time:706574ms step_avg:90.59ms +[2025-08-22 20:52:28] [Rank 0] step:7801/10000 train_time:706592ms step_avg:90.58ms +[2025-08-22 20:52:28] [Rank 0] step:7801/10000 train_time:706592ms step_avg:90.58ms +[2025-08-22 20:52:30] [Rank 0] step:7821/10000 train_time:708492ms step_avg:90.59ms +[2025-08-22 20:52:30] [Rank 0] step:7821/10000 train_time:708492ms step_avg:90.59ms +[2025-08-22 20:52:32] [Rank 0] step:7841/10000 train_time:710389ms step_avg:90.60ms 
+[2025-08-22 20:52:32] [Rank 0] step:7841/10000 train_time:710389ms step_avg:90.60ms +[2025-08-22 20:52:34] [Rank 0] step:7861/10000 train_time:712297ms step_avg:90.61ms +[2025-08-22 20:52:34] [Rank 0] step:7861/10000 train_time:712297ms step_avg:90.61ms +[2025-08-22 20:52:36] [Rank 0] step:7881/10000 train_time:714207ms step_avg:90.62ms +[2025-08-22 20:52:36] [Rank 0] step:7881/10000 train_time:714207ms step_avg:90.62ms +[2025-08-22 20:52:37] [Rank 0] step:7901/10000 train_time:716105ms step_avg:90.63ms +[2025-08-22 20:52:37] [Rank 0] step:7901/10000 train_time:716105ms step_avg:90.63ms +[2025-08-22 20:52:39] [Rank 0] step:7921/10000 train_time:718015ms step_avg:90.65ms +[2025-08-22 20:52:39] [Rank 0] step:7921/10000 train_time:718015ms step_avg:90.65ms +[2025-08-22 20:52:41] [Rank 0] step:7941/10000 train_time:719927ms step_avg:90.66ms +[2025-08-22 20:52:41] [Rank 0] step:7941/10000 train_time:719927ms step_avg:90.66ms +[2025-08-22 20:52:43] [Rank 0] step:7961/10000 train_time:721834ms step_avg:90.67ms +[2025-08-22 20:52:43] [Rank 0] step:7961/10000 train_time:721834ms step_avg:90.67ms +[2025-08-22 20:52:45] [Rank 0] step:7981/10000 train_time:723733ms step_avg:90.68ms +[2025-08-22 20:52:45] [Rank 0] step:7981/10000 train_time:723733ms step_avg:90.68ms +[2025-08-22 20:52:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:52:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:53:01] [Rank 0] PRINT: step:8000/10000 val_loss:3.6669 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.2,q75/q25=14.12 attn_vo:H=0.8888,top10E=0.08,eRank=371.0,q75/q25=26.12 mlp_w1:H=0.8725,top10E=0.15,eRank=335.5,q75/q25=8.40 mlp_w2:H=0.9299,top10E=0.09,eRank=482.4,q75/q25=5.11 vo_prod:H=0.8135,top10E=0.16,eRank=228.6,q75/q25=422.14 train_time:725647ms step_avg:90.71ms +[2025-08-22 20:53:01] [Rank 0] PRINT: step:8000/10000 val_loss:3.6669 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.2,q75/q25=14.12 attn_vo:H=0.8888,top10E=0.08,eRank=371.0,q75/q25=26.12 mlp_w1:H=0.8725,top10E=0.15,eRank=335.5,q75/q25=8.40 mlp_w2:H=0.9299,top10E=0.09,eRank=482.4,q75/q25=5.11 vo_prod:H=0.8135,top10E=0.16,eRank=228.6,q75/q25=422.14 train_time:725647ms step_avg:90.71ms +[2025-08-22 20:53:01] [Rank 0] step:8001/10000 train_time:725667ms step_avg:90.70ms +[2025-08-22 20:53:01] [Rank 0] step:8001/10000 train_time:725667ms step_avg:90.70ms +[2025-08-22 20:53:03] [Rank 0] step:8021/10000 train_time:727568ms step_avg:90.71ms +[2025-08-22 20:53:03] [Rank 0] step:8021/10000 train_time:727568ms step_avg:90.71ms +[2025-08-22 20:53:05] [Rank 0] step:8041/10000 train_time:729473ms step_avg:90.72ms +[2025-08-22 20:53:05] [Rank 0] step:8041/10000 train_time:729473ms step_avg:90.72ms +[2025-08-22 20:53:07] [Rank 0] step:8061/10000 train_time:731373ms step_avg:90.73ms +[2025-08-22 20:53:07] [Rank 0] step:8061/10000 train_time:731373ms step_avg:90.73ms +[2025-08-22 20:53:08] [Rank 0] step:8081/10000 train_time:733262ms step_avg:90.74ms +[2025-08-22 20:53:08] [Rank 0] step:8081/10000 train_time:733262ms step_avg:90.74ms +[2025-08-22 20:53:10] [Rank 0] step:8101/10000 train_time:735230ms step_avg:90.76ms +[2025-08-22 20:53:10] [Rank 0] step:8101/10000 train_time:735230ms step_avg:90.76ms +[2025-08-22 20:53:12] [Rank 0] step:8121/10000 train_time:737215ms step_avg:90.78ms +[2025-08-22 20:53:12] [Rank 0] step:8121/10000 train_time:737215ms step_avg:90.78ms +[2025-08-22 20:53:15] 
[Rank 0] step:8141/10000 train_time:739363ms step_avg:90.82ms +[2025-08-22 20:53:15] [Rank 0] step:8141/10000 train_time:739363ms step_avg:90.82ms +[2025-08-22 20:53:16] [Rank 0] step:8161/10000 train_time:741279ms step_avg:90.83ms +[2025-08-22 20:53:16] [Rank 0] step:8161/10000 train_time:741279ms step_avg:90.83ms +[2025-08-22 20:53:18] [Rank 0] step:8181/10000 train_time:743206ms step_avg:90.85ms +[2025-08-22 20:53:18] [Rank 0] step:8181/10000 train_time:743206ms step_avg:90.85ms +[2025-08-22 20:53:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:53:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:53:34] [Rank 0] PRINT: step:8200/10000 val_loss:3.6558 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.14 attn_vo:H=0.8891,top10E=0.08,eRank=371.7,q75/q25=25.81 mlp_w1:H=0.8731,top10E=0.15,eRank=336.6,q75/q25=8.36 mlp_w2:H=0.9300,top10E=0.09,eRank=483.0,q75/q25=5.10 vo_prod:H=0.8141,top10E=0.16,eRank=229.5,q75/q25=408.56 train_time:745161ms step_avg:90.87ms +[2025-08-22 20:53:34] [Rank 0] PRINT: step:8200/10000 val_loss:3.6558 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.14 attn_vo:H=0.8891,top10E=0.08,eRank=371.7,q75/q25=25.81 mlp_w1:H=0.8731,top10E=0.15,eRank=336.6,q75/q25=8.36 mlp_w2:H=0.9300,top10E=0.09,eRank=483.0,q75/q25=5.10 vo_prod:H=0.8141,top10E=0.16,eRank=229.5,q75/q25=408.56 train_time:745161ms step_avg:90.87ms +[2025-08-22 20:53:34] [Rank 0] step:8201/10000 train_time:745180ms step_avg:90.86ms +[2025-08-22 20:53:34] [Rank 0] step:8201/10000 train_time:745180ms step_avg:90.86ms +[2025-08-22 20:53:36] [Rank 0] step:8221/10000 train_time:747120ms step_avg:90.88ms +[2025-08-22 20:53:36] [Rank 0] step:8221/10000 train_time:747120ms step_avg:90.88ms +[2025-08-22 20:53:38] [Rank 0] step:8241/10000 train_time:749054ms step_avg:90.89ms 
+[2025-08-22 20:53:38] [Rank 0] step:8241/10000 train_time:749054ms step_avg:90.89ms +[2025-08-22 20:53:40] [Rank 0] step:8261/10000 train_time:750988ms step_avg:90.91ms +[2025-08-22 20:53:40] [Rank 0] step:8261/10000 train_time:750988ms step_avg:90.91ms +[2025-08-22 20:53:42] [Rank 0] step:8281/10000 train_time:752912ms step_avg:90.92ms +[2025-08-22 20:53:42] [Rank 0] step:8281/10000 train_time:752912ms step_avg:90.92ms +[2025-08-22 20:53:44] [Rank 0] step:8301/10000 train_time:754838ms step_avg:90.93ms +[2025-08-22 20:53:44] [Rank 0] step:8301/10000 train_time:754838ms step_avg:90.93ms +[2025-08-22 20:53:46] [Rank 0] step:8321/10000 train_time:756758ms step_avg:90.95ms +[2025-08-22 20:53:46] [Rank 0] step:8321/10000 train_time:756758ms step_avg:90.95ms +[2025-08-22 20:53:47] [Rank 0] step:8341/10000 train_time:758690ms step_avg:90.96ms +[2025-08-22 20:53:47] [Rank 0] step:8341/10000 train_time:758690ms step_avg:90.96ms +[2025-08-22 20:53:49] [Rank 0] step:8361/10000 train_time:760616ms step_avg:90.97ms +[2025-08-22 20:53:49] [Rank 0] step:8361/10000 train_time:760616ms step_avg:90.97ms +[2025-08-22 20:53:51] [Rank 0] step:8381/10000 train_time:762540ms step_avg:90.98ms +[2025-08-22 20:53:51] [Rank 0] step:8381/10000 train_time:762540ms step_avg:90.98ms +[2025-08-22 20:53:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:53:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:54:07] [Rank 0] PRINT: step:8400/10000 val_loss:3.6451 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.2,q75/q25=14.09 attn_vo:H=0.8894,top10E=0.08,eRank=372.4,q75/q25=25.54 mlp_w1:H=0.8735,top10E=0.15,eRank=337.7,q75/q25=8.33 mlp_w2:H=0.9302,top10E=0.09,eRank=483.4,q75/q25=5.09 vo_prod:H=0.8147,top10E=0.16,eRank=230.3,q75/q25=395.23 train_time:764470ms step_avg:91.01ms +[2025-08-22 20:54:07] [Rank 0] PRINT: step:8400/10000 val_loss:3.6451 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.2,q75/q25=14.09 attn_vo:H=0.8894,top10E=0.08,eRank=372.4,q75/q25=25.54 mlp_w1:H=0.8735,top10E=0.15,eRank=337.7,q75/q25=8.33 mlp_w2:H=0.9302,top10E=0.09,eRank=483.4,q75/q25=5.09 vo_prod:H=0.8147,top10E=0.16,eRank=230.3,q75/q25=395.23 train_time:764470ms step_avg:91.01ms +[2025-08-22 20:54:07] [Rank 0] step:8401/10000 train_time:764489ms step_avg:91.00ms +[2025-08-22 20:54:07] [Rank 0] step:8401/10000 train_time:764489ms step_avg:91.00ms +[2025-08-22 20:54:09] [Rank 0] step:8421/10000 train_time:766410ms step_avg:91.01ms +[2025-08-22 20:54:09] [Rank 0] step:8421/10000 train_time:766410ms step_avg:91.01ms +[2025-08-22 20:54:11] [Rank 0] step:8441/10000 train_time:768336ms step_avg:91.02ms +[2025-08-22 20:54:11] [Rank 0] step:8441/10000 train_time:768336ms step_avg:91.02ms +[2025-08-22 20:54:13] [Rank 0] step:8461/10000 train_time:770256ms step_avg:91.04ms +[2025-08-22 20:54:13] [Rank 0] step:8461/10000 train_time:770256ms step_avg:91.04ms +[2025-08-22 20:54:15] [Rank 0] step:8481/10000 train_time:772359ms step_avg:91.07ms +[2025-08-22 20:54:15] [Rank 0] step:8481/10000 train_time:772359ms step_avg:91.07ms +[2025-08-22 20:54:17] [Rank 0] step:8501/10000 train_time:774306ms step_avg:91.08ms +[2025-08-22 20:54:17] [Rank 0] step:8501/10000 train_time:774306ms step_avg:91.08ms +[2025-08-22 20:54:19] [Rank 0] step:8521/10000 train_time:776235ms step_avg:91.10ms +[2025-08-22 20:54:19] [Rank 0] step:8521/10000 train_time:776235ms step_avg:91.10ms +[2025-08-22 20:54:21] 
[Rank 0] step:8541/10000 train_time:778177ms step_avg:91.11ms +[2025-08-22 20:54:21] [Rank 0] step:8541/10000 train_time:778177ms step_avg:91.11ms +[2025-08-22 20:54:23] [Rank 0] step:8561/10000 train_time:780112ms step_avg:91.12ms +[2025-08-22 20:54:23] [Rank 0] step:8561/10000 train_time:780112ms step_avg:91.12ms +[2025-08-22 20:54:25] [Rank 0] step:8581/10000 train_time:782043ms step_avg:91.14ms +[2025-08-22 20:54:25] [Rank 0] step:8581/10000 train_time:782043ms step_avg:91.14ms +[2025-08-22 20:54:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:54:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:54:40] [Rank 0] PRINT: step:8600/10000 val_loss:3.6331 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.06 attn_vo:H=0.8896,top10E=0.08,eRank=373.0,q75/q25=25.33 mlp_w1:H=0.8740,top10E=0.15,eRank=338.6,q75/q25=8.30 mlp_w2:H=0.9303,top10E=0.09,eRank=483.8,q75/q25=5.07 vo_prod:H=0.8152,top10E=0.16,eRank=231.0,q75/q25=385.85 train_time:783972ms step_avg:91.16ms +[2025-08-22 20:54:40] [Rank 0] PRINT: step:8600/10000 val_loss:3.6331 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.06 attn_vo:H=0.8896,top10E=0.08,eRank=373.0,q75/q25=25.33 mlp_w1:H=0.8740,top10E=0.15,eRank=338.6,q75/q25=8.30 mlp_w2:H=0.9303,top10E=0.09,eRank=483.8,q75/q25=5.07 vo_prod:H=0.8152,top10E=0.16,eRank=231.0,q75/q25=385.85 train_time:783972ms step_avg:91.16ms +[2025-08-22 20:54:40] [Rank 0] step:8601/10000 train_time:783991ms step_avg:91.15ms +[2025-08-22 20:54:40] [Rank 0] step:8601/10000 train_time:783991ms step_avg:91.15ms +[2025-08-22 20:54:42] [Rank 0] step:8621/10000 train_time:785917ms step_avg:91.16ms +[2025-08-22 20:54:42] [Rank 0] step:8621/10000 train_time:785917ms step_avg:91.16ms +[2025-08-22 20:54:44] [Rank 0] step:8641/10000 train_time:787846ms step_avg:91.18ms 
+[2025-08-22 20:54:44] [Rank 0] step:8641/10000 train_time:787846ms step_avg:91.18ms +[2025-08-22 20:54:46] [Rank 0] step:8661/10000 train_time:789776ms step_avg:91.19ms +[2025-08-22 20:54:46] [Rank 0] step:8661/10000 train_time:789776ms step_avg:91.19ms +[2025-08-22 20:54:48] [Rank 0] step:8681/10000 train_time:791708ms step_avg:91.20ms +[2025-08-22 20:54:48] [Rank 0] step:8681/10000 train_time:791708ms step_avg:91.20ms +[2025-08-22 20:54:50] [Rank 0] step:8701/10000 train_time:793644ms step_avg:91.21ms +[2025-08-22 20:54:50] [Rank 0] step:8701/10000 train_time:793644ms step_avg:91.21ms +[2025-08-22 20:54:52] [Rank 0] step:8721/10000 train_time:795579ms step_avg:91.23ms +[2025-08-22 20:54:52] [Rank 0] step:8721/10000 train_time:795579ms step_avg:91.23ms +[2025-08-22 20:54:54] [Rank 0] step:8741/10000 train_time:797503ms step_avg:91.24ms +[2025-08-22 20:54:54] [Rank 0] step:8741/10000 train_time:797503ms step_avg:91.24ms +[2025-08-22 20:54:56] [Rank 0] step:8761/10000 train_time:799435ms step_avg:91.25ms +[2025-08-22 20:54:56] [Rank 0] step:8761/10000 train_time:799435ms step_avg:91.25ms +[2025-08-22 20:54:58] [Rank 0] step:8781/10000 train_time:801372ms step_avg:91.26ms +[2025-08-22 20:54:58] [Rank 0] step:8781/10000 train_time:801372ms step_avg:91.26ms +[2025-08-22 20:55:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:55:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:55:13] [Rank 0] PRINT: step:8800/10000 val_loss:3.6200 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.06 attn_vo:H=0.8899,top10E=0.08,eRank=373.5,q75/q25=25.18 mlp_w1:H=0.8744,top10E=0.15,eRank=339.4,q75/q25=8.27 mlp_w2:H=0.9304,top10E=0.09,eRank=484.1,q75/q25=5.06 vo_prod:H=0.8157,top10E=0.16,eRank=231.7,q75/q25=379.84 train_time:803309ms step_avg:91.29ms +[2025-08-22 20:55:13] [Rank 0] PRINT: step:8800/10000 val_loss:3.6200 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.06 attn_vo:H=0.8899,top10E=0.08,eRank=373.5,q75/q25=25.18 mlp_w1:H=0.8744,top10E=0.15,eRank=339.4,q75/q25=8.27 mlp_w2:H=0.9304,top10E=0.09,eRank=484.1,q75/q25=5.06 vo_prod:H=0.8157,top10E=0.16,eRank=231.7,q75/q25=379.84 train_time:803309ms step_avg:91.29ms +[2025-08-22 20:55:13] [Rank 0] step:8801/10000 train_time:803329ms step_avg:91.28ms +[2025-08-22 20:55:13] [Rank 0] step:8801/10000 train_time:803329ms step_avg:91.28ms +[2025-08-22 20:55:15] [Rank 0] step:8821/10000 train_time:805276ms step_avg:91.29ms +[2025-08-22 20:55:15] [Rank 0] step:8821/10000 train_time:805276ms step_avg:91.29ms +[2025-08-22 20:55:17] [Rank 0] step:8841/10000 train_time:807288ms step_avg:91.31ms +[2025-08-22 20:55:17] [Rank 0] step:8841/10000 train_time:807288ms step_avg:91.31ms +[2025-08-22 20:55:19] [Rank 0] step:8861/10000 train_time:809288ms step_avg:91.33ms +[2025-08-22 20:55:19] [Rank 0] step:8861/10000 train_time:809288ms step_avg:91.33ms +[2025-08-22 20:55:21] [Rank 0] step:8881/10000 train_time:811215ms step_avg:91.34ms +[2025-08-22 20:55:21] [Rank 0] step:8881/10000 train_time:811215ms step_avg:91.34ms +[2025-08-22 20:55:23] [Rank 0] step:8901/10000 train_time:813144ms step_avg:91.35ms +[2025-08-22 20:55:23] [Rank 0] step:8901/10000 train_time:813144ms step_avg:91.35ms +[2025-08-22 20:55:25] [Rank 0] step:8921/10000 train_time:815087ms step_avg:91.37ms +[2025-08-22 20:55:25] [Rank 0] step:8921/10000 train_time:815087ms step_avg:91.37ms +[2025-08-22 20:55:27] 
[Rank 0] step:8941/10000 train_time:817017ms step_avg:91.38ms +[2025-08-22 20:55:27] [Rank 0] step:8941/10000 train_time:817017ms step_avg:91.38ms +[2025-08-22 20:55:29] [Rank 0] step:8961/10000 train_time:818945ms step_avg:91.39ms +[2025-08-22 20:55:29] [Rank 0] step:8961/10000 train_time:818945ms step_avg:91.39ms +[2025-08-22 20:55:31] [Rank 0] step:8981/10000 train_time:820871ms step_avg:91.40ms +[2025-08-22 20:55:31] [Rank 0] step:8981/10000 train_time:820871ms step_avg:91.40ms +[2025-08-22 20:55:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:55:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:55:47] [Rank 0] PRINT: step:9000/10000 val_loss:3.6093 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.05 attn_vo:H=0.8901,top10E=0.08,eRank=374.0,q75/q25=25.02 mlp_w1:H=0.8747,top10E=0.15,eRank=340.2,q75/q25=8.24 mlp_w2:H=0.9305,top10E=0.09,eRank=484.5,q75/q25=5.05 vo_prod:H=0.8161,top10E=0.16,eRank=232.3,q75/q25=373.20 train_time:822803ms step_avg:91.42ms +[2025-08-22 20:55:47] [Rank 0] PRINT: step:9000/10000 val_loss:3.6093 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.05 attn_vo:H=0.8901,top10E=0.08,eRank=374.0,q75/q25=25.02 mlp_w1:H=0.8747,top10E=0.15,eRank=340.2,q75/q25=8.24 mlp_w2:H=0.9305,top10E=0.09,eRank=484.5,q75/q25=5.05 vo_prod:H=0.8161,top10E=0.16,eRank=232.3,q75/q25=373.20 train_time:822803ms step_avg:91.42ms +[2025-08-22 20:55:47] [Rank 0] step:9001/10000 train_time:822823ms step_avg:91.41ms +[2025-08-22 20:55:47] [Rank 0] step:9001/10000 train_time:822823ms step_avg:91.41ms +[2025-08-22 20:55:49] [Rank 0] step:9021/10000 train_time:824758ms step_avg:91.43ms +[2025-08-22 20:55:49] [Rank 0] step:9021/10000 train_time:824758ms step_avg:91.43ms +[2025-08-22 20:55:51] [Rank 0] step:9041/10000 train_time:826688ms step_avg:91.44ms 
+[2025-08-22 20:55:51] [Rank 0] step:9041/10000 train_time:826688ms step_avg:91.44ms +[2025-08-22 20:55:53] [Rank 0] step:9061/10000 train_time:828619ms step_avg:91.45ms +[2025-08-22 20:55:53] [Rank 0] step:9061/10000 train_time:828619ms step_avg:91.45ms +[2025-08-22 20:55:55] [Rank 0] step:9081/10000 train_time:830554ms step_avg:91.46ms +[2025-08-22 20:55:55] [Rank 0] step:9081/10000 train_time:830554ms step_avg:91.46ms +[2025-08-22 20:55:56] [Rank 0] step:9101/10000 train_time:832500ms step_avg:91.47ms +[2025-08-22 20:55:56] [Rank 0] step:9101/10000 train_time:832500ms step_avg:91.47ms +[2025-08-22 20:55:58] [Rank 0] step:9121/10000 train_time:834431ms step_avg:91.48ms +[2025-08-22 20:55:58] [Rank 0] step:9121/10000 train_time:834431ms step_avg:91.48ms +[2025-08-22 20:56:00] [Rank 0] step:9141/10000 train_time:836349ms step_avg:91.49ms +[2025-08-22 20:56:00] [Rank 0] step:9141/10000 train_time:836349ms step_avg:91.49ms +[2025-08-22 20:56:02] [Rank 0] step:9161/10000 train_time:838274ms step_avg:91.50ms +[2025-08-22 20:56:02] [Rank 0] step:9161/10000 train_time:838274ms step_avg:91.50ms +[2025-08-22 20:56:04] [Rank 0] step:9181/10000 train_time:840238ms step_avg:91.52ms +[2025-08-22 20:56:04] [Rank 0] step:9181/10000 train_time:840238ms step_avg:91.52ms +[2025-08-22 20:56:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:56:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:56:20] [Rank 0] PRINT: step:9200/10000 val_loss:3.5998 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.04 attn_vo:H=0.8902,top10E=0.08,eRank=374.4,q75/q25=24.83 mlp_w1:H=0.8750,top10E=0.15,eRank=340.8,q75/q25=8.22 mlp_w2:H=0.9306,top10E=0.09,eRank=484.8,q75/q25=5.05 vo_prod:H=0.8165,top10E=0.16,eRank=232.8,q75/q25=365.75 train_time:842166ms step_avg:91.54ms +[2025-08-22 20:56:20] [Rank 0] PRINT: step:9200/10000 val_loss:3.5998 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.04 attn_vo:H=0.8902,top10E=0.08,eRank=374.4,q75/q25=24.83 mlp_w1:H=0.8750,top10E=0.15,eRank=340.8,q75/q25=8.22 mlp_w2:H=0.9306,top10E=0.09,eRank=484.8,q75/q25=5.05 vo_prod:H=0.8165,top10E=0.16,eRank=232.8,q75/q25=365.75 train_time:842166ms step_avg:91.54ms +[2025-08-22 20:56:20] [Rank 0] step:9201/10000 train_time:842185ms step_avg:91.53ms +[2025-08-22 20:56:20] [Rank 0] step:9201/10000 train_time:842185ms step_avg:91.53ms +[2025-08-22 20:56:22] [Rank 0] step:9221/10000 train_time:844190ms step_avg:91.55ms +[2025-08-22 20:56:22] [Rank 0] step:9221/10000 train_time:844190ms step_avg:91.55ms +[2025-08-22 20:56:24] [Rank 0] step:9241/10000 train_time:846128ms step_avg:91.56ms +[2025-08-22 20:56:24] [Rank 0] step:9241/10000 train_time:846128ms step_avg:91.56ms +[2025-08-22 20:56:26] [Rank 0] step:9261/10000 train_time:848062ms step_avg:91.57ms +[2025-08-22 20:56:26] [Rank 0] step:9261/10000 train_time:848062ms step_avg:91.57ms +[2025-08-22 20:56:28] [Rank 0] step:9281/10000 train_time:849982ms step_avg:91.58ms +[2025-08-22 20:56:28] [Rank 0] step:9281/10000 train_time:849982ms step_avg:91.58ms +[2025-08-22 20:56:30] [Rank 0] step:9301/10000 train_time:851908ms step_avg:91.59ms +[2025-08-22 20:56:30] [Rank 0] step:9301/10000 train_time:851908ms step_avg:91.59ms +[2025-08-22 20:56:32] [Rank 0] step:9321/10000 train_time:853842ms step_avg:91.60ms +[2025-08-22 20:56:32] [Rank 0] step:9321/10000 train_time:853842ms step_avg:91.60ms +[2025-08-22 20:56:34] 
[Rank 0] step:9341/10000 train_time:855773ms step_avg:91.61ms +[2025-08-22 20:56:34] [Rank 0] step:9341/10000 train_time:855773ms step_avg:91.61ms +[2025-08-22 20:56:35] [Rank 0] step:9361/10000 train_time:857713ms step_avg:91.63ms +[2025-08-22 20:56:35] [Rank 0] step:9361/10000 train_time:857713ms step_avg:91.63ms +[2025-08-22 20:56:37] [Rank 0] step:9381/10000 train_time:859661ms step_avg:91.64ms +[2025-08-22 20:56:37] [Rank 0] step:9381/10000 train_time:859661ms step_avg:91.64ms +[2025-08-22 20:56:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:56:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:56:53] [Rank 0] PRINT: step:9400/10000 val_loss:3.5905 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.02 attn_vo:H=0.8904,top10E=0.08,eRank=374.7,q75/q25=24.69 mlp_w1:H=0.8753,top10E=0.15,eRank=341.4,q75/q25=8.20 mlp_w2:H=0.9306,top10E=0.09,eRank=485.0,q75/q25=5.04 vo_prod:H=0.8168,top10E=0.16,eRank=233.2,q75/q25=361.42 train_time:861608ms step_avg:91.66ms +[2025-08-22 20:56:53] [Rank 0] PRINT: step:9400/10000 val_loss:3.5905 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.02 attn_vo:H=0.8904,top10E=0.08,eRank=374.7,q75/q25=24.69 mlp_w1:H=0.8753,top10E=0.15,eRank=341.4,q75/q25=8.20 mlp_w2:H=0.9306,top10E=0.09,eRank=485.0,q75/q25=5.04 vo_prod:H=0.8168,top10E=0.16,eRank=233.2,q75/q25=361.42 train_time:861608ms step_avg:91.66ms +[2025-08-22 20:56:53] [Rank 0] step:9401/10000 train_time:861627ms step_avg:91.65ms +[2025-08-22 20:56:53] [Rank 0] step:9401/10000 train_time:861627ms step_avg:91.65ms +[2025-08-22 20:56:55] [Rank 0] step:9421/10000 train_time:863567ms step_avg:91.66ms +[2025-08-22 20:56:55] [Rank 0] step:9421/10000 train_time:863567ms step_avg:91.66ms +[2025-08-22 20:56:57] [Rank 0] step:9441/10000 train_time:865500ms step_avg:91.67ms 
+[2025-08-22 20:56:57] [Rank 0] step:9441/10000 train_time:865500ms step_avg:91.67ms +[2025-08-22 20:56:59] [Rank 0] step:9461/10000 train_time:867438ms step_avg:91.69ms +[2025-08-22 20:56:59] [Rank 0] step:9461/10000 train_time:867438ms step_avg:91.69ms +[2025-08-22 20:57:01] [Rank 0] step:9481/10000 train_time:869378ms step_avg:91.70ms +[2025-08-22 20:57:01] [Rank 0] step:9481/10000 train_time:869378ms step_avg:91.70ms +[2025-08-22 20:57:03] [Rank 0] step:9501/10000 train_time:871323ms step_avg:91.71ms +[2025-08-22 20:57:03] [Rank 0] step:9501/10000 train_time:871323ms step_avg:91.71ms +[2025-08-22 20:57:05] [Rank 0] step:9521/10000 train_time:873252ms step_avg:91.72ms +[2025-08-22 20:57:05] [Rank 0] step:9521/10000 train_time:873252ms step_avg:91.72ms +[2025-08-22 20:57:07] [Rank 0] step:9541/10000 train_time:875187ms step_avg:91.73ms +[2025-08-22 20:57:07] [Rank 0] step:9541/10000 train_time:875187ms step_avg:91.73ms +[2025-08-22 20:57:09] [Rank 0] step:9561/10000 train_time:877117ms step_avg:91.74ms +[2025-08-22 20:57:09] [Rank 0] step:9561/10000 train_time:877117ms step_avg:91.74ms +[2025-08-22 20:57:11] [Rank 0] step:9581/10000 train_time:879054ms step_avg:91.75ms +[2025-08-22 20:57:11] [Rank 0] step:9581/10000 train_time:879054ms step_avg:91.75ms +[2025-08-22 20:57:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:57:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:57:26] [Rank 0] PRINT: step:9600/10000 val_loss:3.5825 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.03 attn_vo:H=0.8905,top10E=0.08,eRank=375.0,q75/q25=24.60 mlp_w1:H=0.8755,top10E=0.15,eRank=341.8,q75/q25=8.20 mlp_w2:H=0.9307,top10E=0.09,eRank=485.2,q75/q25=5.03 vo_prod:H=0.8170,top10E=0.16,eRank=233.6,q75/q25=357.99 train_time:881005ms step_avg:91.77ms +[2025-08-22 20:57:26] [Rank 0] PRINT: step:9600/10000 val_loss:3.5825 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.03 attn_vo:H=0.8905,top10E=0.08,eRank=375.0,q75/q25=24.60 mlp_w1:H=0.8755,top10E=0.15,eRank=341.8,q75/q25=8.20 mlp_w2:H=0.9307,top10E=0.09,eRank=485.2,q75/q25=5.03 vo_prod:H=0.8170,top10E=0.16,eRank=233.6,q75/q25=357.99 train_time:881005ms step_avg:91.77ms +[2025-08-22 20:57:26] [Rank 0] step:9601/10000 train_time:881024ms step_avg:91.76ms +[2025-08-22 20:57:26] [Rank 0] step:9601/10000 train_time:881024ms step_avg:91.76ms +[2025-08-22 20:57:28] [Rank 0] step:9621/10000 train_time:882961ms step_avg:91.77ms +[2025-08-22 20:57:28] [Rank 0] step:9621/10000 train_time:882961ms step_avg:91.77ms +[2025-08-22 20:57:30] [Rank 0] step:9641/10000 train_time:884894ms step_avg:91.78ms +[2025-08-22 20:57:30] [Rank 0] step:9641/10000 train_time:884894ms step_avg:91.78ms +[2025-08-22 20:57:32] [Rank 0] step:9661/10000 train_time:886853ms step_avg:91.80ms +[2025-08-22 20:57:32] [Rank 0] step:9661/10000 train_time:886853ms step_avg:91.80ms +[2025-08-22 20:57:34] [Rank 0] step:9681/10000 train_time:888805ms step_avg:91.81ms +[2025-08-22 20:57:34] [Rank 0] step:9681/10000 train_time:888805ms step_avg:91.81ms +[2025-08-22 20:57:36] [Rank 0] step:9701/10000 train_time:890772ms step_avg:91.82ms +[2025-08-22 20:57:36] [Rank 0] step:9701/10000 train_time:890772ms step_avg:91.82ms +[2025-08-22 20:57:38] [Rank 0] step:9721/10000 train_time:892729ms step_avg:91.84ms +[2025-08-22 20:57:38] [Rank 0] step:9721/10000 train_time:892729ms step_avg:91.84ms +[2025-08-22 20:57:40] 
[Rank 0] step:9741/10000 train_time:894701ms step_avg:91.85ms +[2025-08-22 20:57:40] [Rank 0] step:9741/10000 train_time:894701ms step_avg:91.85ms +[2025-08-22 20:57:42] [Rank 0] step:9761/10000 train_time:896661ms step_avg:91.86ms +[2025-08-22 20:57:42] [Rank 0] step:9761/10000 train_time:896661ms step_avg:91.86ms +[2025-08-22 20:57:44] [Rank 0] step:9781/10000 train_time:898627ms step_avg:91.87ms +[2025-08-22 20:57:44] [Rank 0] step:9781/10000 train_time:898627ms step_avg:91.87ms +[2025-08-22 20:57:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:57:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:58:00] [Rank 0] PRINT: step:9800/10000 val_loss:3.5746 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.02 attn_vo:H=0.8906,top10E=0.08,eRank=375.2,q75/q25=24.56 mlp_w1:H=0.8756,top10E=0.15,eRank=342.1,q75/q25=8.19 mlp_w2:H=0.9307,top10E=0.09,eRank=485.3,q75/q25=5.03 vo_prod:H=0.8172,top10E=0.16,eRank=233.9,q75/q25=353.32 train_time:900603ms step_avg:91.90ms +[2025-08-22 20:58:00] [Rank 0] PRINT: step:9800/10000 val_loss:3.5746 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.3,q75/q25=14.02 attn_vo:H=0.8906,top10E=0.08,eRank=375.2,q75/q25=24.56 mlp_w1:H=0.8756,top10E=0.15,eRank=342.1,q75/q25=8.19 mlp_w2:H=0.9307,top10E=0.09,eRank=485.3,q75/q25=5.03 vo_prod:H=0.8172,top10E=0.16,eRank=233.9,q75/q25=353.32 train_time:900603ms step_avg:91.90ms +[2025-08-22 20:58:00] [Rank 0] step:9801/10000 train_time:900623ms step_avg:91.89ms +[2025-08-22 20:58:00] [Rank 0] step:9801/10000 train_time:900623ms step_avg:91.89ms +[2025-08-22 20:58:02] [Rank 0] step:9821/10000 train_time:902581ms step_avg:91.90ms +[2025-08-22 20:58:02] [Rank 0] step:9821/10000 train_time:902581ms step_avg:91.90ms +[2025-08-22 20:58:04] [Rank 0] step:9841/10000 train_time:904543ms step_avg:91.92ms 
+[2025-08-22 20:58:04] [Rank 0] step:9841/10000 train_time:904543ms step_avg:91.92ms +[2025-08-22 20:58:06] [Rank 0] step:9861/10000 train_time:906489ms step_avg:91.93ms +[2025-08-22 20:58:06] [Rank 0] step:9861/10000 train_time:906489ms step_avg:91.93ms +[2025-08-22 20:58:08] [Rank 0] step:9881/10000 train_time:908432ms step_avg:91.94ms +[2025-08-22 20:58:08] [Rank 0] step:9881/10000 train_time:908432ms step_avg:91.94ms +[2025-08-22 20:58:10] [Rank 0] step:9901/10000 train_time:910400ms step_avg:91.95ms +[2025-08-22 20:58:10] [Rank 0] step:9901/10000 train_time:910400ms step_avg:91.95ms +[2025-08-22 20:58:12] [Rank 0] step:9921/10000 train_time:912350ms step_avg:91.96ms +[2025-08-22 20:58:12] [Rank 0] step:9921/10000 train_time:912350ms step_avg:91.96ms +[2025-08-22 20:58:13] [Rank 0] step:9941/10000 train_time:914316ms step_avg:91.97ms +[2025-08-22 20:58:13] [Rank 0] step:9941/10000 train_time:914316ms step_avg:91.97ms +[2025-08-22 20:58:15] [Rank 0] step:9961/10000 train_time:916262ms step_avg:91.98ms +[2025-08-22 20:58:15] [Rank 0] step:9961/10000 train_time:916262ms step_avg:91.98ms +[2025-08-22 20:58:17] [Rank 0] step:9981/10000 train_time:918220ms step_avg:92.00ms +[2025-08-22 20:58:17] [Rank 0] step:9981/10000 train_time:918220ms step_avg:92.00ms +[2025-08-22 20:58:19] [Rank 0] step:10000/10000 train_time:920086ms step_avg:92.01ms +[2025-08-22 20:58:19] [Rank 0] step:10000/10000 train_time:920086ms step_avg:92.01ms +[2025-08-22 20:58:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:58:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 20:58:33] [Rank 0] PRINT: step:10000/10000 val_loss:3.5670 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.2,q75/q25=14.05 attn_vo:H=0.8907,top10E=0.08,eRank=375.3,q75/q25=24.51 mlp_w1:H=0.8757,top10E=0.15,eRank=342.4,q75/q25=8.18 mlp_w2:H=0.9308,top10E=0.09,eRank=485.4,q75/q25=5.02 vo_prod:H=0.8174,top10E=0.16,eRank=234.1,q75/q25=350.40 train_time:920192ms step_avg:92.02ms +[2025-08-22 20:58:33] [Rank 0] PRINT: step:10000/10000 val_loss:3.5670 svd_entropy: attn_qk:H=0.9009,top10E=0.08,eRank=398.2,q75/q25=14.05 attn_vo:H=0.8907,top10E=0.08,eRank=375.3,q75/q25=24.51 mlp_w1:H=0.8757,top10E=0.15,eRank=342.4,q75/q25=8.18 mlp_w2:H=0.9308,top10E=0.09,eRank=485.4,q75/q25=5.02 vo_prod:H=0.8174,top10E=0.16,eRank=234.1,q75/q25=350.40 train_time:920192ms step_avg:92.02ms +[2025-08-22 20:58:33] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 20:58:33 2025 --- +[2025-08-22 20:58:33] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 20:58:33 2025 --- +[2025-08-22 20:58:33] [Rank 0] PRINT: Peak memory allocated: 11485 MiB reserved: 15856 MiB +[2025-08-22 20:58:33] [Rank 0] PRINT: Peak memory allocated: 11485 MiB reserved: 15856 MiB diff --git a/logs_svd_gated/mode_4_param_gated_seed_41/config.json b/logs_svd_gated/mode_4_param_gated_seed_41/config.json new file mode 100644 index 0000000000000000000000000000000000000000..53905f3c7b0dc1be7f16a62cb42dc69b59694366 --- /dev/null +++ b/logs_svd_gated/mode_4_param_gated_seed_41/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 41, + "optimizer_mode": 4, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "513ab361-d804-435a-9b86-a02c5b2c5beb", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_4_param_gated_seed_41/training_log_513ab361-d804-435a-9b86-a02c5b2c5beb.txt b/logs_svd_gated/mode_4_param_gated_seed_41/training_log_513ab361-d804-435a-9b86-a02c5b2c5beb.txt new file mode 100644 index 0000000000000000000000000000000000000000..38610b40e5f3ec09365c02b142cf444043c60ffa --- /dev/null +++ b/logs_svd_gated/mode_4_param_gated_seed_41/training_log_513ab361-d804-435a-9b86-a02c5b2c5beb.txt @@ -0,0 +1,2926 @@ +[2025-08-22 10:31:11] [Rank 0] PRINT: --- Script Start: Fri Aug 22 10:31:11 2025 --- +[2025-08-22 10:31:11] [Rank 0] PRINT: --- Script Start: Fri Aug 22 10:31:11 2025 --- +[2025-08-22 10:31:11] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=4, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 10:31:11] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=4, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 10:31:12] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 10:31:12] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 10:31:12] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 10:31:12] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 10:31:12] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_4_param_gated_seed_41 +[2025-08-22 10:31:12] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_4_param_gated_seed_41 +[2025-08-22 10:31:12] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 10:31:12] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 10:31:12] [Rank 0] PRINT: Constructing model... +[2025-08-22 10:31:12] [Rank 0] PRINT: Constructing model... +[2025-08-22 10:31:13] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 10:31:13] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 10:31:13] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 10:31:13] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 10:31:13] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 10:31:13] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 10:31:13] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 4 +[2025-08-22 10:31:13] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 4 +[2025-08-22 10:31:14] [Rank 0] PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: 0.05). +[2025-08-22 10:31:14] [Rank 0] PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: 0.05). +[2025-08-22 10:31:14] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 10:31:14] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 10:31:14] [Rank 0] PRINT: Muon optimizer is active with 36 parameters. +[2025-08-22 10:31:14] [Rank 0] PRINT: Muon optimizer is active with 36 parameters. +[2025-08-22 10:31:14] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 10:31:14] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 10:31:14] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 10:31:14] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 10:31:14] [Rank 0] PRINT: Starting warmup... +[2025-08-22 10:31:14] [Rank 0] PRINT: Starting warmup... +[2025-08-22 10:32:01] [Rank 0] PRINT: Warmup complete. +[2025-08-22 10:32:01] [Rank 0] PRINT: Warmup complete. +[2025-08-22 10:32:02] [Rank 0] PRINT: Starting training... +[2025-08-22 10:32:02] [Rank 0] PRINT: Starting training... 
+[2025-08-22 10:32:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:32:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:32:19] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 10:32:19] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 10:32:21] [Rank 0] step:21/10000 train_time:1899ms step_avg:90.45ms +[2025-08-22 10:32:21] [Rank 0] step:21/10000 train_time:1899ms step_avg:90.45ms +[2025-08-22 10:32:23] [Rank 0] step:41/10000 train_time:3660ms step_avg:89.28ms +[2025-08-22 10:32:23] [Rank 0] step:41/10000 train_time:3660ms step_avg:89.28ms +[2025-08-22 10:32:25] [Rank 0] step:61/10000 train_time:5422ms step_avg:88.88ms +[2025-08-22 10:32:25] [Rank 0] step:61/10000 train_time:5422ms step_avg:88.88ms +[2025-08-22 10:32:27] [Rank 0] step:81/10000 train_time:7185ms step_avg:88.70ms +[2025-08-22 10:32:27] [Rank 0] step:81/10000 train_time:7185ms step_avg:88.70ms +[2025-08-22 10:32:28] [Rank 0] step:101/10000 train_time:8948ms step_avg:88.60ms +[2025-08-22 10:32:28] [Rank 0] step:101/10000 train_time:8948ms step_avg:88.60ms +[2025-08-22 10:32:30] [Rank 0] step:121/10000 train_time:10713ms step_avg:88.54ms +[2025-08-22 10:32:30] [Rank 0] step:121/10000 
train_time:10713ms step_avg:88.54ms +[2025-08-22 10:32:32] [Rank 0] step:141/10000 train_time:12479ms step_avg:88.50ms +[2025-08-22 10:32:32] [Rank 0] step:141/10000 train_time:12479ms step_avg:88.50ms +[2025-08-22 10:32:34] [Rank 0] step:161/10000 train_time:14252ms step_avg:88.52ms +[2025-08-22 10:32:34] [Rank 0] step:161/10000 train_time:14252ms step_avg:88.52ms +[2025-08-22 10:32:35] [Rank 0] step:181/10000 train_time:16028ms step_avg:88.55ms +[2025-08-22 10:32:35] [Rank 0] step:181/10000 train_time:16028ms step_avg:88.55ms +[2025-08-22 10:32:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:32:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:32:51] [Rank 0] PRINT: step:200/10000 val_loss:6.6657 svd_entropy: attn_qk:H=0.2696,top10E=0.90,eRank=12.2,q75/q25=16.76 attn_vo:H=0.1706,top10E=0.96,eRank=4.3,q75/q25=95.63 mlp_w1:H=0.8577,top10E=0.29,eRank=299.5,q75/q25=2.50 mlp_w2:H=0.7810,top10E=0.28,eRank=252.1,q75/q25=9.99 vo_prod:H=0.0446,top10E=1.00,eRank=1.5,q75/q25=661.21 train_time:17821ms step_avg:89.10ms +[2025-08-22 10:32:51] [Rank 0] PRINT: step:200/10000 val_loss:6.6657 svd_entropy: attn_qk:H=0.2696,top10E=0.90,eRank=12.2,q75/q25=16.76 attn_vo:H=0.1706,top10E=0.96,eRank=4.3,q75/q25=95.63 mlp_w1:H=0.8577,top10E=0.29,eRank=299.5,q75/q25=2.50 mlp_w2:H=0.7810,top10E=0.28,eRank=252.1,q75/q25=9.99 vo_prod:H=0.0446,top10E=1.00,eRank=1.5,q75/q25=661.21 train_time:17821ms step_avg:89.10ms +[2025-08-22 10:32:51] [Rank 0] step:201/10000 train_time:17840ms step_avg:88.76ms +[2025-08-22 10:32:51] [Rank 0] step:201/10000 train_time:17840ms step_avg:88.76ms +[2025-08-22 10:32:53] [Rank 0] step:221/10000 train_time:19595ms step_avg:88.66ms +[2025-08-22 10:32:53] [Rank 0] step:221/10000 train_time:19595ms step_avg:88.66ms +[2025-08-22 10:32:55] [Rank 0] step:241/10000 
train_time:21370ms step_avg:88.67ms +[2025-08-22 10:32:55] [Rank 0] step:241/10000 train_time:21370ms step_avg:88.67ms +[2025-08-22 10:32:56] [Rank 0] step:261/10000 train_time:23146ms step_avg:88.68ms +[2025-08-22 10:32:56] [Rank 0] step:261/10000 train_time:23146ms step_avg:88.68ms +[2025-08-22 10:32:58] [Rank 0] step:281/10000 train_time:24925ms step_avg:88.70ms +[2025-08-22 10:32:58] [Rank 0] step:281/10000 train_time:24925ms step_avg:88.70ms +[2025-08-22 10:33:00] [Rank 0] step:301/10000 train_time:26705ms step_avg:88.72ms +[2025-08-22 10:33:00] [Rank 0] step:301/10000 train_time:26705ms step_avg:88.72ms +[2025-08-22 10:33:02] [Rank 0] step:321/10000 train_time:28485ms step_avg:88.74ms +[2025-08-22 10:33:02] [Rank 0] step:321/10000 train_time:28485ms step_avg:88.74ms +[2025-08-22 10:33:03] [Rank 0] step:341/10000 train_time:30267ms step_avg:88.76ms +[2025-08-22 10:33:03] [Rank 0] step:341/10000 train_time:30267ms step_avg:88.76ms +[2025-08-22 10:33:05] [Rank 0] step:361/10000 train_time:32048ms step_avg:88.78ms +[2025-08-22 10:33:05] [Rank 0] step:361/10000 train_time:32048ms step_avg:88.78ms +[2025-08-22 10:33:07] [Rank 0] step:381/10000 train_time:33831ms step_avg:88.80ms +[2025-08-22 10:33:07] [Rank 0] step:381/10000 train_time:33831ms step_avg:88.80ms +[2025-08-22 10:33:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:33:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:33:22] [Rank 0] PRINT: step:400/10000 val_loss:5.8169 svd_entropy: attn_qk:H=0.3339,top10E=0.85,eRank=16.7,q75/q25=26.99 attn_vo:H=0.2748,top10E=0.89,eRank=11.7,q75/q25=66.04 mlp_w1:H=0.7665,top10E=0.40,eRank=164.2,q75/q25=2.58 mlp_w2:H=0.7471,top10E=0.34,eRank=192.5,q75/q25=10.26 vo_prod:H=0.1195,top10E=0.98,eRank=3.2,q75/q25=492.10 train_time:35627ms step_avg:89.07ms +[2025-08-22 10:33:22] [Rank 0] PRINT: step:400/10000 val_loss:5.8169 svd_entropy: attn_qk:H=0.3339,top10E=0.85,eRank=16.7,q75/q25=26.99 attn_vo:H=0.2748,top10E=0.89,eRank=11.7,q75/q25=66.04 mlp_w1:H=0.7665,top10E=0.40,eRank=164.2,q75/q25=2.58 mlp_w2:H=0.7471,top10E=0.34,eRank=192.5,q75/q25=10.26 vo_prod:H=0.1195,top10E=0.98,eRank=3.2,q75/q25=492.10 train_time:35627ms step_avg:89.07ms +[2025-08-22 10:33:22] [Rank 0] step:401/10000 train_time:35647ms step_avg:88.90ms +[2025-08-22 10:33:22] [Rank 0] step:401/10000 train_time:35647ms step_avg:88.90ms +[2025-08-22 10:33:24] [Rank 0] step:421/10000 train_time:37424ms step_avg:88.89ms +[2025-08-22 10:33:24] [Rank 0] step:421/10000 train_time:37424ms step_avg:88.89ms +[2025-08-22 10:33:26] [Rank 0] step:441/10000 train_time:39235ms step_avg:88.97ms +[2025-08-22 10:33:26] [Rank 0] step:441/10000 train_time:39235ms step_avg:88.97ms +[2025-08-22 10:33:28] [Rank 0] step:461/10000 train_time:41015ms step_avg:88.97ms +[2025-08-22 10:33:28] [Rank 0] step:461/10000 train_time:41015ms step_avg:88.97ms +[2025-08-22 10:33:30] [Rank 0] step:481/10000 train_time:42798ms step_avg:88.98ms +[2025-08-22 10:33:30] [Rank 0] step:481/10000 train_time:42798ms step_avg:88.98ms +[2025-08-22 10:33:31] [Rank 0] step:501/10000 train_time:44579ms step_avg:88.98ms +[2025-08-22 10:33:31] [Rank 0] step:501/10000 train_time:44579ms step_avg:88.98ms +[2025-08-22 10:33:33] [Rank 0] step:521/10000 train_time:46360ms step_avg:88.98ms +[2025-08-22 10:33:33] [Rank 0] step:521/10000 train_time:46360ms step_avg:88.98ms +[2025-08-22 10:33:35] [Rank 0] step:541/10000 
train_time:48143ms step_avg:88.99ms +[2025-08-22 10:33:35] [Rank 0] step:541/10000 train_time:48143ms step_avg:88.99ms +[2025-08-22 10:33:37] [Rank 0] step:561/10000 train_time:49926ms step_avg:88.99ms +[2025-08-22 10:33:37] [Rank 0] step:561/10000 train_time:49926ms step_avg:88.99ms +[2025-08-22 10:33:38] [Rank 0] step:581/10000 train_time:51709ms step_avg:89.00ms +[2025-08-22 10:33:38] [Rank 0] step:581/10000 train_time:51709ms step_avg:89.00ms +[2025-08-22 10:33:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:33:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:33:54] [Rank 0] PRINT: step:600/10000 val_loss:5.4562 svd_entropy: attn_qk:H=0.4045,top10E=0.80,eRank=22.0,q75/q25=40.97 attn_vo:H=0.3449,top10E=0.84,eRank=19.6,q75/q25=55.11 mlp_w1:H=0.7229,top10E=0.42,eRank=124.9,q75/q25=3.36 mlp_w2:H=0.7216,top10E=0.37,eRank=156.7,q75/q25=11.11 vo_prod:H=0.1963,top10E=0.95,eRank=6.0,q75/q25=491.54 train_time:53507ms step_avg:89.18ms +[2025-08-22 10:33:54] [Rank 0] PRINT: step:600/10000 val_loss:5.4562 svd_entropy: attn_qk:H=0.4045,top10E=0.80,eRank=22.0,q75/q25=40.97 attn_vo:H=0.3449,top10E=0.84,eRank=19.6,q75/q25=55.11 mlp_w1:H=0.7229,top10E=0.42,eRank=124.9,q75/q25=3.36 mlp_w2:H=0.7216,top10E=0.37,eRank=156.7,q75/q25=11.11 vo_prod:H=0.1963,top10E=0.95,eRank=6.0,q75/q25=491.54 train_time:53507ms step_avg:89.18ms +[2025-08-22 10:33:54] [Rank 0] step:601/10000 train_time:53525ms step_avg:89.06ms +[2025-08-22 10:33:54] [Rank 0] step:601/10000 train_time:53525ms step_avg:89.06ms +[2025-08-22 10:33:56] [Rank 0] step:621/10000 train_time:55288ms step_avg:89.03ms +[2025-08-22 10:33:56] [Rank 0] step:621/10000 train_time:55288ms step_avg:89.03ms +[2025-08-22 10:33:57] [Rank 0] step:641/10000 train_time:57069ms step_avg:89.03ms +[2025-08-22 10:33:57] [Rank 0] step:641/10000 
train_time:57069ms step_avg:89.03ms +[2025-08-22 10:33:59] [Rank 0] step:661/10000 train_time:58847ms step_avg:89.03ms +[2025-08-22 10:33:59] [Rank 0] step:661/10000 train_time:58847ms step_avg:89.03ms +[2025-08-22 10:34:01] [Rank 0] step:681/10000 train_time:60625ms step_avg:89.02ms +[2025-08-22 10:34:01] [Rank 0] step:681/10000 train_time:60625ms step_avg:89.02ms +[2025-08-22 10:34:03] [Rank 0] step:701/10000 train_time:62403ms step_avg:89.02ms +[2025-08-22 10:34:03] [Rank 0] step:701/10000 train_time:62403ms step_avg:89.02ms +[2025-08-22 10:34:04] [Rank 0] step:721/10000 train_time:64184ms step_avg:89.02ms +[2025-08-22 10:34:04] [Rank 0] step:721/10000 train_time:64184ms step_avg:89.02ms +[2025-08-22 10:34:06] [Rank 0] step:741/10000 train_time:65963ms step_avg:89.02ms +[2025-08-22 10:34:06] [Rank 0] step:741/10000 train_time:65963ms step_avg:89.02ms +[2025-08-22 10:34:08] [Rank 0] step:761/10000 train_time:67759ms step_avg:89.04ms +[2025-08-22 10:34:08] [Rank 0] step:761/10000 train_time:67759ms step_avg:89.04ms +[2025-08-22 10:34:10] [Rank 0] step:781/10000 train_time:69551ms step_avg:89.05ms +[2025-08-22 10:34:10] [Rank 0] step:781/10000 train_time:69551ms step_avg:89.05ms +[2025-08-22 10:34:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:34:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:34:25] [Rank 0] PRINT: step:800/10000 val_loss:5.2639 svd_entropy: attn_qk:H=0.4309,top10E=0.77,eRank=24.5,q75/q25=63.51 attn_vo:H=0.3799,top10E=0.81,eRank=26.1,q75/q25=55.41 mlp_w1:H=0.6952,top10E=0.43,eRank=109.3,q75/q25=5.31 mlp_w2:H=0.7028,top10E=0.39,eRank=138.8,q75/q25=12.52 vo_prod:H=0.2458,top10E=0.92,eRank=8.4,q75/q25=696.61 train_time:71359ms step_avg:89.20ms +[2025-08-22 10:34:25] [Rank 0] PRINT: step:800/10000 val_loss:5.2639 svd_entropy: attn_qk:H=0.4309,top10E=0.77,eRank=24.5,q75/q25=63.51 attn_vo:H=0.3799,top10E=0.81,eRank=26.1,q75/q25=55.41 mlp_w1:H=0.6952,top10E=0.43,eRank=109.3,q75/q25=5.31 mlp_w2:H=0.7028,top10E=0.39,eRank=138.8,q75/q25=12.52 vo_prod:H=0.2458,top10E=0.92,eRank=8.4,q75/q25=696.61 train_time:71359ms step_avg:89.20ms +[2025-08-22 10:34:25] [Rank 0] step:801/10000 train_time:71377ms step_avg:89.11ms +[2025-08-22 10:34:25] [Rank 0] step:801/10000 train_time:71377ms step_avg:89.11ms +[2025-08-22 10:34:27] [Rank 0] step:821/10000 train_time:73211ms step_avg:89.17ms +[2025-08-22 10:34:27] [Rank 0] step:821/10000 train_time:73211ms step_avg:89.17ms +[2025-08-22 10:34:29] [Rank 0] step:841/10000 train_time:75073ms step_avg:89.27ms +[2025-08-22 10:34:29] [Rank 0] step:841/10000 train_time:75073ms step_avg:89.27ms +[2025-08-22 10:34:31] [Rank 0] step:861/10000 train_time:76862ms step_avg:89.27ms +[2025-08-22 10:34:31] [Rank 0] step:861/10000 train_time:76862ms step_avg:89.27ms +[2025-08-22 10:34:32] [Rank 0] step:881/10000 train_time:78651ms step_avg:89.27ms +[2025-08-22 10:34:32] [Rank 0] step:881/10000 train_time:78651ms step_avg:89.27ms +[2025-08-22 10:34:34] [Rank 0] step:901/10000 train_time:80442ms step_avg:89.28ms +[2025-08-22 10:34:34] [Rank 0] step:901/10000 train_time:80442ms step_avg:89.28ms +[2025-08-22 10:34:36] [Rank 0] step:921/10000 train_time:82237ms step_avg:89.29ms +[2025-08-22 10:34:36] [Rank 0] step:921/10000 train_time:82237ms step_avg:89.29ms +[2025-08-22 10:34:38] [Rank 0] step:941/10000 
train_time:84029ms step_avg:89.30ms +[2025-08-22 10:34:38] [Rank 0] step:941/10000 train_time:84029ms step_avg:89.30ms +[2025-08-22 10:34:40] [Rank 0] step:961/10000 train_time:85824ms step_avg:89.31ms +[2025-08-22 10:34:40] [Rank 0] step:961/10000 train_time:85824ms step_avg:89.31ms +[2025-08-22 10:34:41] [Rank 0] step:981/10000 train_time:87621ms step_avg:89.32ms +[2025-08-22 10:34:41] [Rank 0] step:981/10000 train_time:87621ms step_avg:89.32ms +[2025-08-22 10:34:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:34:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:34:57] [Rank 0] PRINT: step:1000/10000 val_loss:5.1195 svd_entropy: attn_qk:H=0.4412,top10E=0.76,eRank=26.9,q75/q25=82.79 attn_vo:H=0.3967,top10E=0.78,eRank=30.9,q75/q25=54.96 mlp_w1:H=0.6841,top10E=0.43,eRank=106.5,q75/q25=7.86 mlp_w2:H=0.6922,top10E=0.40,eRank=132.3,q75/q25=14.20 vo_prod:H=0.2665,top10E=0.91,eRank=10.3,q75/q25=987.22 train_time:89431ms step_avg:89.43ms +[2025-08-22 10:34:57] [Rank 0] PRINT: step:1000/10000 val_loss:5.1195 svd_entropy: attn_qk:H=0.4412,top10E=0.76,eRank=26.9,q75/q25=82.79 attn_vo:H=0.3967,top10E=0.78,eRank=30.9,q75/q25=54.96 mlp_w1:H=0.6841,top10E=0.43,eRank=106.5,q75/q25=7.86 mlp_w2:H=0.6922,top10E=0.40,eRank=132.3,q75/q25=14.20 vo_prod:H=0.2665,top10E=0.91,eRank=10.3,q75/q25=987.22 train_time:89431ms step_avg:89.43ms +[2025-08-22 10:34:57] [Rank 0] step:1001/10000 train_time:89449ms step_avg:89.36ms +[2025-08-22 10:34:57] [Rank 0] step:1001/10000 train_time:89449ms step_avg:89.36ms +[2025-08-22 10:34:58] [Rank 0] step:1021/10000 train_time:91240ms step_avg:89.36ms +[2025-08-22 10:34:58] [Rank 0] step:1021/10000 train_time:91240ms step_avg:89.36ms +[2025-08-22 10:35:00] [Rank 0] step:1041/10000 train_time:93035ms step_avg:89.37ms +[2025-08-22 10:35:00] [Rank 0] step:1041/10000 
train_time:93035ms step_avg:89.37ms +[2025-08-22 10:35:02] [Rank 0] step:1061/10000 train_time:94830ms step_avg:89.38ms +[2025-08-22 10:35:02] [Rank 0] step:1061/10000 train_time:94830ms step_avg:89.38ms +[2025-08-22 10:35:04] [Rank 0] step:1081/10000 train_time:96625ms step_avg:89.39ms +[2025-08-22 10:35:04] [Rank 0] step:1081/10000 train_time:96625ms step_avg:89.39ms +[2025-08-22 10:35:06] [Rank 0] step:1101/10000 train_time:98422ms step_avg:89.39ms +[2025-08-22 10:35:06] [Rank 0] step:1101/10000 train_time:98422ms step_avg:89.39ms +[2025-08-22 10:35:07] [Rank 0] step:1121/10000 train_time:100219ms step_avg:89.40ms +[2025-08-22 10:35:07] [Rank 0] step:1121/10000 train_time:100219ms step_avg:89.40ms +[2025-08-22 10:35:09] [Rank 0] step:1141/10000 train_time:102016ms step_avg:89.41ms +[2025-08-22 10:35:09] [Rank 0] step:1141/10000 train_time:102016ms step_avg:89.41ms +[2025-08-22 10:35:11] [Rank 0] step:1161/10000 train_time:103814ms step_avg:89.42ms +[2025-08-22 10:35:11] [Rank 0] step:1161/10000 train_time:103814ms step_avg:89.42ms +[2025-08-22 10:35:13] [Rank 0] step:1181/10000 train_time:105612ms step_avg:89.43ms +[2025-08-22 10:35:13] [Rank 0] step:1181/10000 train_time:105612ms step_avg:89.43ms +[2025-08-22 10:35:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:35:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:35:28] [Rank 0] PRINT: step:1200/10000 val_loss:4.9733 svd_entropy: attn_qk:H=0.4507,top10E=0.74,eRank=29.2,q75/q25=100.05 attn_vo:H=0.4118,top10E=0.77,eRank=35.3,q75/q25=58.65 mlp_w1:H=0.6727,top10E=0.44,eRank=104.1,q75/q25=10.29 mlp_w2:H=0.6832,top10E=0.41,eRank=128.7,q75/q25=15.86 vo_prod:H=0.2827,top10E=0.89,eRank=11.9,q75/q25=1242.59 train_time:107423ms step_avg:89.52ms +[2025-08-22 10:35:28] [Rank 0] PRINT: step:1200/10000 val_loss:4.9733 svd_entropy: attn_qk:H=0.4507,top10E=0.74,eRank=29.2,q75/q25=100.05 attn_vo:H=0.4118,top10E=0.77,eRank=35.3,q75/q25=58.65 mlp_w1:H=0.6727,top10E=0.44,eRank=104.1,q75/q25=10.29 mlp_w2:H=0.6832,top10E=0.41,eRank=128.7,q75/q25=15.86 vo_prod:H=0.2827,top10E=0.89,eRank=11.9,q75/q25=1242.59 train_time:107423ms step_avg:89.52ms +[2025-08-22 10:35:28] [Rank 0] step:1201/10000 train_time:107441ms step_avg:89.46ms +[2025-08-22 10:35:28] [Rank 0] step:1201/10000 train_time:107441ms step_avg:89.46ms +[2025-08-22 10:35:30] [Rank 0] step:1221/10000 train_time:109301ms step_avg:89.52ms +[2025-08-22 10:35:30] [Rank 0] step:1221/10000 train_time:109301ms step_avg:89.52ms +[2025-08-22 10:35:32] [Rank 0] step:1241/10000 train_time:111151ms step_avg:89.57ms +[2025-08-22 10:35:32] [Rank 0] step:1241/10000 train_time:111151ms step_avg:89.57ms +[2025-08-22 10:35:34] [Rank 0] step:1261/10000 train_time:112944ms step_avg:89.57ms +[2025-08-22 10:35:34] [Rank 0] step:1261/10000 train_time:112944ms step_avg:89.57ms +[2025-08-22 10:35:35] [Rank 0] step:1281/10000 train_time:114736ms step_avg:89.57ms +[2025-08-22 10:35:35] [Rank 0] step:1281/10000 train_time:114736ms step_avg:89.57ms +[2025-08-22 10:35:37] [Rank 0] step:1301/10000 train_time:116530ms step_avg:89.57ms +[2025-08-22 10:35:37] [Rank 0] step:1301/10000 train_time:116530ms step_avg:89.57ms +[2025-08-22 10:35:39] [Rank 0] step:1321/10000 train_time:118325ms step_avg:89.57ms +[2025-08-22 10:35:39] [Rank 0] step:1321/10000 train_time:118325ms step_avg:89.57ms +[2025-08-22 
10:35:41] [Rank 0] step:1341/10000 train_time:120119ms step_avg:89.57ms +[2025-08-22 10:35:41] [Rank 0] step:1341/10000 train_time:120119ms step_avg:89.57ms +[2025-08-22 10:35:43] [Rank 0] step:1361/10000 train_time:121914ms step_avg:89.58ms +[2025-08-22 10:35:43] [Rank 0] step:1361/10000 train_time:121914ms step_avg:89.58ms +[2025-08-22 10:35:44] [Rank 0] step:1381/10000 train_time:123709ms step_avg:89.58ms +[2025-08-22 10:35:44] [Rank 0] step:1381/10000 train_time:123709ms step_avg:89.58ms +[2025-08-22 10:35:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:35:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:36:00] [Rank 0] PRINT: step:1400/10000 val_loss:4.8479 svd_entropy: attn_qk:H=0.4543,top10E=0.73,eRank=30.9,q75/q25=113.28 attn_vo:H=0.4274,top10E=0.75,eRank=39.3,q75/q25=60.92 mlp_w1:H=0.6650,top10E=0.44,eRank=103.9,q75/q25=12.30 mlp_w2:H=0.6772,top10E=0.41,eRank=127.0,q75/q25=17.11 vo_prod:H=0.2968,top10E=0.88,eRank=13.1,q75/q25=1540.46 train_time:125518ms step_avg:89.66ms +[2025-08-22 10:36:00] [Rank 0] PRINT: step:1400/10000 val_loss:4.8479 svd_entropy: attn_qk:H=0.4543,top10E=0.73,eRank=30.9,q75/q25=113.28 attn_vo:H=0.4274,top10E=0.75,eRank=39.3,q75/q25=60.92 mlp_w1:H=0.6650,top10E=0.44,eRank=103.9,q75/q25=12.30 mlp_w2:H=0.6772,top10E=0.41,eRank=127.0,q75/q25=17.11 vo_prod:H=0.2968,top10E=0.88,eRank=13.1,q75/q25=1540.46 train_time:125518ms step_avg:89.66ms +[2025-08-22 10:36:00] [Rank 0] step:1401/10000 train_time:125535ms step_avg:89.60ms +[2025-08-22 10:36:00] [Rank 0] step:1401/10000 train_time:125535ms step_avg:89.60ms +[2025-08-22 10:36:01] [Rank 0] step:1421/10000 train_time:127320ms step_avg:89.60ms +[2025-08-22 10:36:01] [Rank 0] step:1421/10000 train_time:127320ms step_avg:89.60ms +[2025-08-22 10:36:03] [Rank 0] step:1441/10000 train_time:129109ms 
step_avg:89.60ms +[2025-08-22 10:36:03] [Rank 0] step:1441/10000 train_time:129109ms step_avg:89.60ms +[2025-08-22 10:36:05] [Rank 0] step:1461/10000 train_time:130899ms step_avg:89.60ms +[2025-08-22 10:36:05] [Rank 0] step:1461/10000 train_time:130899ms step_avg:89.60ms +[2025-08-22 10:36:07] [Rank 0] step:1481/10000 train_time:132690ms step_avg:89.59ms +[2025-08-22 10:36:07] [Rank 0] step:1481/10000 train_time:132690ms step_avg:89.59ms +[2025-08-22 10:36:09] [Rank 0] step:1501/10000 train_time:134492ms step_avg:89.60ms +[2025-08-22 10:36:09] [Rank 0] step:1501/10000 train_time:134492ms step_avg:89.60ms +[2025-08-22 10:36:10] [Rank 0] step:1521/10000 train_time:136295ms step_avg:89.61ms +[2025-08-22 10:36:10] [Rank 0] step:1521/10000 train_time:136295ms step_avg:89.61ms +[2025-08-22 10:36:12] [Rank 0] step:1541/10000 train_time:138099ms step_avg:89.62ms +[2025-08-22 10:36:12] [Rank 0] step:1541/10000 train_time:138099ms step_avg:89.62ms +[2025-08-22 10:36:14] [Rank 0] step:1561/10000 train_time:139903ms step_avg:89.62ms +[2025-08-22 10:36:14] [Rank 0] step:1561/10000 train_time:139903ms step_avg:89.62ms +[2025-08-22 10:36:16] [Rank 0] step:1581/10000 train_time:141709ms step_avg:89.63ms +[2025-08-22 10:36:16] [Rank 0] step:1581/10000 train_time:141709ms step_avg:89.63ms +[2025-08-22 10:36:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:36:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:36:31] [Rank 0] PRINT: step:1600/10000 val_loss:4.7086 svd_entropy: attn_qk:H=0.4602,top10E=0.72,eRank=32.6,q75/q25=122.44 attn_vo:H=0.4441,top10E=0.73,eRank=43.4,q75/q25=62.92 mlp_w1:H=0.6575,top10E=0.44,eRank=104.1,q75/q25=13.94 mlp_w2:H=0.6722,top10E=0.41,eRank=126.3,q75/q25=18.05 vo_prod:H=0.3093,top10E=0.87,eRank=14.5,q75/q25=1803.85 train_time:143529ms step_avg:89.71ms +[2025-08-22 10:36:31] [Rank 0] PRINT: step:1600/10000 val_loss:4.7086 svd_entropy: attn_qk:H=0.4602,top10E=0.72,eRank=32.6,q75/q25=122.44 attn_vo:H=0.4441,top10E=0.73,eRank=43.4,q75/q25=62.92 mlp_w1:H=0.6575,top10E=0.44,eRank=104.1,q75/q25=13.94 mlp_w2:H=0.6722,top10E=0.41,eRank=126.3,q75/q25=18.05 vo_prod:H=0.3093,top10E=0.87,eRank=14.5,q75/q25=1803.85 train_time:143529ms step_avg:89.71ms +[2025-08-22 10:36:31] [Rank 0] step:1601/10000 train_time:143548ms step_avg:89.66ms +[2025-08-22 10:36:31] [Rank 0] step:1601/10000 train_time:143548ms step_avg:89.66ms +[2025-08-22 10:36:33] [Rank 0] step:1621/10000 train_time:145407ms step_avg:89.70ms +[2025-08-22 10:36:33] [Rank 0] step:1621/10000 train_time:145407ms step_avg:89.70ms +[2025-08-22 10:36:35] [Rank 0] step:1641/10000 train_time:147278ms step_avg:89.75ms +[2025-08-22 10:36:35] [Rank 0] step:1641/10000 train_time:147278ms step_avg:89.75ms +[2025-08-22 10:36:37] [Rank 0] step:1661/10000 train_time:149078ms step_avg:89.75ms +[2025-08-22 10:36:37] [Rank 0] step:1661/10000 train_time:149078ms step_avg:89.75ms +[2025-08-22 10:36:39] [Rank 0] step:1681/10000 train_time:150881ms step_avg:89.76ms +[2025-08-22 10:36:39] [Rank 0] step:1681/10000 train_time:150881ms step_avg:89.76ms +[2025-08-22 10:36:40] [Rank 0] step:1701/10000 train_time:152684ms step_avg:89.76ms +[2025-08-22 10:36:40] [Rank 0] step:1701/10000 train_time:152684ms step_avg:89.76ms +[2025-08-22 10:36:42] [Rank 0] step:1721/10000 train_time:154488ms step_avg:89.77ms +[2025-08-22 10:36:42] [Rank 0] step:1721/10000 train_time:154488ms step_avg:89.77ms +[2025-08-22 
10:36:44] [Rank 0] step:1741/10000 train_time:156294ms step_avg:89.77ms +[2025-08-22 10:36:44] [Rank 0] step:1741/10000 train_time:156294ms step_avg:89.77ms +[2025-08-22 10:36:46] [Rank 0] step:1761/10000 train_time:158101ms step_avg:89.78ms +[2025-08-22 10:36:46] [Rank 0] step:1761/10000 train_time:158101ms step_avg:89.78ms +[2025-08-22 10:36:48] [Rank 0] step:1781/10000 train_time:159909ms step_avg:89.79ms +[2025-08-22 10:36:48] [Rank 0] step:1781/10000 train_time:159909ms step_avg:89.79ms +[2025-08-22 10:36:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:36:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:37:03] [Rank 0] PRINT: step:1800/10000 val_loss:4.6067 svd_entropy: attn_qk:H=0.4668,top10E=0.71,eRank=34.3,q75/q25=129.44 attn_vo:H=0.4578,top10E=0.72,eRank=47.1,q75/q25=64.43 mlp_w1:H=0.6524,top10E=0.44,eRank=105.1,q75/q25=15.72 mlp_w2:H=0.6688,top10E=0.41,eRank=126.2,q75/q25=18.90 vo_prod:H=0.3266,top10E=0.86,eRank=15.9,q75/q25=2198.56 train_time:161731ms step_avg:89.85ms +[2025-08-22 10:37:03] [Rank 0] PRINT: step:1800/10000 val_loss:4.6067 svd_entropy: attn_qk:H=0.4668,top10E=0.71,eRank=34.3,q75/q25=129.44 attn_vo:H=0.4578,top10E=0.72,eRank=47.1,q75/q25=64.43 mlp_w1:H=0.6524,top10E=0.44,eRank=105.1,q75/q25=15.72 mlp_w2:H=0.6688,top10E=0.41,eRank=126.2,q75/q25=18.90 vo_prod:H=0.3266,top10E=0.86,eRank=15.9,q75/q25=2198.56 train_time:161731ms step_avg:89.85ms +[2025-08-22 10:37:03] [Rank 0] step:1801/10000 train_time:161749ms step_avg:89.81ms +[2025-08-22 10:37:03] [Rank 0] step:1801/10000 train_time:161749ms step_avg:89.81ms +[2025-08-22 10:37:05] [Rank 0] step:1821/10000 train_time:163551ms step_avg:89.81ms +[2025-08-22 10:37:05] [Rank 0] step:1821/10000 train_time:163551ms step_avg:89.81ms +[2025-08-22 10:37:06] [Rank 0] step:1841/10000 train_time:165356ms 
step_avg:89.82ms +[2025-08-22 10:37:06] [Rank 0] step:1841/10000 train_time:165356ms step_avg:89.82ms +[2025-08-22 10:37:08] [Rank 0] step:1861/10000 train_time:167162ms step_avg:89.82ms +[2025-08-22 10:37:08] [Rank 0] step:1861/10000 train_time:167162ms step_avg:89.82ms +[2025-08-22 10:37:10] [Rank 0] step:1881/10000 train_time:168970ms step_avg:89.83ms +[2025-08-22 10:37:10] [Rank 0] step:1881/10000 train_time:168970ms step_avg:89.83ms +[2025-08-22 10:37:12] [Rank 0] step:1901/10000 train_time:170777ms step_avg:89.84ms +[2025-08-22 10:37:12] [Rank 0] step:1901/10000 train_time:170777ms step_avg:89.84ms +[2025-08-22 10:37:14] [Rank 0] step:1921/10000 train_time:172588ms step_avg:89.84ms +[2025-08-22 10:37:14] [Rank 0] step:1921/10000 train_time:172588ms step_avg:89.84ms +[2025-08-22 10:37:15] [Rank 0] step:1941/10000 train_time:174395ms step_avg:89.85ms +[2025-08-22 10:37:15] [Rank 0] step:1941/10000 train_time:174395ms step_avg:89.85ms +[2025-08-22 10:37:17] [Rank 0] step:1961/10000 train_time:176205ms step_avg:89.85ms +[2025-08-22 10:37:17] [Rank 0] step:1961/10000 train_time:176205ms step_avg:89.85ms +[2025-08-22 10:37:19] [Rank 0] step:1981/10000 train_time:178016ms step_avg:89.86ms +[2025-08-22 10:37:19] [Rank 0] step:1981/10000 train_time:178016ms step_avg:89.86ms +[2025-08-22 10:37:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:37:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:37:34] [Rank 0] PRINT: step:2000/10000 val_loss:4.5496 svd_entropy: attn_qk:H=0.4740,top10E=0.70,eRank=36.1,q75/q25=136.38 attn_vo:H=0.4707,top10E=0.70,eRank=50.8,q75/q25=64.08 mlp_w1:H=0.6506,top10E=0.44,eRank=106.9,q75/q25=17.82 mlp_w2:H=0.6674,top10E=0.40,eRank=127.1,q75/q25=19.65 vo_prod:H=0.3392,top10E=0.85,eRank=17.4,q75/q25=2636.94 train_time:179840ms step_avg:89.92ms +[2025-08-22 10:37:34] [Rank 0] PRINT: step:2000/10000 val_loss:4.5496 svd_entropy: attn_qk:H=0.4740,top10E=0.70,eRank=36.1,q75/q25=136.38 attn_vo:H=0.4707,top10E=0.70,eRank=50.8,q75/q25=64.08 mlp_w1:H=0.6506,top10E=0.44,eRank=106.9,q75/q25=17.82 mlp_w2:H=0.6674,top10E=0.40,eRank=127.1,q75/q25=19.65 vo_prod:H=0.3392,top10E=0.85,eRank=17.4,q75/q25=2636.94 train_time:179840ms step_avg:89.92ms +[2025-08-22 10:37:34] [Rank 0] step:2001/10000 train_time:179858ms step_avg:89.88ms +[2025-08-22 10:37:34] [Rank 0] step:2001/10000 train_time:179858ms step_avg:89.88ms +[2025-08-22 10:37:36] [Rank 0] step:2021/10000 train_time:181716ms step_avg:89.91ms +[2025-08-22 10:37:36] [Rank 0] step:2021/10000 train_time:181716ms step_avg:89.91ms +[2025-08-22 10:37:39] [Rank 0] step:2041/10000 train_time:184241ms step_avg:90.27ms +[2025-08-22 10:37:39] [Rank 0] step:2041/10000 train_time:184241ms step_avg:90.27ms +[2025-08-22 10:37:41] [Rank 0] step:2061/10000 train_time:186047ms step_avg:90.27ms +[2025-08-22 10:37:41] [Rank 0] step:2061/10000 train_time:186047ms step_avg:90.27ms +[2025-08-22 10:37:42] [Rank 0] step:2081/10000 train_time:187852ms step_avg:90.27ms +[2025-08-22 10:37:42] [Rank 0] step:2081/10000 train_time:187852ms step_avg:90.27ms +[2025-08-22 10:37:44] [Rank 0] step:2101/10000 train_time:189658ms step_avg:90.27ms +[2025-08-22 10:37:44] [Rank 0] step:2101/10000 train_time:189658ms step_avg:90.27ms +[2025-08-22 10:37:46] [Rank 0] step:2121/10000 train_time:191463ms step_avg:90.27ms +[2025-08-22 10:37:46] [Rank 0] step:2121/10000 train_time:191463ms step_avg:90.27ms +[2025-08-22 
10:37:48] [Rank 0] step:2141/10000 train_time:193269ms step_avg:90.27ms +[2025-08-22 10:37:48] [Rank 0] step:2141/10000 train_time:193269ms step_avg:90.27ms +[2025-08-22 10:37:50] [Rank 0] step:2161/10000 train_time:195077ms step_avg:90.27ms +[2025-08-22 10:37:50] [Rank 0] step:2161/10000 train_time:195077ms step_avg:90.27ms +[2025-08-22 10:37:51] [Rank 0] step:2181/10000 train_time:196884ms step_avg:90.27ms +[2025-08-22 10:37:51] [Rank 0] step:2181/10000 train_time:196884ms step_avg:90.27ms +[2025-08-22 10:37:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:37:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:38:07] [Rank 0] PRINT: step:2200/10000 val_loss:4.4844 svd_entropy: attn_qk:H=0.4822,top10E=0.69,eRank=37.8,q75/q25=140.18 attn_vo:H=0.4834,top10E=0.68,eRank=54.3,q75/q25=58.78 mlp_w1:H=0.6508,top10E=0.43,eRank=109.1,q75/q25=20.07 mlp_w2:H=0.6674,top10E=0.39,eRank=128.2,q75/q25=20.42 vo_prod:H=0.3511,top10E=0.84,eRank=18.7,q75/q25=2977.10 train_time:198705ms step_avg:90.32ms +[2025-08-22 10:38:07] [Rank 0] PRINT: step:2200/10000 val_loss:4.4844 svd_entropy: attn_qk:H=0.4822,top10E=0.69,eRank=37.8,q75/q25=140.18 attn_vo:H=0.4834,top10E=0.68,eRank=54.3,q75/q25=58.78 mlp_w1:H=0.6508,top10E=0.43,eRank=109.1,q75/q25=20.07 mlp_w2:H=0.6674,top10E=0.39,eRank=128.2,q75/q25=20.42 vo_prod:H=0.3511,top10E=0.84,eRank=18.7,q75/q25=2977.10 train_time:198705ms step_avg:90.32ms +[2025-08-22 10:38:07] [Rank 0] step:2201/10000 train_time:198722ms step_avg:90.29ms +[2025-08-22 10:38:07] [Rank 0] step:2201/10000 train_time:198722ms step_avg:90.29ms +[2025-08-22 10:38:09] [Rank 0] step:2221/10000 train_time:200500ms step_avg:90.27ms +[2025-08-22 10:38:09] [Rank 0] step:2221/10000 train_time:200500ms step_avg:90.27ms +[2025-08-22 10:38:10] [Rank 0] step:2241/10000 train_time:202336ms 
step_avg:90.29ms +[2025-08-22 10:38:10] [Rank 0] step:2241/10000 train_time:202336ms step_avg:90.29ms +[2025-08-22 10:38:12] [Rank 0] step:2261/10000 train_time:204182ms step_avg:90.31ms +[2025-08-22 10:38:12] [Rank 0] step:2261/10000 train_time:204182ms step_avg:90.31ms +[2025-08-22 10:38:14] [Rank 0] step:2281/10000 train_time:206028ms step_avg:90.32ms +[2025-08-22 10:38:14] [Rank 0] step:2281/10000 train_time:206028ms step_avg:90.32ms +[2025-08-22 10:38:16] [Rank 0] step:2301/10000 train_time:207874ms step_avg:90.34ms +[2025-08-22 10:38:16] [Rank 0] step:2301/10000 train_time:207874ms step_avg:90.34ms +[2025-08-22 10:38:18] [Rank 0] step:2321/10000 train_time:209723ms step_avg:90.36ms +[2025-08-22 10:38:18] [Rank 0] step:2321/10000 train_time:209723ms step_avg:90.36ms +[2025-08-22 10:38:20] [Rank 0] step:2341/10000 train_time:211574ms step_avg:90.38ms +[2025-08-22 10:38:20] [Rank 0] step:2341/10000 train_time:211574ms step_avg:90.38ms +[2025-08-22 10:38:22] [Rank 0] step:2361/10000 train_time:213467ms step_avg:90.41ms +[2025-08-22 10:38:22] [Rank 0] step:2361/10000 train_time:213467ms step_avg:90.41ms +[2025-08-22 10:38:23] [Rank 0] step:2381/10000 train_time:215319ms step_avg:90.43ms +[2025-08-22 10:38:23] [Rank 0] step:2381/10000 train_time:215319ms step_avg:90.43ms +[2025-08-22 10:38:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:38:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:38:39] [Rank 0] PRINT: step:2400/10000 val_loss:4.4052 svd_entropy: attn_qk:H=0.4881,top10E=0.68,eRank=39.3,q75/q25=144.74 attn_vo:H=0.4923,top10E=0.67,eRank=57.0,q75/q25=55.43 mlp_w1:H=0.6531,top10E=0.41,eRank=111.7,q75/q25=22.03 mlp_w2:H=0.6691,top10E=0.38,eRank=129.9,q75/q25=20.82 vo_prod:H=0.3567,top10E=0.83,eRank=19.6,q75/q25=2888.08 train_time:217185ms step_avg:90.49ms +[2025-08-22 10:38:39] [Rank 0] PRINT: step:2400/10000 val_loss:4.4052 svd_entropy: attn_qk:H=0.4881,top10E=0.68,eRank=39.3,q75/q25=144.74 attn_vo:H=0.4923,top10E=0.67,eRank=57.0,q75/q25=55.43 mlp_w1:H=0.6531,top10E=0.41,eRank=111.7,q75/q25=22.03 mlp_w2:H=0.6691,top10E=0.38,eRank=129.9,q75/q25=20.82 vo_prod:H=0.3567,top10E=0.83,eRank=19.6,q75/q25=2888.08 train_time:217185ms step_avg:90.49ms +[2025-08-22 10:38:39] [Rank 0] step:2401/10000 train_time:217202ms step_avg:90.46ms +[2025-08-22 10:38:39] [Rank 0] step:2401/10000 train_time:217202ms step_avg:90.46ms +[2025-08-22 10:38:41] [Rank 0] step:2421/10000 train_time:219126ms step_avg:90.51ms +[2025-08-22 10:38:41] [Rank 0] step:2421/10000 train_time:219126ms step_avg:90.51ms +[2025-08-22 10:38:43] [Rank 0] step:2441/10000 train_time:220975ms step_avg:90.53ms +[2025-08-22 10:38:43] [Rank 0] step:2441/10000 train_time:220975ms step_avg:90.53ms +[2025-08-22 10:38:44] [Rank 0] step:2461/10000 train_time:222827ms step_avg:90.54ms +[2025-08-22 10:38:44] [Rank 0] step:2461/10000 train_time:222827ms step_avg:90.54ms +[2025-08-22 10:38:46] [Rank 0] step:2481/10000 train_time:224680ms step_avg:90.56ms +[2025-08-22 10:38:46] [Rank 0] step:2481/10000 train_time:224680ms step_avg:90.56ms +[2025-08-22 10:38:48] [Rank 0] step:2501/10000 train_time:226535ms step_avg:90.58ms +[2025-08-22 10:38:48] [Rank 0] step:2501/10000 train_time:226535ms step_avg:90.58ms +[2025-08-22 10:38:50] [Rank 0] step:2521/10000 train_time:228390ms step_avg:90.59ms +[2025-08-22 10:38:50] [Rank 0] step:2521/10000 train_time:228390ms step_avg:90.59ms +[2025-08-22 
10:38:52] [Rank 0] step:2541/10000 train_time:230244ms step_avg:90.61ms +[2025-08-22 10:38:52] [Rank 0] step:2541/10000 train_time:230244ms step_avg:90.61ms +[2025-08-22 10:38:54] [Rank 0] step:2561/10000 train_time:232102ms step_avg:90.63ms +[2025-08-22 10:38:54] [Rank 0] step:2561/10000 train_time:232102ms step_avg:90.63ms +[2025-08-22 10:38:56] [Rank 0] step:2581/10000 train_time:233958ms step_avg:90.65ms +[2025-08-22 10:38:56] [Rank 0] step:2581/10000 train_time:233958ms step_avg:90.65ms +[2025-08-22 10:38:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:38:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:39:11] [Rank 0] PRINT: step:2600/10000 val_loss:4.3622 svd_entropy: attn_qk:H=0.4950,top10E=0.67,eRank=40.9,q75/q25=148.25 attn_vo:H=0.5028,top10E=0.66,eRank=60.0,q75/q25=54.29 mlp_w1:H=0.6559,top10E=0.40,eRank=114.5,q75/q25=23.87 mlp_w2:H=0.6715,top10E=0.37,eRank=131.9,q75/q25=21.20 vo_prod:H=0.3642,top10E=0.83,eRank=20.6,q75/q25=2733.13 train_time:235827ms step_avg:90.70ms +[2025-08-22 10:39:11] [Rank 0] PRINT: step:2600/10000 val_loss:4.3622 svd_entropy: attn_qk:H=0.4950,top10E=0.67,eRank=40.9,q75/q25=148.25 attn_vo:H=0.5028,top10E=0.66,eRank=60.0,q75/q25=54.29 mlp_w1:H=0.6559,top10E=0.40,eRank=114.5,q75/q25=23.87 mlp_w2:H=0.6715,top10E=0.37,eRank=131.9,q75/q25=21.20 vo_prod:H=0.3642,top10E=0.83,eRank=20.6,q75/q25=2733.13 train_time:235827ms step_avg:90.70ms +[2025-08-22 10:39:11] [Rank 0] step:2601/10000 train_time:235844ms step_avg:90.67ms +[2025-08-22 10:39:11] [Rank 0] step:2601/10000 train_time:235844ms step_avg:90.67ms +[2025-08-22 10:39:13] [Rank 0] step:2621/10000 train_time:237687ms step_avg:90.69ms +[2025-08-22 10:39:13] [Rank 0] step:2621/10000 train_time:237687ms step_avg:90.69ms +[2025-08-22 10:39:15] [Rank 0] step:2641/10000 train_time:239533ms 
step_avg:90.70ms +[2025-08-22 10:39:15] [Rank 0] step:2641/10000 train_time:239533ms step_avg:90.70ms +[2025-08-22 10:39:16] [Rank 0] step:2661/10000 train_time:241380ms step_avg:90.71ms +[2025-08-22 10:39:16] [Rank 0] step:2661/10000 train_time:241380ms step_avg:90.71ms +[2025-08-22 10:39:18] [Rank 0] step:2681/10000 train_time:243229ms step_avg:90.72ms +[2025-08-22 10:39:18] [Rank 0] step:2681/10000 train_time:243229ms step_avg:90.72ms +[2025-08-22 10:39:20] [Rank 0] step:2701/10000 train_time:245079ms step_avg:90.74ms +[2025-08-22 10:39:20] [Rank 0] step:2701/10000 train_time:245079ms step_avg:90.74ms +[2025-08-22 10:39:22] [Rank 0] step:2721/10000 train_time:246929ms step_avg:90.75ms +[2025-08-22 10:39:22] [Rank 0] step:2721/10000 train_time:246929ms step_avg:90.75ms +[2025-08-22 10:39:24] [Rank 0] step:2741/10000 train_time:248781ms step_avg:90.76ms +[2025-08-22 10:39:24] [Rank 0] step:2741/10000 train_time:248781ms step_avg:90.76ms +[2025-08-22 10:39:26] [Rank 0] step:2761/10000 train_time:250632ms step_avg:90.78ms +[2025-08-22 10:39:26] [Rank 0] step:2761/10000 train_time:250632ms step_avg:90.78ms +[2025-08-22 10:39:28] [Rank 0] step:2781/10000 train_time:252484ms step_avg:90.79ms +[2025-08-22 10:39:28] [Rank 0] step:2781/10000 train_time:252484ms step_avg:90.79ms +[2025-08-22 10:39:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:39:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:39:43] [Rank 0] PRINT: step:2800/10000 val_loss:4.3340 svd_entropy: attn_qk:H=0.5022,top10E=0.66,eRank=42.6,q75/q25=151.95 attn_vo:H=0.5134,top10E=0.64,eRank=63.2,q75/q25=52.30 mlp_w1:H=0.6597,top10E=0.39,eRank=117.4,q75/q25=25.41 mlp_w2:H=0.6748,top10E=0.35,eRank=134.3,q75/q25=21.50 vo_prod:H=0.3727,top10E=0.82,eRank=21.9,q75/q25=2517.34 train_time:254350ms step_avg:90.84ms +[2025-08-22 10:39:43] [Rank 0] PRINT: step:2800/10000 val_loss:4.3340 svd_entropy: attn_qk:H=0.5022,top10E=0.66,eRank=42.6,q75/q25=151.95 attn_vo:H=0.5134,top10E=0.64,eRank=63.2,q75/q25=52.30 mlp_w1:H=0.6597,top10E=0.39,eRank=117.4,q75/q25=25.41 mlp_w2:H=0.6748,top10E=0.35,eRank=134.3,q75/q25=21.50 vo_prod:H=0.3727,top10E=0.82,eRank=21.9,q75/q25=2517.34 train_time:254350ms step_avg:90.84ms +[2025-08-22 10:39:43] [Rank 0] step:2801/10000 train_time:254367ms step_avg:90.81ms +[2025-08-22 10:39:43] [Rank 0] step:2801/10000 train_time:254367ms step_avg:90.81ms +[2025-08-22 10:39:45] [Rank 0] step:2821/10000 train_time:256263ms step_avg:90.84ms +[2025-08-22 10:39:45] [Rank 0] step:2821/10000 train_time:256263ms step_avg:90.84ms +[2025-08-22 10:39:47] [Rank 0] step:2841/10000 train_time:258107ms step_avg:90.85ms +[2025-08-22 10:39:47] [Rank 0] step:2841/10000 train_time:258107ms step_avg:90.85ms +[2025-08-22 10:39:48] [Rank 0] step:2861/10000 train_time:259957ms step_avg:90.86ms +[2025-08-22 10:39:48] [Rank 0] step:2861/10000 train_time:259957ms step_avg:90.86ms +[2025-08-22 10:39:50] [Rank 0] step:2881/10000 train_time:261804ms step_avg:90.87ms +[2025-08-22 10:39:50] [Rank 0] step:2881/10000 train_time:261804ms step_avg:90.87ms +[2025-08-22 10:39:52] [Rank 0] step:2901/10000 train_time:263654ms step_avg:90.88ms +[2025-08-22 10:39:52] [Rank 0] step:2901/10000 train_time:263654ms step_avg:90.88ms +[2025-08-22 10:39:54] [Rank 0] step:2921/10000 train_time:265505ms step_avg:90.90ms +[2025-08-22 10:39:54] [Rank 0] step:2921/10000 train_time:265505ms step_avg:90.90ms +[2025-08-22 
10:39:56] [Rank 0] step:2941/10000 train_time:267356ms step_avg:90.91ms +[2025-08-22 10:39:56] [Rank 0] step:2941/10000 train_time:267356ms step_avg:90.91ms +[2025-08-22 10:39:58] [Rank 0] step:2961/10000 train_time:269209ms step_avg:90.92ms +[2025-08-22 10:39:58] [Rank 0] step:2961/10000 train_time:269209ms step_avg:90.92ms +[2025-08-22 10:40:00] [Rank 0] step:2981/10000 train_time:271069ms step_avg:90.93ms +[2025-08-22 10:40:00] [Rank 0] step:2981/10000 train_time:271069ms step_avg:90.93ms +[2025-08-22 10:40:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:40:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:40:15] [Rank 0] PRINT: step:3000/10000 val_loss:4.2930 svd_entropy: attn_qk:H=0.5093,top10E=0.65,eRank=44.2,q75/q25=153.13 attn_vo:H=0.5229,top10E=0.63,eRank=66.2,q75/q25=51.17 mlp_w1:H=0.6632,top10E=0.38,eRank=120.3,q75/q25=26.93 mlp_w2:H=0.6781,top10E=0.34,eRank=136.8,q75/q25=21.55 vo_prod:H=0.3777,top10E=0.81,eRank=22.9,q75/q25=2213.31 train_time:272944ms step_avg:90.98ms +[2025-08-22 10:40:15] [Rank 0] PRINT: step:3000/10000 val_loss:4.2930 svd_entropy: attn_qk:H=0.5093,top10E=0.65,eRank=44.2,q75/q25=153.13 attn_vo:H=0.5229,top10E=0.63,eRank=66.2,q75/q25=51.17 mlp_w1:H=0.6632,top10E=0.38,eRank=120.3,q75/q25=26.93 mlp_w2:H=0.6781,top10E=0.34,eRank=136.8,q75/q25=21.55 vo_prod:H=0.3777,top10E=0.81,eRank=22.9,q75/q25=2213.31 train_time:272944ms step_avg:90.98ms +[2025-08-22 10:40:15] [Rank 0] step:3001/10000 train_time:272961ms step_avg:90.96ms +[2025-08-22 10:40:15] [Rank 0] step:3001/10000 train_time:272961ms step_avg:90.96ms +[2025-08-22 10:40:17] [Rank 0] step:3021/10000 train_time:274812ms step_avg:90.97ms +[2025-08-22 10:40:17] [Rank 0] step:3021/10000 train_time:274812ms step_avg:90.97ms +[2025-08-22 10:40:19] [Rank 0] step:3041/10000 train_time:276666ms 
step_avg:90.98ms +[2025-08-22 10:40:19] [Rank 0] step:3041/10000 train_time:276666ms step_avg:90.98ms +[2025-08-22 10:40:20] [Rank 0] step:3061/10000 train_time:278522ms step_avg:90.99ms +[2025-08-22 10:40:20] [Rank 0] step:3061/10000 train_time:278522ms step_avg:90.99ms +[2025-08-22 10:40:22] [Rank 0] step:3081/10000 train_time:280379ms step_avg:91.00ms +[2025-08-22 10:40:22] [Rank 0] step:3081/10000 train_time:280379ms step_avg:91.00ms +[2025-08-22 10:40:24] [Rank 0] step:3101/10000 train_time:282238ms step_avg:91.02ms +[2025-08-22 10:40:24] [Rank 0] step:3101/10000 train_time:282238ms step_avg:91.02ms +[2025-08-22 10:40:26] [Rank 0] step:3121/10000 train_time:284098ms step_avg:91.03ms +[2025-08-22 10:40:26] [Rank 0] step:3121/10000 train_time:284098ms step_avg:91.03ms +[2025-08-22 10:40:28] [Rank 0] step:3141/10000 train_time:285959ms step_avg:91.04ms +[2025-08-22 10:40:28] [Rank 0] step:3141/10000 train_time:285959ms step_avg:91.04ms +[2025-08-22 10:40:30] [Rank 0] step:3161/10000 train_time:287822ms step_avg:91.05ms +[2025-08-22 10:40:30] [Rank 0] step:3161/10000 train_time:287822ms step_avg:91.05ms +[2025-08-22 10:40:32] [Rank 0] step:3181/10000 train_time:289684ms step_avg:91.07ms +[2025-08-22 10:40:32] [Rank 0] step:3181/10000 train_time:289684ms step_avg:91.07ms +[2025-08-22 10:40:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:40:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:40:47] [Rank 0] PRINT: step:3200/10000 val_loss:4.2617 svd_entropy: attn_qk:H=0.5159,top10E=0.64,eRank=45.7,q75/q25=154.71 attn_vo:H=0.5315,top10E=0.62,eRank=69.0,q75/q25=50.29 mlp_w1:H=0.6668,top10E=0.36,eRank=123.1,q75/q25=28.22 mlp_w2:H=0.6813,top10E=0.34,eRank=139.2,q75/q25=21.88 vo_prod:H=0.3868,top10E=0.80,eRank=24.1,q75/q25=1975.43 train_time:291562ms step_avg:91.11ms +[2025-08-22 10:40:47] [Rank 0] PRINT: step:3200/10000 val_loss:4.2617 svd_entropy: attn_qk:H=0.5159,top10E=0.64,eRank=45.7,q75/q25=154.71 attn_vo:H=0.5315,top10E=0.62,eRank=69.0,q75/q25=50.29 mlp_w1:H=0.6668,top10E=0.36,eRank=123.1,q75/q25=28.22 mlp_w2:H=0.6813,top10E=0.34,eRank=139.2,q75/q25=21.88 vo_prod:H=0.3868,top10E=0.80,eRank=24.1,q75/q25=1975.43 train_time:291562ms step_avg:91.11ms +[2025-08-22 10:40:47] [Rank 0] step:3201/10000 train_time:291580ms step_avg:91.09ms +[2025-08-22 10:40:47] [Rank 0] step:3201/10000 train_time:291580ms step_avg:91.09ms +[2025-08-22 10:40:49] [Rank 0] step:3221/10000 train_time:293431ms step_avg:91.10ms +[2025-08-22 10:40:49] [Rank 0] step:3221/10000 train_time:293431ms step_avg:91.10ms +[2025-08-22 10:40:51] [Rank 0] step:3241/10000 train_time:295289ms step_avg:91.11ms +[2025-08-22 10:40:51] [Rank 0] step:3241/10000 train_time:295289ms step_avg:91.11ms +[2025-08-22 10:40:53] [Rank 0] step:3261/10000 train_time:297149ms step_avg:91.12ms +[2025-08-22 10:40:53] [Rank 0] step:3261/10000 train_time:297149ms step_avg:91.12ms +[2025-08-22 10:40:55] [Rank 0] step:3281/10000 train_time:299012ms step_avg:91.13ms +[2025-08-22 10:40:55] [Rank 0] step:3281/10000 train_time:299012ms step_avg:91.13ms +[2025-08-22 10:40:57] [Rank 0] step:3301/10000 train_time:300873ms step_avg:91.15ms +[2025-08-22 10:40:57] [Rank 0] step:3301/10000 train_time:300873ms step_avg:91.15ms +[2025-08-22 10:40:58] [Rank 0] step:3321/10000 train_time:302737ms step_avg:91.16ms +[2025-08-22 10:40:58] [Rank 0] step:3321/10000 train_time:302737ms step_avg:91.16ms +[2025-08-22 
10:41:00] [Rank 0] step:3341/10000 train_time:304601ms step_avg:91.17ms +[2025-08-22 10:41:00] [Rank 0] step:3341/10000 train_time:304601ms step_avg:91.17ms +[2025-08-22 10:41:02] [Rank 0] step:3361/10000 train_time:306464ms step_avg:91.18ms +[2025-08-22 10:41:02] [Rank 0] step:3361/10000 train_time:306464ms step_avg:91.18ms +[2025-08-22 10:41:04] [Rank 0] step:3381/10000 train_time:308329ms step_avg:91.19ms +[2025-08-22 10:41:04] [Rank 0] step:3381/10000 train_time:308329ms step_avg:91.19ms +[2025-08-22 10:41:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:41:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:41:19] [Rank 0] PRINT: step:3400/10000 val_loss:4.2342 svd_entropy: attn_qk:H=0.5229,top10E=0.63,eRank=47.4,q75/q25=155.64 attn_vo:H=0.5402,top10E=0.61,eRank=71.9,q75/q25=49.77 mlp_w1:H=0.6713,top10E=0.35,eRank=126.3,q75/q25=28.87 mlp_w2:H=0.6850,top10E=0.33,eRank=141.8,q75/q25=22.04 vo_prod:H=0.3948,top10E=0.79,eRank=25.1,q75/q25=1857.88 train_time:310208ms step_avg:91.24ms +[2025-08-22 10:41:19] [Rank 0] PRINT: step:3400/10000 val_loss:4.2342 svd_entropy: attn_qk:H=0.5229,top10E=0.63,eRank=47.4,q75/q25=155.64 attn_vo:H=0.5402,top10E=0.61,eRank=71.9,q75/q25=49.77 mlp_w1:H=0.6713,top10E=0.35,eRank=126.3,q75/q25=28.87 mlp_w2:H=0.6850,top10E=0.33,eRank=141.8,q75/q25=22.04 vo_prod:H=0.3948,top10E=0.79,eRank=25.1,q75/q25=1857.88 train_time:310208ms step_avg:91.24ms +[2025-08-22 10:41:20] [Rank 0] step:3401/10000 train_time:310226ms step_avg:91.22ms +[2025-08-22 10:41:20] [Rank 0] step:3401/10000 train_time:310226ms step_avg:91.22ms +[2025-08-22 10:41:21] [Rank 0] step:3421/10000 train_time:312064ms step_avg:91.22ms +[2025-08-22 10:41:21] [Rank 0] step:3421/10000 train_time:312064ms step_avg:91.22ms +[2025-08-22 10:41:23] [Rank 0] step:3441/10000 train_time:313917ms 
step_avg:91.23ms +[2025-08-22 10:41:23] [Rank 0] step:3441/10000 train_time:313917ms step_avg:91.23ms +[2025-08-22 10:41:25] [Rank 0] step:3461/10000 train_time:315772ms step_avg:91.24ms +[2025-08-22 10:41:25] [Rank 0] step:3461/10000 train_time:315772ms step_avg:91.24ms +[2025-08-22 10:41:27] [Rank 0] step:3481/10000 train_time:317625ms step_avg:91.25ms +[2025-08-22 10:41:27] [Rank 0] step:3481/10000 train_time:317625ms step_avg:91.25ms +[2025-08-22 10:41:29] [Rank 0] step:3501/10000 train_time:319484ms step_avg:91.26ms +[2025-08-22 10:41:29] [Rank 0] step:3501/10000 train_time:319484ms step_avg:91.26ms +[2025-08-22 10:41:31] [Rank 0] step:3521/10000 train_time:321345ms step_avg:91.27ms +[2025-08-22 10:41:31] [Rank 0] step:3521/10000 train_time:321345ms step_avg:91.27ms +[2025-08-22 10:41:33] [Rank 0] step:3541/10000 train_time:323204ms step_avg:91.27ms +[2025-08-22 10:41:33] [Rank 0] step:3541/10000 train_time:323204ms step_avg:91.27ms +[2025-08-22 10:41:34] [Rank 0] step:3561/10000 train_time:325064ms step_avg:91.28ms +[2025-08-22 10:41:34] [Rank 0] step:3561/10000 train_time:325064ms step_avg:91.28ms +[2025-08-22 10:41:36] [Rank 0] step:3581/10000 train_time:326922ms step_avg:91.29ms +[2025-08-22 10:41:36] [Rank 0] step:3581/10000 train_time:326922ms step_avg:91.29ms +[2025-08-22 10:41:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:41:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:41:52] [Rank 0] PRINT: step:3600/10000 val_loss:4.2243 svd_entropy: attn_qk:H=0.5293,top10E=0.62,eRank=48.9,q75/q25=154.77 attn_vo:H=0.5477,top10E=0.60,eRank=74.5,q75/q25=49.19 mlp_w1:H=0.6756,top10E=0.34,eRank=129.2,q75/q25=29.33 mlp_w2:H=0.6888,top10E=0.32,eRank=144.3,q75/q25=22.46 vo_prod:H=0.3996,top10E=0.79,eRank=25.7,q75/q25=1669.19 train_time:328797ms step_avg:91.33ms +[2025-08-22 10:41:52] [Rank 0] PRINT: step:3600/10000 val_loss:4.2243 svd_entropy: attn_qk:H=0.5293,top10E=0.62,eRank=48.9,q75/q25=154.77 attn_vo:H=0.5477,top10E=0.60,eRank=74.5,q75/q25=49.19 mlp_w1:H=0.6756,top10E=0.34,eRank=129.2,q75/q25=29.33 mlp_w2:H=0.6888,top10E=0.32,eRank=144.3,q75/q25=22.46 vo_prod:H=0.3996,top10E=0.79,eRank=25.7,q75/q25=1669.19 train_time:328797ms step_avg:91.33ms +[2025-08-22 10:41:52] [Rank 0] step:3601/10000 train_time:328815ms step_avg:91.31ms +[2025-08-22 10:41:52] [Rank 0] step:3601/10000 train_time:328815ms step_avg:91.31ms +[2025-08-22 10:41:54] [Rank 0] step:3621/10000 train_time:330652ms step_avg:91.32ms +[2025-08-22 10:41:54] [Rank 0] step:3621/10000 train_time:330652ms step_avg:91.32ms +[2025-08-22 10:41:56] [Rank 0] step:3641/10000 train_time:332504ms step_avg:91.32ms +[2025-08-22 10:41:56] [Rank 0] step:3641/10000 train_time:332504ms step_avg:91.32ms +[2025-08-22 10:41:57] [Rank 0] step:3661/10000 train_time:334357ms step_avg:91.33ms +[2025-08-22 10:41:57] [Rank 0] step:3661/10000 train_time:334357ms step_avg:91.33ms +[2025-08-22 10:41:59] [Rank 0] step:3681/10000 train_time:336213ms step_avg:91.34ms +[2025-08-22 10:41:59] [Rank 0] step:3681/10000 train_time:336213ms step_avg:91.34ms +[2025-08-22 10:42:01] [Rank 0] step:3701/10000 train_time:338070ms step_avg:91.35ms +[2025-08-22 10:42:01] [Rank 0] step:3701/10000 train_time:338070ms step_avg:91.35ms +[2025-08-22 10:42:03] [Rank 0] step:3721/10000 train_time:339956ms step_avg:91.36ms +[2025-08-22 10:42:03] [Rank 0] step:3721/10000 train_time:339956ms step_avg:91.36ms +[2025-08-22 
10:42:05] [Rank 0] step:3741/10000 train_time:341849ms step_avg:91.38ms +[2025-08-22 10:42:05] [Rank 0] step:3741/10000 train_time:341849ms step_avg:91.38ms +[2025-08-22 10:42:07] [Rank 0] step:3761/10000 train_time:343744ms step_avg:91.40ms +[2025-08-22 10:42:07] [Rank 0] step:3761/10000 train_time:343744ms step_avg:91.40ms +[2025-08-22 10:42:09] [Rank 0] step:3781/10000 train_time:345641ms step_avg:91.42ms +[2025-08-22 10:42:09] [Rank 0] step:3781/10000 train_time:345641ms step_avg:91.42ms +[2025-08-22 10:42:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:42:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:42:24] [Rank 0] PRINT: step:3800/10000 val_loss:4.1808 svd_entropy: attn_qk:H=0.5358,top10E=0.61,eRank=50.5,q75/q25=155.90 attn_vo:H=0.5552,top10E=0.59,eRank=77.2,q75/q25=48.73 mlp_w1:H=0.6802,top10E=0.33,eRank=132.2,q75/q25=29.92 mlp_w2:H=0.6928,top10E=0.31,eRank=146.8,q75/q25=22.91 vo_prod:H=0.4064,top10E=0.78,eRank=26.7,q75/q25=1537.22 train_time:347551ms step_avg:91.46ms +[2025-08-22 10:42:24] [Rank 0] PRINT: step:3800/10000 val_loss:4.1808 svd_entropy: attn_qk:H=0.5358,top10E=0.61,eRank=50.5,q75/q25=155.90 attn_vo:H=0.5552,top10E=0.59,eRank=77.2,q75/q25=48.73 mlp_w1:H=0.6802,top10E=0.33,eRank=132.2,q75/q25=29.92 mlp_w2:H=0.6928,top10E=0.31,eRank=146.8,q75/q25=22.91 vo_prod:H=0.4064,top10E=0.78,eRank=26.7,q75/q25=1537.22 train_time:347551ms step_avg:91.46ms +[2025-08-22 10:42:24] [Rank 0] step:3801/10000 train_time:347568ms step_avg:91.44ms +[2025-08-22 10:42:24] [Rank 0] step:3801/10000 train_time:347568ms step_avg:91.44ms +[2025-08-22 10:42:26] [Rank 0] step:3821/10000 train_time:349450ms step_avg:91.46ms +[2025-08-22 10:42:26] [Rank 0] step:3821/10000 train_time:349450ms step_avg:91.46ms +[2025-08-22 10:42:28] [Rank 0] step:3841/10000 train_time:351348ms 
step_avg:91.47ms +[2025-08-22 10:42:28] [Rank 0] step:3841/10000 train_time:351348ms step_avg:91.47ms +[2025-08-22 10:42:30] [Rank 0] step:3861/10000 train_time:353245ms step_avg:91.49ms +[2025-08-22 10:42:30] [Rank 0] step:3861/10000 train_time:353245ms step_avg:91.49ms +[2025-08-22 10:42:32] [Rank 0] step:3881/10000 train_time:355140ms step_avg:91.51ms +[2025-08-22 10:42:32] [Rank 0] step:3881/10000 train_time:355140ms step_avg:91.51ms +[2025-08-22 10:42:34] [Rank 0] step:3901/10000 train_time:357036ms step_avg:91.52ms +[2025-08-22 10:42:34] [Rank 0] step:3901/10000 train_time:357036ms step_avg:91.52ms +[2025-08-22 10:42:36] [Rank 0] step:3921/10000 train_time:358934ms step_avg:91.54ms +[2025-08-22 10:42:36] [Rank 0] step:3921/10000 train_time:358934ms step_avg:91.54ms +[2025-08-22 10:42:37] [Rank 0] step:3941/10000 train_time:360833ms step_avg:91.56ms +[2025-08-22 10:42:37] [Rank 0] step:3941/10000 train_time:360833ms step_avg:91.56ms +[2025-08-22 10:42:39] [Rank 0] step:3961/10000 train_time:362730ms step_avg:91.58ms +[2025-08-22 10:42:39] [Rank 0] step:3961/10000 train_time:362730ms step_avg:91.58ms +[2025-08-22 10:42:41] [Rank 0] step:3981/10000 train_time:364629ms step_avg:91.59ms +[2025-08-22 10:42:41] [Rank 0] step:3981/10000 train_time:364629ms step_avg:91.59ms +[2025-08-22 10:42:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:42:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:42:57] [Rank 0] PRINT: step:4000/10000 val_loss:4.1588 svd_entropy: attn_qk:H=0.5419,top10E=0.60,eRank=52.1,q75/q25=154.52 attn_vo:H=0.5613,top10E=0.58,eRank=79.7,q75/q25=48.06 mlp_w1:H=0.6845,top10E=0.32,eRank=135.1,q75/q25=30.29 mlp_w2:H=0.6965,top10E=0.30,eRank=149.2,q75/q25=23.40 vo_prod:H=0.4116,top10E=0.78,eRank=27.5,q75/q25=1423.74 train_time:366542ms step_avg:91.64ms +[2025-08-22 10:42:57] [Rank 0] PRINT: step:4000/10000 val_loss:4.1588 svd_entropy: attn_qk:H=0.5419,top10E=0.60,eRank=52.1,q75/q25=154.52 attn_vo:H=0.5613,top10E=0.58,eRank=79.7,q75/q25=48.06 mlp_w1:H=0.6845,top10E=0.32,eRank=135.1,q75/q25=30.29 mlp_w2:H=0.6965,top10E=0.30,eRank=149.2,q75/q25=23.40 vo_prod:H=0.4116,top10E=0.78,eRank=27.5,q75/q25=1423.74 train_time:366542ms step_avg:91.64ms +[2025-08-22 10:42:57] [Rank 0] step:4001/10000 train_time:366561ms step_avg:91.62ms +[2025-08-22 10:42:57] [Rank 0] step:4001/10000 train_time:366561ms step_avg:91.62ms +[2025-08-22 10:42:59] [Rank 0] step:4021/10000 train_time:368436ms step_avg:91.63ms +[2025-08-22 10:42:59] [Rank 0] step:4021/10000 train_time:368436ms step_avg:91.63ms +[2025-08-22 10:43:00] [Rank 0] step:4041/10000 train_time:370327ms step_avg:91.64ms +[2025-08-22 10:43:00] [Rank 0] step:4041/10000 train_time:370327ms step_avg:91.64ms +[2025-08-22 10:43:02] [Rank 0] step:4061/10000 train_time:372217ms step_avg:91.66ms +[2025-08-22 10:43:02] [Rank 0] step:4061/10000 train_time:372217ms step_avg:91.66ms +[2025-08-22 10:43:05] [Rank 0] step:4081/10000 train_time:374756ms step_avg:91.83ms +[2025-08-22 10:43:05] [Rank 0] step:4081/10000 train_time:374756ms step_avg:91.83ms +[2025-08-22 10:43:07] [Rank 0] step:4101/10000 train_time:376649ms step_avg:91.84ms +[2025-08-22 10:43:07] [Rank 0] step:4101/10000 train_time:376649ms step_avg:91.84ms +[2025-08-22 10:43:09] [Rank 0] step:4121/10000 train_time:378539ms step_avg:91.86ms +[2025-08-22 10:43:09] [Rank 0] step:4121/10000 train_time:378539ms step_avg:91.86ms +[2025-08-22 
10:43:11] [Rank 0] step:4141/10000 train_time:380433ms step_avg:91.87ms +[2025-08-22 10:43:11] [Rank 0] step:4141/10000 train_time:380433ms step_avg:91.87ms +[2025-08-22 10:43:12] [Rank 0] step:4161/10000 train_time:382325ms step_avg:91.88ms +[2025-08-22 10:43:12] [Rank 0] step:4161/10000 train_time:382325ms step_avg:91.88ms +[2025-08-22 10:43:14] [Rank 0] step:4181/10000 train_time:384218ms step_avg:91.90ms +[2025-08-22 10:43:14] [Rank 0] step:4181/10000 train_time:384218ms step_avg:91.90ms +[2025-08-22 10:43:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:43:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:43:30] [Rank 0] PRINT: step:4200/10000 val_loss:4.1456 svd_entropy: attn_qk:H=0.5475,top10E=0.59,eRank=53.6,q75/q25=153.05 attn_vo:H=0.5685,top10E=0.56,eRank=82.6,q75/q25=47.51 mlp_w1:H=0.6885,top10E=0.31,eRank=137.9,q75/q25=30.49 mlp_w2:H=0.6999,top10E=0.29,eRank=151.5,q75/q25=24.02 vo_prod:H=0.4188,top10E=0.77,eRank=28.6,q75/q25=1333.10 train_time:386127ms step_avg:91.93ms +[2025-08-22 10:43:30] [Rank 0] PRINT: step:4200/10000 val_loss:4.1456 svd_entropy: attn_qk:H=0.5475,top10E=0.59,eRank=53.6,q75/q25=153.05 attn_vo:H=0.5685,top10E=0.56,eRank=82.6,q75/q25=47.51 mlp_w1:H=0.6885,top10E=0.31,eRank=137.9,q75/q25=30.49 mlp_w2:H=0.6999,top10E=0.29,eRank=151.5,q75/q25=24.02 vo_prod:H=0.4188,top10E=0.77,eRank=28.6,q75/q25=1333.10 train_time:386127ms step_avg:91.93ms +[2025-08-22 10:43:30] [Rank 0] step:4201/10000 train_time:386144ms step_avg:91.92ms +[2025-08-22 10:43:30] [Rank 0] step:4201/10000 train_time:386144ms step_avg:91.92ms +[2025-08-22 10:43:32] [Rank 0] step:4221/10000 train_time:388019ms step_avg:91.93ms +[2025-08-22 10:43:32] [Rank 0] step:4221/10000 train_time:388019ms step_avg:91.93ms +[2025-08-22 10:43:34] [Rank 0] step:4241/10000 train_time:389908ms 
step_avg:91.94ms +[2025-08-22 10:43:34] [Rank 0] step:4241/10000 train_time:389908ms step_avg:91.94ms +[2025-08-22 10:43:35] [Rank 0] step:4261/10000 train_time:391797ms step_avg:91.95ms +[2025-08-22 10:43:35] [Rank 0] step:4261/10000 train_time:391797ms step_avg:91.95ms +[2025-08-22 10:43:37] [Rank 0] step:4281/10000 train_time:393688ms step_avg:91.96ms +[2025-08-22 10:43:37] [Rank 0] step:4281/10000 train_time:393688ms step_avg:91.96ms +[2025-08-22 10:43:39] [Rank 0] step:4301/10000 train_time:395577ms step_avg:91.97ms +[2025-08-22 10:43:39] [Rank 0] step:4301/10000 train_time:395577ms step_avg:91.97ms +[2025-08-22 10:43:41] [Rank 0] step:4321/10000 train_time:397469ms step_avg:91.99ms +[2025-08-22 10:43:41] [Rank 0] step:4321/10000 train_time:397469ms step_avg:91.99ms +[2025-08-22 10:43:43] [Rank 0] step:4341/10000 train_time:399358ms step_avg:92.00ms +[2025-08-22 10:43:43] [Rank 0] step:4341/10000 train_time:399358ms step_avg:92.00ms +[2025-08-22 10:43:45] [Rank 0] step:4361/10000 train_time:401249ms step_avg:92.01ms +[2025-08-22 10:43:45] [Rank 0] step:4361/10000 train_time:401249ms step_avg:92.01ms +[2025-08-22 10:43:47] [Rank 0] step:4381/10000 train_time:403142ms step_avg:92.02ms +[2025-08-22 10:43:47] [Rank 0] step:4381/10000 train_time:403142ms step_avg:92.02ms +[2025-08-22 10:43:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:43:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:44:02] [Rank 0] PRINT: step:4400/10000 val_loss:4.1263 svd_entropy: attn_qk:H=0.5532,top10E=0.58,eRank=55.1,q75/q25=149.45 attn_vo:H=0.5743,top10E=0.55,eRank=85.0,q75/q25=48.00 mlp_w1:H=0.6924,top10E=0.30,eRank=140.6,q75/q25=31.02 mlp_w2:H=0.7035,top10E=0.29,eRank=153.9,q75/q25=24.47 vo_prod:H=0.4232,top10E=0.76,eRank=29.4,q75/q25=1300.74 train_time:405047ms step_avg:92.06ms +[2025-08-22 10:44:02] [Rank 0] PRINT: step:4400/10000 val_loss:4.1263 svd_entropy: attn_qk:H=0.5532,top10E=0.58,eRank=55.1,q75/q25=149.45 attn_vo:H=0.5743,top10E=0.55,eRank=85.0,q75/q25=48.00 mlp_w1:H=0.6924,top10E=0.30,eRank=140.6,q75/q25=31.02 mlp_w2:H=0.7035,top10E=0.29,eRank=153.9,q75/q25=24.47 vo_prod:H=0.4232,top10E=0.76,eRank=29.4,q75/q25=1300.74 train_time:405047ms step_avg:92.06ms +[2025-08-22 10:44:02] [Rank 0] step:4401/10000 train_time:405064ms step_avg:92.04ms +[2025-08-22 10:44:02] [Rank 0] step:4401/10000 train_time:405064ms step_avg:92.04ms +[2025-08-22 10:44:04] [Rank 0] step:4421/10000 train_time:406946ms step_avg:92.05ms +[2025-08-22 10:44:04] [Rank 0] step:4421/10000 train_time:406946ms step_avg:92.05ms +[2025-08-22 10:44:06] [Rank 0] step:4441/10000 train_time:408830ms step_avg:92.06ms +[2025-08-22 10:44:06] [Rank 0] step:4441/10000 train_time:408830ms step_avg:92.06ms +[2025-08-22 10:44:08] [Rank 0] step:4461/10000 train_time:410723ms step_avg:92.07ms +[2025-08-22 10:44:08] [Rank 0] step:4461/10000 train_time:410723ms step_avg:92.07ms +[2025-08-22 10:44:10] [Rank 0] step:4481/10000 train_time:412619ms step_avg:92.08ms +[2025-08-22 10:44:10] [Rank 0] step:4481/10000 train_time:412619ms step_avg:92.08ms +[2025-08-22 10:44:12] [Rank 0] step:4501/10000 train_time:414514ms step_avg:92.09ms +[2025-08-22 10:44:12] [Rank 0] step:4501/10000 train_time:414514ms step_avg:92.09ms +[2025-08-22 10:44:14] [Rank 0] step:4521/10000 train_time:416412ms step_avg:92.11ms +[2025-08-22 10:44:14] [Rank 0] step:4521/10000 train_time:416412ms step_avg:92.11ms +[2025-08-22 
10:44:16] [Rank 0] step:4541/10000 train_time:418311ms step_avg:92.12ms +[2025-08-22 10:44:16] [Rank 0] step:4541/10000 train_time:418311ms step_avg:92.12ms +[2025-08-22 10:44:17] [Rank 0] step:4561/10000 train_time:420211ms step_avg:92.13ms +[2025-08-22 10:44:17] [Rank 0] step:4561/10000 train_time:420211ms step_avg:92.13ms +[2025-08-22 10:44:19] [Rank 0] step:4581/10000 train_time:422115ms step_avg:92.14ms +[2025-08-22 10:44:19] [Rank 0] step:4581/10000 train_time:422115ms step_avg:92.14ms +[2025-08-22 10:44:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:44:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:44:35] [Rank 0] PRINT: step:4600/10000 val_loss:4.1044 svd_entropy: attn_qk:H=0.5582,top10E=0.57,eRank=56.6,q75/q25=147.96 attn_vo:H=0.5802,top10E=0.54,eRank=87.6,q75/q25=47.84 mlp_w1:H=0.6964,top10E=0.29,eRank=143.3,q75/q25=31.94 mlp_w2:H=0.7069,top10E=0.28,eRank=156.1,q75/q25=25.33 vo_prod:H=0.4293,top10E=0.76,eRank=30.3,q75/q25=1240.76 train_time:424030ms step_avg:92.18ms +[2025-08-22 10:44:35] [Rank 0] PRINT: step:4600/10000 val_loss:4.1044 svd_entropy: attn_qk:H=0.5582,top10E=0.57,eRank=56.6,q75/q25=147.96 attn_vo:H=0.5802,top10E=0.54,eRank=87.6,q75/q25=47.84 mlp_w1:H=0.6964,top10E=0.29,eRank=143.3,q75/q25=31.94 mlp_w2:H=0.7069,top10E=0.28,eRank=156.1,q75/q25=25.33 vo_prod:H=0.4293,top10E=0.76,eRank=30.3,q75/q25=1240.76 train_time:424030ms step_avg:92.18ms +[2025-08-22 10:44:35] [Rank 0] step:4601/10000 train_time:424047ms step_avg:92.16ms +[2025-08-22 10:44:35] [Rank 0] step:4601/10000 train_time:424047ms step_avg:92.16ms +[2025-08-22 10:44:37] [Rank 0] step:4621/10000 train_time:425951ms step_avg:92.18ms +[2025-08-22 10:44:37] [Rank 0] step:4621/10000 train_time:425951ms step_avg:92.18ms +[2025-08-22 10:44:39] [Rank 0] step:4641/10000 train_time:427850ms 
step_avg:92.19ms +[2025-08-22 10:44:39] [Rank 0] step:4641/10000 train_time:427850ms step_avg:92.19ms +[2025-08-22 10:44:40] [Rank 0] step:4661/10000 train_time:429750ms step_avg:92.20ms +[2025-08-22 10:44:40] [Rank 0] step:4661/10000 train_time:429750ms step_avg:92.20ms +[2025-08-22 10:44:42] [Rank 0] step:4681/10000 train_time:431650ms step_avg:92.21ms +[2025-08-22 10:44:42] [Rank 0] step:4681/10000 train_time:431650ms step_avg:92.21ms +[2025-08-22 10:44:44] [Rank 0] step:4701/10000 train_time:433553ms step_avg:92.23ms +[2025-08-22 10:44:44] [Rank 0] step:4701/10000 train_time:433553ms step_avg:92.23ms +[2025-08-22 10:44:46] [Rank 0] step:4721/10000 train_time:435454ms step_avg:92.24ms +[2025-08-22 10:44:46] [Rank 0] step:4721/10000 train_time:435454ms step_avg:92.24ms +[2025-08-22 10:44:48] [Rank 0] step:4741/10000 train_time:437358ms step_avg:92.25ms +[2025-08-22 10:44:48] [Rank 0] step:4741/10000 train_time:437358ms step_avg:92.25ms +[2025-08-22 10:44:50] [Rank 0] step:4761/10000 train_time:439262ms step_avg:92.26ms +[2025-08-22 10:44:50] [Rank 0] step:4761/10000 train_time:439262ms step_avg:92.26ms +[2025-08-22 10:44:52] [Rank 0] step:4781/10000 train_time:441163ms step_avg:92.27ms +[2025-08-22 10:44:52] [Rank 0] step:4781/10000 train_time:441163ms step_avg:92.27ms +[2025-08-22 10:44:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:44:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:45:07] [Rank 0] PRINT: step:4800/10000 val_loss:4.0955 svd_entropy: attn_qk:H=0.5632,top10E=0.56,eRank=58.0,q75/q25=145.17 attn_vo:H=0.5851,top10E=0.54,eRank=89.7,q75/q25=47.77 mlp_w1:H=0.7001,top10E=0.28,eRank=146.0,q75/q25=32.64 mlp_w2:H=0.7102,top10E=0.27,eRank=158.4,q75/q25=25.82 vo_prod:H=0.4339,top10E=0.75,eRank=30.8,q75/q25=1223.20 train_time:443080ms step_avg:92.31ms +[2025-08-22 10:45:07] [Rank 0] PRINT: step:4800/10000 val_loss:4.0955 svd_entropy: attn_qk:H=0.5632,top10E=0.56,eRank=58.0,q75/q25=145.17 attn_vo:H=0.5851,top10E=0.54,eRank=89.7,q75/q25=47.77 mlp_w1:H=0.7001,top10E=0.28,eRank=146.0,q75/q25=32.64 mlp_w2:H=0.7102,top10E=0.27,eRank=158.4,q75/q25=25.82 vo_prod:H=0.4339,top10E=0.75,eRank=30.8,q75/q25=1223.20 train_time:443080ms step_avg:92.31ms +[2025-08-22 10:45:07] [Rank 0] step:4801/10000 train_time:443097ms step_avg:92.29ms +[2025-08-22 10:45:07] [Rank 0] step:4801/10000 train_time:443097ms step_avg:92.29ms +[2025-08-22 10:45:09] [Rank 0] step:4821/10000 train_time:444985ms step_avg:92.30ms +[2025-08-22 10:45:09] [Rank 0] step:4821/10000 train_time:444985ms step_avg:92.30ms +[2025-08-22 10:45:11] [Rank 0] step:4841/10000 train_time:446879ms step_avg:92.31ms +[2025-08-22 10:45:11] [Rank 0] step:4841/10000 train_time:446879ms step_avg:92.31ms +[2025-08-22 10:45:13] [Rank 0] step:4861/10000 train_time:448775ms step_avg:92.32ms +[2025-08-22 10:45:13] [Rank 0] step:4861/10000 train_time:448775ms step_avg:92.32ms +[2025-08-22 10:45:15] [Rank 0] step:4881/10000 train_time:450672ms step_avg:92.33ms +[2025-08-22 10:45:15] [Rank 0] step:4881/10000 train_time:450672ms step_avg:92.33ms +[2025-08-22 10:45:17] [Rank 0] step:4901/10000 train_time:452568ms step_avg:92.34ms +[2025-08-22 10:45:17] [Rank 0] step:4901/10000 train_time:452568ms step_avg:92.34ms +[2025-08-22 10:45:19] [Rank 0] step:4921/10000 train_time:454469ms step_avg:92.35ms +[2025-08-22 10:45:19] [Rank 0] step:4921/10000 train_time:454469ms step_avg:92.35ms +[2025-08-22 
10:45:21] [Rank 0] step:4941/10000 train_time:456371ms step_avg:92.36ms +[2025-08-22 10:45:21] [Rank 0] step:4941/10000 train_time:456371ms step_avg:92.36ms +[2025-08-22 10:45:22] [Rank 0] step:4961/10000 train_time:458269ms step_avg:92.37ms +[2025-08-22 10:45:22] [Rank 0] step:4961/10000 train_time:458269ms step_avg:92.37ms +[2025-08-22 10:45:24] [Rank 0] step:4981/10000 train_time:460171ms step_avg:92.39ms +[2025-08-22 10:45:24] [Rank 0] step:4981/10000 train_time:460171ms step_avg:92.39ms +[2025-08-22 10:45:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:45:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:45:40] [Rank 0] PRINT: step:5000/10000 val_loss:4.0760 svd_entropy: attn_qk:H=0.5679,top10E=0.55,eRank=59.5,q75/q25=144.57 attn_vo:H=0.5906,top10E=0.53,eRank=92.0,q75/q25=47.70 mlp_w1:H=0.7035,top10E=0.28,eRank=148.4,q75/q25=33.31 mlp_w2:H=0.7131,top10E=0.27,eRank=160.4,q75/q25=26.59 vo_prod:H=0.4399,top10E=0.74,eRank=31.9,q75/q25=1187.74 train_time:462086ms step_avg:92.42ms +[2025-08-22 10:45:40] [Rank 0] PRINT: step:5000/10000 val_loss:4.0760 svd_entropy: attn_qk:H=0.5679,top10E=0.55,eRank=59.5,q75/q25=144.57 attn_vo:H=0.5906,top10E=0.53,eRank=92.0,q75/q25=47.70 mlp_w1:H=0.7035,top10E=0.28,eRank=148.4,q75/q25=33.31 mlp_w2:H=0.7131,top10E=0.27,eRank=160.4,q75/q25=26.59 vo_prod:H=0.4399,top10E=0.74,eRank=31.9,q75/q25=1187.74 train_time:462086ms step_avg:92.42ms +[2025-08-22 10:45:40] [Rank 0] step:5001/10000 train_time:462104ms step_avg:92.40ms +[2025-08-22 10:45:40] [Rank 0] step:5001/10000 train_time:462104ms step_avg:92.40ms +[2025-08-22 10:45:42] [Rank 0] step:5021/10000 train_time:463991ms step_avg:92.41ms +[2025-08-22 10:45:42] [Rank 0] step:5021/10000 train_time:463991ms step_avg:92.41ms +[2025-08-22 10:45:44] [Rank 0] step:5041/10000 train_time:465887ms 
step_avg:92.42ms +[2025-08-22 10:45:44] [Rank 0] step:5041/10000 train_time:465887ms step_avg:92.42ms +[2025-08-22 10:45:46] [Rank 0] step:5061/10000 train_time:467778ms step_avg:92.43ms +[2025-08-22 10:45:46] [Rank 0] step:5061/10000 train_time:467778ms step_avg:92.43ms +[2025-08-22 10:45:47] [Rank 0] step:5081/10000 train_time:469675ms step_avg:92.44ms +[2025-08-22 10:45:47] [Rank 0] step:5081/10000 train_time:469675ms step_avg:92.44ms +[2025-08-22 10:45:49] [Rank 0] step:5101/10000 train_time:471569ms step_avg:92.45ms +[2025-08-22 10:45:49] [Rank 0] step:5101/10000 train_time:471569ms step_avg:92.45ms +[2025-08-22 10:45:51] [Rank 0] step:5121/10000 train_time:473469ms step_avg:92.46ms +[2025-08-22 10:45:51] [Rank 0] step:5121/10000 train_time:473469ms step_avg:92.46ms +[2025-08-22 10:45:53] [Rank 0] step:5141/10000 train_time:475369ms step_avg:92.47ms +[2025-08-22 10:45:53] [Rank 0] step:5141/10000 train_time:475369ms step_avg:92.47ms +[2025-08-22 10:45:55] [Rank 0] step:5161/10000 train_time:477268ms step_avg:92.48ms +[2025-08-22 10:45:55] [Rank 0] step:5161/10000 train_time:477268ms step_avg:92.48ms +[2025-08-22 10:45:57] [Rank 0] step:5181/10000 train_time:479171ms step_avg:92.49ms +[2025-08-22 10:45:57] [Rank 0] step:5181/10000 train_time:479171ms step_avg:92.49ms +[2025-08-22 10:45:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:45:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:46:12] [Rank 0] PRINT: step:5200/10000 val_loss:4.0596 svd_entropy: attn_qk:H=0.5722,top10E=0.55,eRank=60.8,q75/q25=139.68 attn_vo:H=0.5950,top10E=0.52,eRank=93.9,q75/q25=48.25 mlp_w1:H=0.7068,top10E=0.27,eRank=150.9,q75/q25=34.55 mlp_w2:H=0.7159,top10E=0.26,eRank=162.4,q75/q25=27.36 vo_prod:H=0.4442,top10E=0.74,eRank=32.4,q75/q25=1209.83 train_time:481110ms step_avg:92.52ms +[2025-08-22 10:46:12] [Rank 0] PRINT: step:5200/10000 val_loss:4.0596 svd_entropy: attn_qk:H=0.5722,top10E=0.55,eRank=60.8,q75/q25=139.68 attn_vo:H=0.5950,top10E=0.52,eRank=93.9,q75/q25=48.25 mlp_w1:H=0.7068,top10E=0.27,eRank=150.9,q75/q25=34.55 mlp_w2:H=0.7159,top10E=0.26,eRank=162.4,q75/q25=27.36 vo_prod:H=0.4442,top10E=0.74,eRank=32.4,q75/q25=1209.83 train_time:481110ms step_avg:92.52ms +[2025-08-22 10:46:13] [Rank 0] step:5201/10000 train_time:481127ms step_avg:92.51ms +[2025-08-22 10:46:13] [Rank 0] step:5201/10000 train_time:481127ms step_avg:92.51ms +[2025-08-22 10:46:14] [Rank 0] step:5221/10000 train_time:483038ms step_avg:92.52ms +[2025-08-22 10:46:14] [Rank 0] step:5221/10000 train_time:483038ms step_avg:92.52ms +[2025-08-22 10:46:16] [Rank 0] step:5241/10000 train_time:484963ms step_avg:92.53ms +[2025-08-22 10:46:16] [Rank 0] step:5241/10000 train_time:484963ms step_avg:92.53ms +[2025-08-22 10:46:18] [Rank 0] step:5261/10000 train_time:486889ms step_avg:92.55ms +[2025-08-22 10:46:18] [Rank 0] step:5261/10000 train_time:486889ms step_avg:92.55ms +[2025-08-22 10:46:20] [Rank 0] step:5281/10000 train_time:488815ms step_avg:92.56ms +[2025-08-22 10:46:20] [Rank 0] step:5281/10000 train_time:488815ms step_avg:92.56ms +[2025-08-22 10:46:22] [Rank 0] step:5301/10000 train_time:490754ms step_avg:92.58ms +[2025-08-22 10:46:22] [Rank 0] step:5301/10000 train_time:490754ms step_avg:92.58ms +[2025-08-22 10:46:24] [Rank 0] step:5321/10000 train_time:492684ms step_avg:92.59ms +[2025-08-22 10:46:24] [Rank 0] step:5321/10000 train_time:492684ms step_avg:92.59ms +[2025-08-22 
10:46:26] [Rank 0] step:5341/10000 train_time:494614ms step_avg:92.61ms +[2025-08-22 10:46:26] [Rank 0] step:5341/10000 train_time:494614ms step_avg:92.61ms +[2025-08-22 10:46:28] [Rank 0] step:5361/10000 train_time:496548ms step_avg:92.62ms +[2025-08-22 10:46:28] [Rank 0] step:5361/10000 train_time:496548ms step_avg:92.62ms +[2025-08-22 10:46:30] [Rank 0] step:5381/10000 train_time:498482ms step_avg:92.64ms +[2025-08-22 10:46:30] [Rank 0] step:5381/10000 train_time:498482ms step_avg:92.64ms +[2025-08-22 10:46:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:46:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:46:45] [Rank 0] PRINT: step:5400/10000 val_loss:4.0449 svd_entropy: attn_qk:H=0.5766,top10E=0.54,eRank=62.2,q75/q25=136.93 attn_vo:H=0.5989,top10E=0.51,eRank=95.4,q75/q25=49.22 mlp_w1:H=0.7098,top10E=0.26,eRank=153.2,q75/q25=35.36 mlp_w2:H=0.7186,top10E=0.26,eRank=164.3,q75/q25=28.23 vo_prod:H=0.4473,top10E=0.74,eRank=32.8,q75/q25=1260.77 train_time:500425ms step_avg:92.67ms +[2025-08-22 10:46:45] [Rank 0] PRINT: step:5400/10000 val_loss:4.0449 svd_entropy: attn_qk:H=0.5766,top10E=0.54,eRank=62.2,q75/q25=136.93 attn_vo:H=0.5989,top10E=0.51,eRank=95.4,q75/q25=49.22 mlp_w1:H=0.7098,top10E=0.26,eRank=153.2,q75/q25=35.36 mlp_w2:H=0.7186,top10E=0.26,eRank=164.3,q75/q25=28.23 vo_prod:H=0.4473,top10E=0.74,eRank=32.8,q75/q25=1260.77 train_time:500425ms step_avg:92.67ms +[2025-08-22 10:46:46] [Rank 0] step:5401/10000 train_time:500443ms step_avg:92.66ms +[2025-08-22 10:46:46] [Rank 0] step:5401/10000 train_time:500443ms step_avg:92.66ms +[2025-08-22 10:46:48] [Rank 0] step:5421/10000 train_time:502382ms step_avg:92.67ms +[2025-08-22 10:46:48] [Rank 0] step:5421/10000 train_time:502382ms step_avg:92.67ms +[2025-08-22 10:46:49] [Rank 0] step:5441/10000 train_time:504311ms 
step_avg:92.69ms +[2025-08-22 10:46:49] [Rank 0] step:5441/10000 train_time:504311ms step_avg:92.69ms +[2025-08-22 10:46:51] [Rank 0] step:5461/10000 train_time:506247ms step_avg:92.70ms +[2025-08-22 10:46:51] [Rank 0] step:5461/10000 train_time:506247ms step_avg:92.70ms +[2025-08-22 10:46:53] [Rank 0] step:5481/10000 train_time:508179ms step_avg:92.72ms +[2025-08-22 10:46:53] [Rank 0] step:5481/10000 train_time:508179ms step_avg:92.72ms +[2025-08-22 10:46:55] [Rank 0] step:5501/10000 train_time:510124ms step_avg:92.73ms +[2025-08-22 10:46:55] [Rank 0] step:5501/10000 train_time:510124ms step_avg:92.73ms +[2025-08-22 10:46:57] [Rank 0] step:5521/10000 train_time:512063ms step_avg:92.75ms +[2025-08-22 10:46:57] [Rank 0] step:5521/10000 train_time:512063ms step_avg:92.75ms +[2025-08-22 10:46:59] [Rank 0] step:5541/10000 train_time:514000ms step_avg:92.76ms +[2025-08-22 10:46:59] [Rank 0] step:5541/10000 train_time:514000ms step_avg:92.76ms +[2025-08-22 10:47:01] [Rank 0] step:5561/10000 train_time:515937ms step_avg:92.78ms +[2025-08-22 10:47:01] [Rank 0] step:5561/10000 train_time:515937ms step_avg:92.78ms +[2025-08-22 10:47:03] [Rank 0] step:5581/10000 train_time:517873ms step_avg:92.79ms +[2025-08-22 10:47:03] [Rank 0] step:5581/10000 train_time:517873ms step_avg:92.79ms +[2025-08-22 10:47:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:47:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:47:19] [Rank 0] PRINT: step:5600/10000 val_loss:4.0337 svd_entropy: attn_qk:H=0.5807,top10E=0.53,eRank=63.5,q75/q25=135.14 attn_vo:H=0.6023,top10E=0.51,eRank=96.9,q75/q25=49.94 mlp_w1:H=0.7127,top10E=0.26,eRank=155.3,q75/q25=36.28 mlp_w2:H=0.7212,top10E=0.25,eRank=166.2,q75/q25=29.17 vo_prod:H=0.4490,top10E=0.73,eRank=33.0,q75/q25=1309.18 train_time:519893ms step_avg:92.84ms +[2025-08-22 10:47:19] [Rank 0] PRINT: step:5600/10000 val_loss:4.0337 svd_entropy: attn_qk:H=0.5807,top10E=0.53,eRank=63.5,q75/q25=135.14 attn_vo:H=0.6023,top10E=0.51,eRank=96.9,q75/q25=49.94 mlp_w1:H=0.7127,top10E=0.26,eRank=155.3,q75/q25=36.28 mlp_w2:H=0.7212,top10E=0.25,eRank=166.2,q75/q25=29.17 vo_prod:H=0.4490,top10E=0.73,eRank=33.0,q75/q25=1309.18 train_time:519893ms step_avg:92.84ms +[2025-08-22 10:47:19] [Rank 0] step:5601/10000 train_time:519911ms step_avg:92.82ms +[2025-08-22 10:47:19] [Rank 0] step:5601/10000 train_time:519911ms step_avg:92.82ms +[2025-08-22 10:47:21] [Rank 0] step:5621/10000 train_time:521836ms step_avg:92.84ms +[2025-08-22 10:47:21] [Rank 0] step:5621/10000 train_time:521836ms step_avg:92.84ms +[2025-08-22 10:47:23] [Rank 0] step:5641/10000 train_time:523764ms step_avg:92.85ms +[2025-08-22 10:47:23] [Rank 0] step:5641/10000 train_time:523764ms step_avg:92.85ms +[2025-08-22 10:47:25] [Rank 0] step:5661/10000 train_time:525689ms step_avg:92.86ms +[2025-08-22 10:47:25] [Rank 0] step:5661/10000 train_time:525689ms step_avg:92.86ms +[2025-08-22 10:47:26] [Rank 0] step:5681/10000 train_time:527621ms step_avg:92.87ms +[2025-08-22 10:47:26] [Rank 0] step:5681/10000 train_time:527621ms step_avg:92.87ms +[2025-08-22 10:47:28] [Rank 0] step:5701/10000 train_time:529552ms step_avg:92.89ms +[2025-08-22 10:47:28] [Rank 0] step:5701/10000 train_time:529552ms step_avg:92.89ms +[2025-08-22 10:47:30] [Rank 0] step:5721/10000 train_time:531486ms step_avg:92.90ms +[2025-08-22 10:47:30] [Rank 0] step:5721/10000 train_time:531486ms step_avg:92.90ms +[2025-08-22 
10:47:32] [Rank 0] step:5741/10000 train_time:533416ms step_avg:92.91ms +[2025-08-22 10:47:32] [Rank 0] step:5741/10000 train_time:533416ms step_avg:92.91ms +[2025-08-22 10:47:34] [Rank 0] step:5761/10000 train_time:535350ms step_avg:92.93ms +[2025-08-22 10:47:34] [Rank 0] step:5761/10000 train_time:535350ms step_avg:92.93ms +[2025-08-22 10:47:36] [Rank 0] step:5781/10000 train_time:537283ms step_avg:92.94ms +[2025-08-22 10:47:36] [Rank 0] step:5781/10000 train_time:537283ms step_avg:92.94ms +[2025-08-22 10:47:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:47:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:47:52] [Rank 0] PRINT: step:5800/10000 val_loss:4.0288 svd_entropy: attn_qk:H=0.5845,top10E=0.53,eRank=64.7,q75/q25=132.08 attn_vo:H=0.6061,top10E=0.50,eRank=98.6,q75/q25=50.90 mlp_w1:H=0.7154,top10E=0.25,eRank=157.5,q75/q25=37.09 mlp_w2:H=0.7236,top10E=0.25,eRank=167.9,q75/q25=29.78 vo_prod:H=0.4527,top10E=0.73,eRank=33.5,q75/q25=1360.60 train_time:539230ms step_avg:92.97ms +[2025-08-22 10:47:52] [Rank 0] PRINT: step:5800/10000 val_loss:4.0288 svd_entropy: attn_qk:H=0.5845,top10E=0.53,eRank=64.7,q75/q25=132.08 attn_vo:H=0.6061,top10E=0.50,eRank=98.6,q75/q25=50.90 mlp_w1:H=0.7154,top10E=0.25,eRank=157.5,q75/q25=37.09 mlp_w2:H=0.7236,top10E=0.25,eRank=167.9,q75/q25=29.78 vo_prod:H=0.4527,top10E=0.73,eRank=33.5,q75/q25=1360.60 train_time:539230ms step_avg:92.97ms +[2025-08-22 10:47:52] [Rank 0] step:5801/10000 train_time:539249ms step_avg:92.96ms +[2025-08-22 10:47:52] [Rank 0] step:5801/10000 train_time:539249ms step_avg:92.96ms +[2025-08-22 10:47:54] [Rank 0] step:5821/10000 train_time:541151ms step_avg:92.97ms +[2025-08-22 10:47:54] [Rank 0] step:5821/10000 train_time:541151ms step_avg:92.97ms +[2025-08-22 10:47:56] [Rank 0] step:5841/10000 train_time:543074ms 
step_avg:92.98ms +[2025-08-22 10:47:56] [Rank 0] step:5841/10000 train_time:543074ms step_avg:92.98ms +[2025-08-22 10:47:58] [Rank 0] step:5861/10000 train_time:545005ms step_avg:92.99ms +[2025-08-22 10:47:58] [Rank 0] step:5861/10000 train_time:545005ms step_avg:92.99ms +[2025-08-22 10:47:59] [Rank 0] step:5881/10000 train_time:546933ms step_avg:93.00ms +[2025-08-22 10:47:59] [Rank 0] step:5881/10000 train_time:546933ms step_avg:93.00ms +[2025-08-22 10:48:01] [Rank 0] step:5901/10000 train_time:548862ms step_avg:93.01ms +[2025-08-22 10:48:01] [Rank 0] step:5901/10000 train_time:548862ms step_avg:93.01ms +[2025-08-22 10:48:03] [Rank 0] step:5921/10000 train_time:550791ms step_avg:93.02ms +[2025-08-22 10:48:03] [Rank 0] step:5921/10000 train_time:550791ms step_avg:93.02ms +[2025-08-22 10:48:05] [Rank 0] step:5941/10000 train_time:552729ms step_avg:93.04ms +[2025-08-22 10:48:05] [Rank 0] step:5941/10000 train_time:552729ms step_avg:93.04ms +[2025-08-22 10:48:07] [Rank 0] step:5961/10000 train_time:554663ms step_avg:93.05ms +[2025-08-22 10:48:07] [Rank 0] step:5961/10000 train_time:554663ms step_avg:93.05ms +[2025-08-22 10:48:09] [Rank 0] step:5981/10000 train_time:556648ms step_avg:93.07ms +[2025-08-22 10:48:09] [Rank 0] step:5981/10000 train_time:556648ms step_avg:93.07ms +[2025-08-22 10:48:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:48:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:48:25] [Rank 0] PRINT: step:6000/10000 val_loss:4.0044 svd_entropy: attn_qk:H=0.5881,top10E=0.52,eRank=66.0,q75/q25=130.55 attn_vo:H=0.6100,top10E=0.49,eRank=100.4,q75/q25=51.57 mlp_w1:H=0.7179,top10E=0.25,eRank=159.4,q75/q25=37.64 mlp_w2:H=0.7259,top10E=0.25,eRank=169.6,q75/q25=30.80 vo_prod:H=0.4577,top10E=0.72,eRank=34.2,q75/q25=1455.03 train_time:558661ms step_avg:93.11ms +[2025-08-22 10:48:25] [Rank 0] PRINT: step:6000/10000 val_loss:4.0044 svd_entropy: attn_qk:H=0.5881,top10E=0.52,eRank=66.0,q75/q25=130.55 attn_vo:H=0.6100,top10E=0.49,eRank=100.4,q75/q25=51.57 mlp_w1:H=0.7179,top10E=0.25,eRank=159.4,q75/q25=37.64 mlp_w2:H=0.7259,top10E=0.25,eRank=169.6,q75/q25=30.80 vo_prod:H=0.4577,top10E=0.72,eRank=34.2,q75/q25=1455.03 train_time:558661ms step_avg:93.11ms +[2025-08-22 10:48:25] [Rank 0] step:6001/10000 train_time:558679ms step_avg:93.10ms +[2025-08-22 10:48:25] [Rank 0] step:6001/10000 train_time:558679ms step_avg:93.10ms +[2025-08-22 10:48:27] [Rank 0] step:6021/10000 train_time:560589ms step_avg:93.11ms +[2025-08-22 10:48:27] [Rank 0] step:6021/10000 train_time:560589ms step_avg:93.11ms +[2025-08-22 10:48:29] [Rank 0] step:6041/10000 train_time:562519ms step_avg:93.12ms +[2025-08-22 10:48:29] [Rank 0] step:6041/10000 train_time:562519ms step_avg:93.12ms +[2025-08-22 10:48:30] [Rank 0] step:6061/10000 train_time:564454ms step_avg:93.13ms +[2025-08-22 10:48:30] [Rank 0] step:6061/10000 train_time:564454ms step_avg:93.13ms +[2025-08-22 10:48:32] [Rank 0] step:6081/10000 train_time:566385ms step_avg:93.14ms +[2025-08-22 10:48:32] [Rank 0] step:6081/10000 train_time:566385ms step_avg:93.14ms +[2025-08-22 10:48:34] [Rank 0] step:6101/10000 train_time:568322ms step_avg:93.15ms +[2025-08-22 10:48:34] [Rank 0] step:6101/10000 train_time:568322ms step_avg:93.15ms +[2025-08-22 10:48:37] [Rank 0] step:6121/10000 train_time:570527ms step_avg:93.21ms +[2025-08-22 10:48:37] [Rank 0] step:6121/10000 train_time:570527ms step_avg:93.21ms +[2025-08-22 
10:48:38] [Rank 0] step:6141/10000 train_time:572471ms step_avg:93.22ms +[2025-08-22 10:48:38] [Rank 0] step:6141/10000 train_time:572471ms step_avg:93.22ms +[2025-08-22 10:48:40] [Rank 0] step:6161/10000 train_time:574407ms step_avg:93.23ms +[2025-08-22 10:48:40] [Rank 0] step:6161/10000 train_time:574407ms step_avg:93.23ms +[2025-08-22 10:48:42] [Rank 0] step:6181/10000 train_time:576346ms step_avg:93.24ms +[2025-08-22 10:48:42] [Rank 0] step:6181/10000 train_time:576346ms step_avg:93.24ms +[2025-08-22 10:48:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:48:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:48:58] [Rank 0] PRINT: step:6200/10000 val_loss:3.9892 svd_entropy: attn_qk:H=0.5915,top10E=0.51,eRank=67.1,q75/q25=129.40 attn_vo:H=0.6138,top10E=0.49,eRank=102.2,q75/q25=52.85 mlp_w1:H=0.7202,top10E=0.24,eRank=161.3,q75/q25=38.31 mlp_w2:H=0.7280,top10E=0.24,eRank=171.2,q75/q25=31.62 vo_prod:H=0.4625,top10E=0.72,eRank=34.9,q75/q25=1548.10 train_time:578297ms step_avg:93.27ms +[2025-08-22 10:48:58] [Rank 0] PRINT: step:6200/10000 val_loss:3.9892 svd_entropy: attn_qk:H=0.5915,top10E=0.51,eRank=67.1,q75/q25=129.40 attn_vo:H=0.6138,top10E=0.49,eRank=102.2,q75/q25=52.85 mlp_w1:H=0.7202,top10E=0.24,eRank=161.3,q75/q25=38.31 mlp_w2:H=0.7280,top10E=0.24,eRank=171.2,q75/q25=31.62 vo_prod:H=0.4625,top10E=0.72,eRank=34.9,q75/q25=1548.10 train_time:578297ms step_avg:93.27ms +[2025-08-22 10:48:58] [Rank 0] step:6201/10000 train_time:578315ms step_avg:93.26ms +[2025-08-22 10:48:58] [Rank 0] step:6201/10000 train_time:578315ms step_avg:93.26ms +[2025-08-22 10:49:00] [Rank 0] step:6221/10000 train_time:580234ms step_avg:93.27ms +[2025-08-22 10:49:00] [Rank 0] step:6221/10000 train_time:580234ms step_avg:93.27ms +[2025-08-22 10:49:02] [Rank 0] step:6241/10000 train_time:582166ms 
step_avg:93.28ms +[2025-08-22 10:49:02] [Rank 0] step:6241/10000 train_time:582166ms step_avg:93.28ms +[2025-08-22 10:49:04] [Rank 0] step:6261/10000 train_time:584103ms step_avg:93.29ms +[2025-08-22 10:49:04] [Rank 0] step:6261/10000 train_time:584103ms step_avg:93.29ms +[2025-08-22 10:49:06] [Rank 0] step:6281/10000 train_time:586042ms step_avg:93.30ms +[2025-08-22 10:49:06] [Rank 0] step:6281/10000 train_time:586042ms step_avg:93.30ms +[2025-08-22 10:49:08] [Rank 0] step:6301/10000 train_time:587981ms step_avg:93.32ms +[2025-08-22 10:49:08] [Rank 0] step:6301/10000 train_time:587981ms step_avg:93.32ms +[2025-08-22 10:49:10] [Rank 0] step:6321/10000 train_time:589919ms step_avg:93.33ms +[2025-08-22 10:49:10] [Rank 0] step:6321/10000 train_time:589919ms step_avg:93.33ms +[2025-08-22 10:49:12] [Rank 0] step:6341/10000 train_time:591922ms step_avg:93.35ms +[2025-08-22 10:49:12] [Rank 0] step:6341/10000 train_time:591922ms step_avg:93.35ms +[2025-08-22 10:49:14] [Rank 0] step:6361/10000 train_time:593948ms step_avg:93.37ms +[2025-08-22 10:49:14] [Rank 0] step:6361/10000 train_time:593948ms step_avg:93.37ms +[2025-08-22 10:49:16] [Rank 0] step:6381/10000 train_time:595884ms step_avg:93.38ms +[2025-08-22 10:49:16] [Rank 0] step:6381/10000 train_time:595884ms step_avg:93.38ms +[2025-08-22 10:49:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:49:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:49:31] [Rank 0] PRINT: step:6400/10000 val_loss:3.9762 svd_entropy: attn_qk:H=0.5942,top10E=0.51,eRank=68.2,q75/q25=129.11 attn_vo:H=0.6170,top10E=0.48,eRank=103.7,q75/q25=53.45 mlp_w1:H=0.7223,top10E=0.24,eRank=163.0,q75/q25=38.16 mlp_w2:H=0.7299,top10E=0.24,eRank=172.6,q75/q25=32.69 vo_prod:H=0.4656,top10E=0.71,eRank=35.5,q75/q25=1652.83 train_time:597835ms step_avg:93.41ms +[2025-08-22 10:49:31] [Rank 0] PRINT: step:6400/10000 val_loss:3.9762 svd_entropy: attn_qk:H=0.5942,top10E=0.51,eRank=68.2,q75/q25=129.11 attn_vo:H=0.6170,top10E=0.48,eRank=103.7,q75/q25=53.45 mlp_w1:H=0.7223,top10E=0.24,eRank=163.0,q75/q25=38.16 mlp_w2:H=0.7299,top10E=0.24,eRank=172.6,q75/q25=32.69 vo_prod:H=0.4656,top10E=0.71,eRank=35.5,q75/q25=1652.83 train_time:597835ms step_avg:93.41ms +[2025-08-22 10:49:31] [Rank 0] step:6401/10000 train_time:597853ms step_avg:93.40ms +[2025-08-22 10:49:31] [Rank 0] step:6401/10000 train_time:597853ms step_avg:93.40ms +[2025-08-22 10:49:33] [Rank 0] step:6421/10000 train_time:599780ms step_avg:93.41ms +[2025-08-22 10:49:33] [Rank 0] step:6421/10000 train_time:599780ms step_avg:93.41ms +[2025-08-22 10:49:35] [Rank 0] step:6441/10000 train_time:601712ms step_avg:93.42ms +[2025-08-22 10:49:35] [Rank 0] step:6441/10000 train_time:601712ms step_avg:93.42ms +[2025-08-22 10:49:37] [Rank 0] step:6461/10000 train_time:603648ms step_avg:93.43ms +[2025-08-22 10:49:37] [Rank 0] step:6461/10000 train_time:603648ms step_avg:93.43ms +[2025-08-22 10:49:39] [Rank 0] step:6481/10000 train_time:605588ms step_avg:93.44ms +[2025-08-22 10:49:39] [Rank 0] step:6481/10000 train_time:605588ms step_avg:93.44ms +[2025-08-22 10:49:41] [Rank 0] step:6501/10000 train_time:607518ms step_avg:93.45ms +[2025-08-22 10:49:41] [Rank 0] step:6501/10000 train_time:607518ms step_avg:93.45ms +[2025-08-22 10:49:43] [Rank 0] step:6521/10000 train_time:609448ms step_avg:93.46ms +[2025-08-22 10:49:43] [Rank 0] step:6521/10000 train_time:609448ms step_avg:93.46ms +[2025-08-22 
10:49:45] [Rank 0] step:6541/10000 train_time:611384ms step_avg:93.47ms +[2025-08-22 10:49:45] [Rank 0] step:6541/10000 train_time:611384ms step_avg:93.47ms +[2025-08-22 10:49:47] [Rank 0] step:6561/10000 train_time:613320ms step_avg:93.48ms +[2025-08-22 10:49:47] [Rank 0] step:6561/10000 train_time:613320ms step_avg:93.48ms +[2025-08-22 10:49:49] [Rank 0] step:6581/10000 train_time:615252ms step_avg:93.49ms +[2025-08-22 10:49:49] [Rank 0] step:6581/10000 train_time:615252ms step_avg:93.49ms +[2025-08-22 10:49:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:49:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:50:04] [Rank 0] PRINT: step:6600/10000 val_loss:3.9633 svd_entropy: attn_qk:H=0.5969,top10E=0.51,eRank=69.2,q75/q25=128.51 attn_vo:H=0.6200,top10E=0.48,eRank=105.2,q75/q25=54.36 mlp_w1:H=0.7243,top10E=0.24,eRank=164.7,q75/q25=38.74 mlp_w2:H=0.7318,top10E=0.24,eRank=174.0,q75/q25=33.69 vo_prod:H=0.4691,top10E=0.71,eRank=36.1,q75/q25=1723.07 train_time:617204ms step_avg:93.52ms +[2025-08-22 10:50:04] [Rank 0] PRINT: step:6600/10000 val_loss:3.9633 svd_entropy: attn_qk:H=0.5969,top10E=0.51,eRank=69.2,q75/q25=128.51 attn_vo:H=0.6200,top10E=0.48,eRank=105.2,q75/q25=54.36 mlp_w1:H=0.7243,top10E=0.24,eRank=164.7,q75/q25=38.74 mlp_w2:H=0.7318,top10E=0.24,eRank=174.0,q75/q25=33.69 vo_prod:H=0.4691,top10E=0.71,eRank=36.1,q75/q25=1723.07 train_time:617204ms step_avg:93.52ms +[2025-08-22 10:50:04] [Rank 0] step:6601/10000 train_time:617222ms step_avg:93.50ms +[2025-08-22 10:50:04] [Rank 0] step:6601/10000 train_time:617222ms step_avg:93.50ms +[2025-08-22 10:50:06] [Rank 0] step:6621/10000 train_time:619140ms step_avg:93.51ms +[2025-08-22 10:50:06] [Rank 0] step:6621/10000 train_time:619140ms step_avg:93.51ms +[2025-08-22 10:50:08] [Rank 0] step:6641/10000 train_time:621080ms 
step_avg:93.52ms +[2025-08-22 10:50:08] [Rank 0] step:6641/10000 train_time:621080ms step_avg:93.52ms +[2025-08-22 10:50:10] [Rank 0] step:6661/10000 train_time:623013ms step_avg:93.53ms +[2025-08-22 10:50:10] [Rank 0] step:6661/10000 train_time:623013ms step_avg:93.53ms +[2025-08-22 10:50:12] [Rank 0] step:6681/10000 train_time:624963ms step_avg:93.54ms +[2025-08-22 10:50:12] [Rank 0] step:6681/10000 train_time:624963ms step_avg:93.54ms +[2025-08-22 10:50:14] [Rank 0] step:6701/10000 train_time:626991ms step_avg:93.57ms +[2025-08-22 10:50:14] [Rank 0] step:6701/10000 train_time:626991ms step_avg:93.57ms +[2025-08-22 10:50:16] [Rank 0] step:6721/10000 train_time:629017ms step_avg:93.59ms +[2025-08-22 10:50:16] [Rank 0] step:6721/10000 train_time:629017ms step_avg:93.59ms +[2025-08-22 10:50:18] [Rank 0] step:6741/10000 train_time:630979ms step_avg:93.60ms +[2025-08-22 10:50:18] [Rank 0] step:6741/10000 train_time:630979ms step_avg:93.60ms +[2025-08-22 10:50:20] [Rank 0] step:6761/10000 train_time:632939ms step_avg:93.62ms +[2025-08-22 10:50:20] [Rank 0] step:6761/10000 train_time:632939ms step_avg:93.62ms +[2025-08-22 10:50:22] [Rank 0] step:6781/10000 train_time:634903ms step_avg:93.63ms +[2025-08-22 10:50:22] [Rank 0] step:6781/10000 train_time:634903ms step_avg:93.63ms +[2025-08-22 10:50:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:50:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:50:38] [Rank 0] PRINT: step:6800/10000 val_loss:3.9467 svd_entropy: attn_qk:H=0.5994,top10E=0.50,eRank=70.2,q75/q25=127.67 attn_vo:H=0.6229,top10E=0.47,eRank=106.6,q75/q25=54.81 mlp_w1:H=0.7261,top10E=0.23,eRank=166.1,q75/q25=38.81 mlp_w2:H=0.7333,top10E=0.23,eRank=175.2,q75/q25=35.06 vo_prod:H=0.4719,top10E=0.70,eRank=36.6,q75/q25=1831.74 train_time:636883ms step_avg:93.66ms +[2025-08-22 10:50:38] [Rank 0] PRINT: step:6800/10000 val_loss:3.9467 svd_entropy: attn_qk:H=0.5994,top10E=0.50,eRank=70.2,q75/q25=127.67 attn_vo:H=0.6229,top10E=0.47,eRank=106.6,q75/q25=54.81 mlp_w1:H=0.7261,top10E=0.23,eRank=166.1,q75/q25=38.81 mlp_w2:H=0.7333,top10E=0.23,eRank=175.2,q75/q25=35.06 vo_prod:H=0.4719,top10E=0.70,eRank=36.6,q75/q25=1831.74 train_time:636883ms step_avg:93.66ms +[2025-08-22 10:50:38] [Rank 0] step:6801/10000 train_time:636901ms step_avg:93.65ms +[2025-08-22 10:50:38] [Rank 0] step:6801/10000 train_time:636901ms step_avg:93.65ms +[2025-08-22 10:50:40] [Rank 0] step:6821/10000 train_time:638849ms step_avg:93.66ms +[2025-08-22 10:50:40] [Rank 0] step:6821/10000 train_time:638849ms step_avg:93.66ms +[2025-08-22 10:50:42] [Rank 0] step:6841/10000 train_time:640808ms step_avg:93.67ms +[2025-08-22 10:50:42] [Rank 0] step:6841/10000 train_time:640808ms step_avg:93.67ms +[2025-08-22 10:50:44] [Rank 0] step:6861/10000 train_time:642764ms step_avg:93.68ms +[2025-08-22 10:50:44] [Rank 0] step:6861/10000 train_time:642764ms step_avg:93.68ms +[2025-08-22 10:50:46] [Rank 0] step:6881/10000 train_time:644729ms step_avg:93.70ms +[2025-08-22 10:50:46] [Rank 0] step:6881/10000 train_time:644729ms step_avg:93.70ms +[2025-08-22 10:50:47] [Rank 0] step:6901/10000 train_time:646689ms step_avg:93.71ms +[2025-08-22 10:50:47] [Rank 0] step:6901/10000 train_time:646689ms step_avg:93.71ms +[2025-08-22 10:50:49] [Rank 0] step:6921/10000 train_time:648647ms step_avg:93.72ms +[2025-08-22 10:50:49] [Rank 0] step:6921/10000 train_time:648647ms step_avg:93.72ms +[2025-08-22 
10:50:51] [Rank 0] step:6941/10000 train_time:650619ms step_avg:93.74ms +[2025-08-22 10:50:51] [Rank 0] step:6941/10000 train_time:650619ms step_avg:93.74ms +[2025-08-22 10:50:53] [Rank 0] step:6961/10000 train_time:652598ms step_avg:93.75ms +[2025-08-22 10:50:53] [Rank 0] step:6961/10000 train_time:652598ms step_avg:93.75ms +[2025-08-22 10:50:55] [Rank 0] step:6981/10000 train_time:654570ms step_avg:93.76ms +[2025-08-22 10:50:55] [Rank 0] step:6981/10000 train_time:654570ms step_avg:93.76ms +[2025-08-22 10:50:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:50:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:51:11] [Rank 0] PRINT: step:7000/10000 val_loss:3.9300 svd_entropy: attn_qk:H=0.6017,top10E=0.50,eRank=71.0,q75/q25=126.20 attn_vo:H=0.6254,top10E=0.47,eRank=107.9,q75/q25=55.48 mlp_w1:H=0.7276,top10E=0.23,eRank=167.5,q75/q25=38.92 mlp_w2:H=0.7348,top10E=0.23,eRank=176.4,q75/q25=36.14 vo_prod:H=0.4743,top10E=0.70,eRank=37.1,q75/q25=1966.64 train_time:656552ms step_avg:93.79ms +[2025-08-22 10:51:11] [Rank 0] PRINT: step:7000/10000 val_loss:3.9300 svd_entropy: attn_qk:H=0.6017,top10E=0.50,eRank=71.0,q75/q25=126.20 attn_vo:H=0.6254,top10E=0.47,eRank=107.9,q75/q25=55.48 mlp_w1:H=0.7276,top10E=0.23,eRank=167.5,q75/q25=38.92 mlp_w2:H=0.7348,top10E=0.23,eRank=176.4,q75/q25=36.14 vo_prod:H=0.4743,top10E=0.70,eRank=37.1,q75/q25=1966.64 train_time:656552ms step_avg:93.79ms +[2025-08-22 10:51:11] [Rank 0] step:7001/10000 train_time:656570ms step_avg:93.78ms +[2025-08-22 10:51:11] [Rank 0] step:7001/10000 train_time:656570ms step_avg:93.78ms +[2025-08-22 10:51:13] [Rank 0] step:7021/10000 train_time:658522ms step_avg:93.79ms +[2025-08-22 10:51:13] [Rank 0] step:7021/10000 train_time:658522ms step_avg:93.79ms +[2025-08-22 10:51:15] [Rank 0] step:7041/10000 train_time:660487ms 
step_avg:93.81ms +[2025-08-22 10:51:15] [Rank 0] step:7041/10000 train_time:660487ms step_avg:93.81ms +[2025-08-22 10:51:17] [Rank 0] step:7061/10000 train_time:662451ms step_avg:93.82ms +[2025-08-22 10:51:17] [Rank 0] step:7061/10000 train_time:662451ms step_avg:93.82ms +[2025-08-22 10:51:19] [Rank 0] step:7081/10000 train_time:664464ms step_avg:93.84ms +[2025-08-22 10:51:19] [Rank 0] step:7081/10000 train_time:664464ms step_avg:93.84ms +[2025-08-22 10:51:21] [Rank 0] step:7101/10000 train_time:666504ms step_avg:93.86ms +[2025-08-22 10:51:21] [Rank 0] step:7101/10000 train_time:666504ms step_avg:93.86ms +[2025-08-22 10:51:23] [Rank 0] step:7121/10000 train_time:668469ms step_avg:93.87ms +[2025-08-22 10:51:23] [Rank 0] step:7121/10000 train_time:668469ms step_avg:93.87ms +[2025-08-22 10:51:25] [Rank 0] step:7141/10000 train_time:670437ms step_avg:93.89ms +[2025-08-22 10:51:25] [Rank 0] step:7141/10000 train_time:670437ms step_avg:93.89ms +[2025-08-22 10:51:27] [Rank 0] step:7161/10000 train_time:672410ms step_avg:93.90ms +[2025-08-22 10:51:27] [Rank 0] step:7161/10000 train_time:672410ms step_avg:93.90ms +[2025-08-22 10:51:29] [Rank 0] step:7181/10000 train_time:674379ms step_avg:93.91ms +[2025-08-22 10:51:29] [Rank 0] step:7181/10000 train_time:674379ms step_avg:93.91ms +[2025-08-22 10:51:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:51:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:51:44] [Rank 0] PRINT: step:7200/10000 val_loss:3.9192 svd_entropy: attn_qk:H=0.6036,top10E=0.49,eRank=71.8,q75/q25=125.52 attn_vo:H=0.6279,top10E=0.46,eRank=109.3,q75/q25=56.35 mlp_w1:H=0.7291,top10E=0.23,eRank=168.8,q75/q25=38.98 mlp_w2:H=0.7361,top10E=0.23,eRank=177.5,q75/q25=36.91 vo_prod:H=0.4776,top10E=0.69,eRank=37.8,q75/q25=2040.77 train_time:676365ms step_avg:93.94ms +[2025-08-22 10:51:44] [Rank 0] PRINT: step:7200/10000 val_loss:3.9192 svd_entropy: attn_qk:H=0.6036,top10E=0.49,eRank=71.8,q75/q25=125.52 attn_vo:H=0.6279,top10E=0.46,eRank=109.3,q75/q25=56.35 mlp_w1:H=0.7291,top10E=0.23,eRank=168.8,q75/q25=38.98 mlp_w2:H=0.7361,top10E=0.23,eRank=177.5,q75/q25=36.91 vo_prod:H=0.4776,top10E=0.69,eRank=37.8,q75/q25=2040.77 train_time:676365ms step_avg:93.94ms +[2025-08-22 10:51:44] [Rank 0] step:7201/10000 train_time:676382ms step_avg:93.93ms +[2025-08-22 10:51:44] [Rank 0] step:7201/10000 train_time:676382ms step_avg:93.93ms +[2025-08-22 10:51:46] [Rank 0] step:7221/10000 train_time:678334ms step_avg:93.94ms +[2025-08-22 10:51:46] [Rank 0] step:7221/10000 train_time:678334ms step_avg:93.94ms +[2025-08-22 10:51:48] [Rank 0] step:7241/10000 train_time:680294ms step_avg:93.95ms +[2025-08-22 10:51:48] [Rank 0] step:7241/10000 train_time:680294ms step_avg:93.95ms +[2025-08-22 10:51:50] [Rank 0] step:7261/10000 train_time:682254ms step_avg:93.96ms +[2025-08-22 10:51:50] [Rank 0] step:7261/10000 train_time:682254ms step_avg:93.96ms +[2025-08-22 10:51:52] [Rank 0] step:7281/10000 train_time:684229ms step_avg:93.97ms +[2025-08-22 10:51:52] [Rank 0] step:7281/10000 train_time:684229ms step_avg:93.97ms +[2025-08-22 10:51:54] [Rank 0] step:7301/10000 train_time:686194ms step_avg:93.99ms +[2025-08-22 10:51:54] [Rank 0] step:7301/10000 train_time:686194ms step_avg:93.99ms +[2025-08-22 10:51:56] [Rank 0] step:7321/10000 train_time:688174ms step_avg:94.00ms +[2025-08-22 10:51:56] [Rank 0] step:7321/10000 train_time:688174ms step_avg:94.00ms +[2025-08-22 
10:51:58] [Rank 0] step:7341/10000 train_time:690139ms step_avg:94.01ms +[2025-08-22 10:51:58] [Rank 0] step:7341/10000 train_time:690139ms step_avg:94.01ms +[2025-08-22 10:52:00] [Rank 0] step:7361/10000 train_time:692117ms step_avg:94.02ms +[2025-08-22 10:52:00] [Rank 0] step:7361/10000 train_time:692117ms step_avg:94.02ms +[2025-08-22 10:52:02] [Rank 0] step:7381/10000 train_time:694090ms step_avg:94.04ms +[2025-08-22 10:52:02] [Rank 0] step:7381/10000 train_time:694090ms step_avg:94.04ms +[2025-08-22 10:52:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:52:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:52:17] [Rank 0] PRINT: step:7400/10000 val_loss:3.8988 svd_entropy: attn_qk:H=0.6056,top10E=0.49,eRank=72.6,q75/q25=124.42 attn_vo:H=0.6298,top10E=0.46,eRank=110.3,q75/q25=57.05 mlp_w1:H=0.7305,top10E=0.23,eRank=169.9,q75/q25=39.33 mlp_w2:H=0.7373,top10E=0.23,eRank=178.4,q75/q25=38.28 vo_prod:H=0.4792,top10E=0.69,eRank=38.1,q75/q25=2174.26 train_time:696061ms step_avg:94.06ms +[2025-08-22 10:52:17] [Rank 0] PRINT: step:7400/10000 val_loss:3.8988 svd_entropy: attn_qk:H=0.6056,top10E=0.49,eRank=72.6,q75/q25=124.42 attn_vo:H=0.6298,top10E=0.46,eRank=110.3,q75/q25=57.05 mlp_w1:H=0.7305,top10E=0.23,eRank=169.9,q75/q25=39.33 mlp_w2:H=0.7373,top10E=0.23,eRank=178.4,q75/q25=38.28 vo_prod:H=0.4792,top10E=0.69,eRank=38.1,q75/q25=2174.26 train_time:696061ms step_avg:94.06ms +[2025-08-22 10:52:17] [Rank 0] step:7401/10000 train_time:696078ms step_avg:94.05ms +[2025-08-22 10:52:17] [Rank 0] step:7401/10000 train_time:696078ms step_avg:94.05ms +[2025-08-22 10:52:19] [Rank 0] step:7421/10000 train_time:698040ms step_avg:94.06ms +[2025-08-22 10:52:19] [Rank 0] step:7421/10000 train_time:698040ms step_avg:94.06ms +[2025-08-22 10:52:21] [Rank 0] step:7441/10000 train_time:700063ms 
step_avg:94.08ms +[2025-08-22 10:52:21] [Rank 0] step:7441/10000 train_time:700063ms step_avg:94.08ms +[2025-08-22 10:52:23] [Rank 0] step:7461/10000 train_time:702086ms step_avg:94.10ms +[2025-08-22 10:52:23] [Rank 0] step:7461/10000 train_time:702086ms step_avg:94.10ms +[2025-08-22 10:52:25] [Rank 0] step:7481/10000 train_time:704060ms step_avg:94.11ms +[2025-08-22 10:52:25] [Rank 0] step:7481/10000 train_time:704060ms step_avg:94.11ms +[2025-08-22 10:52:27] [Rank 0] step:7501/10000 train_time:706028ms step_avg:94.12ms +[2025-08-22 10:52:27] [Rank 0] step:7501/10000 train_time:706028ms step_avg:94.12ms +[2025-08-22 10:52:29] [Rank 0] step:7521/10000 train_time:708001ms step_avg:94.14ms +[2025-08-22 10:52:29] [Rank 0] step:7521/10000 train_time:708001ms step_avg:94.14ms +[2025-08-22 10:52:31] [Rank 0] step:7541/10000 train_time:709978ms step_avg:94.15ms +[2025-08-22 10:52:31] [Rank 0] step:7541/10000 train_time:709978ms step_avg:94.15ms +[2025-08-22 10:52:33] [Rank 0] step:7561/10000 train_time:711939ms step_avg:94.16ms +[2025-08-22 10:52:33] [Rank 0] step:7561/10000 train_time:711939ms step_avg:94.16ms +[2025-08-22 10:52:35] [Rank 0] step:7581/10000 train_time:713924ms step_avg:94.17ms +[2025-08-22 10:52:35] [Rank 0] step:7581/10000 train_time:713924ms step_avg:94.17ms +[2025-08-22 10:52:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:52:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:52:51] [Rank 0] PRINT: step:7600/10000 val_loss:3.8914 svd_entropy: attn_qk:H=0.6073,top10E=0.49,eRank=73.3,q75/q25=124.09 attn_vo:H=0.6318,top10E=0.46,eRank=111.3,q75/q25=57.93 mlp_w1:H=0.7318,top10E=0.22,eRank=171.0,q75/q25=39.47 mlp_w2:H=0.7385,top10E=0.23,eRank=179.4,q75/q25=39.26 vo_prod:H=0.4812,top10E=0.69,eRank=38.6,q75/q25=2284.84 train_time:715917ms step_avg:94.20ms +[2025-08-22 10:52:51] [Rank 0] PRINT: step:7600/10000 val_loss:3.8914 svd_entropy: attn_qk:H=0.6073,top10E=0.49,eRank=73.3,q75/q25=124.09 attn_vo:H=0.6318,top10E=0.46,eRank=111.3,q75/q25=57.93 mlp_w1:H=0.7318,top10E=0.22,eRank=171.0,q75/q25=39.47 mlp_w2:H=0.7385,top10E=0.23,eRank=179.4,q75/q25=39.26 vo_prod:H=0.4812,top10E=0.69,eRank=38.6,q75/q25=2284.84 train_time:715917ms step_avg:94.20ms +[2025-08-22 10:52:51] [Rank 0] step:7601/10000 train_time:715935ms step_avg:94.19ms +[2025-08-22 10:52:51] [Rank 0] step:7601/10000 train_time:715935ms step_avg:94.19ms +[2025-08-22 10:52:53] [Rank 0] step:7621/10000 train_time:717886ms step_avg:94.20ms +[2025-08-22 10:52:53] [Rank 0] step:7621/10000 train_time:717886ms step_avg:94.20ms +[2025-08-22 10:52:55] [Rank 0] step:7641/10000 train_time:719845ms step_avg:94.21ms +[2025-08-22 10:52:55] [Rank 0] step:7641/10000 train_time:719845ms step_avg:94.21ms +[2025-08-22 10:52:57] [Rank 0] step:7661/10000 train_time:721814ms step_avg:94.22ms +[2025-08-22 10:52:57] [Rank 0] step:7661/10000 train_time:721814ms step_avg:94.22ms +[2025-08-22 10:52:59] [Rank 0] step:7681/10000 train_time:723780ms step_avg:94.23ms +[2025-08-22 10:52:59] [Rank 0] step:7681/10000 train_time:723780ms step_avg:94.23ms +[2025-08-22 10:53:01] [Rank 0] step:7701/10000 train_time:725743ms step_avg:94.24ms +[2025-08-22 10:53:01] [Rank 0] step:7701/10000 train_time:725743ms step_avg:94.24ms +[2025-08-22 10:53:03] [Rank 0] step:7721/10000 train_time:727721ms step_avg:94.25ms +[2025-08-22 10:53:03] [Rank 0] step:7721/10000 train_time:727721ms step_avg:94.25ms +[2025-08-22 
10:53:05] [Rank 0] step:7741/10000 train_time:729692ms step_avg:94.26ms +[2025-08-22 10:53:05] [Rank 0] step:7741/10000 train_time:729692ms step_avg:94.26ms +[2025-08-22 10:53:07] [Rank 0] step:7761/10000 train_time:731669ms step_avg:94.28ms +[2025-08-22 10:53:07] [Rank 0] step:7761/10000 train_time:731669ms step_avg:94.28ms +[2025-08-22 10:53:09] [Rank 0] step:7781/10000 train_time:733641ms step_avg:94.29ms +[2025-08-22 10:53:09] [Rank 0] step:7781/10000 train_time:733641ms step_avg:94.29ms +[2025-08-22 10:53:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:53:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:53:24] [Rank 0] PRINT: step:7800/10000 val_loss:3.8771 svd_entropy: attn_qk:H=0.6089,top10E=0.49,eRank=73.9,q75/q25=123.57 attn_vo:H=0.6334,top10E=0.45,eRank=112.2,q75/q25=58.67 mlp_w1:H=0.7330,top10E=0.22,eRank=172.0,q75/q25=39.52 mlp_w2:H=0.7395,top10E=0.22,eRank=180.3,q75/q25=40.45 vo_prod:H=0.4833,top10E=0.69,eRank=39.0,q75/q25=2396.39 train_time:735637ms step_avg:94.31ms +[2025-08-22 10:53:24] [Rank 0] PRINT: step:7800/10000 val_loss:3.8771 svd_entropy: attn_qk:H=0.6089,top10E=0.49,eRank=73.9,q75/q25=123.57 attn_vo:H=0.6334,top10E=0.45,eRank=112.2,q75/q25=58.67 mlp_w1:H=0.7330,top10E=0.22,eRank=172.0,q75/q25=39.52 mlp_w2:H=0.7395,top10E=0.22,eRank=180.3,q75/q25=40.45 vo_prod:H=0.4833,top10E=0.69,eRank=39.0,q75/q25=2396.39 train_time:735637ms step_avg:94.31ms +[2025-08-22 10:53:24] [Rank 0] step:7801/10000 train_time:735655ms step_avg:94.30ms +[2025-08-22 10:53:24] [Rank 0] step:7801/10000 train_time:735655ms step_avg:94.30ms +[2025-08-22 10:53:26] [Rank 0] step:7821/10000 train_time:737654ms step_avg:94.32ms +[2025-08-22 10:53:26] [Rank 0] step:7821/10000 train_time:737654ms step_avg:94.32ms +[2025-08-22 10:53:28] [Rank 0] step:7841/10000 train_time:739614ms 
step_avg:94.33ms +[2025-08-22 10:53:28] [Rank 0] step:7841/10000 train_time:739614ms step_avg:94.33ms +[2025-08-22 10:53:30] [Rank 0] step:7861/10000 train_time:741587ms step_avg:94.34ms +[2025-08-22 10:53:30] [Rank 0] step:7861/10000 train_time:741587ms step_avg:94.34ms +[2025-08-22 10:53:32] [Rank 0] step:7881/10000 train_time:743561ms step_avg:94.35ms +[2025-08-22 10:53:32] [Rank 0] step:7881/10000 train_time:743561ms step_avg:94.35ms +[2025-08-22 10:53:34] [Rank 0] step:7901/10000 train_time:745526ms step_avg:94.36ms +[2025-08-22 10:53:34] [Rank 0] step:7901/10000 train_time:745526ms step_avg:94.36ms +[2025-08-22 10:53:36] [Rank 0] step:7921/10000 train_time:747503ms step_avg:94.37ms +[2025-08-22 10:53:36] [Rank 0] step:7921/10000 train_time:747503ms step_avg:94.37ms +[2025-08-22 10:53:38] [Rank 0] step:7941/10000 train_time:749484ms step_avg:94.38ms +[2025-08-22 10:53:38] [Rank 0] step:7941/10000 train_time:749484ms step_avg:94.38ms +[2025-08-22 10:53:40] [Rank 0] step:7961/10000 train_time:751457ms step_avg:94.39ms +[2025-08-22 10:53:40] [Rank 0] step:7961/10000 train_time:751457ms step_avg:94.39ms +[2025-08-22 10:53:42] [Rank 0] step:7981/10000 train_time:753424ms step_avg:94.40ms +[2025-08-22 10:53:42] [Rank 0] step:7981/10000 train_time:753424ms step_avg:94.40ms +[2025-08-22 10:53:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:53:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:53:58] [Rank 0] PRINT: step:8000/10000 val_loss:3.8603 svd_entropy: attn_qk:H=0.6102,top10E=0.48,eRank=74.5,q75/q25=122.30 attn_vo:H=0.6351,top10E=0.45,eRank=113.1,q75/q25=59.07 mlp_w1:H=0.7340,top10E=0.22,eRank=172.9,q75/q25=39.62 mlp_w2:H=0.7405,top10E=0.22,eRank=181.1,q75/q25=41.53 vo_prod:H=0.4850,top10E=0.68,eRank=39.4,q75/q25=2515.51 train_time:755416ms step_avg:94.43ms +[2025-08-22 10:53:58] [Rank 0] PRINT: step:8000/10000 val_loss:3.8603 svd_entropy: attn_qk:H=0.6102,top10E=0.48,eRank=74.5,q75/q25=122.30 attn_vo:H=0.6351,top10E=0.45,eRank=113.1,q75/q25=59.07 mlp_w1:H=0.7340,top10E=0.22,eRank=172.9,q75/q25=39.62 mlp_w2:H=0.7405,top10E=0.22,eRank=181.1,q75/q25=41.53 vo_prod:H=0.4850,top10E=0.68,eRank=39.4,q75/q25=2515.51 train_time:755416ms step_avg:94.43ms +[2025-08-22 10:53:58] [Rank 0] step:8001/10000 train_time:755433ms step_avg:94.42ms +[2025-08-22 10:53:58] [Rank 0] step:8001/10000 train_time:755433ms step_avg:94.42ms +[2025-08-22 10:54:00] [Rank 0] step:8021/10000 train_time:757405ms step_avg:94.43ms +[2025-08-22 10:54:00] [Rank 0] step:8021/10000 train_time:757405ms step_avg:94.43ms +[2025-08-22 10:54:02] [Rank 0] step:8041/10000 train_time:759385ms step_avg:94.44ms +[2025-08-22 10:54:02] [Rank 0] step:8041/10000 train_time:759385ms step_avg:94.44ms +[2025-08-22 10:54:04] [Rank 0] step:8061/10000 train_time:761359ms step_avg:94.45ms +[2025-08-22 10:54:04] [Rank 0] step:8061/10000 train_time:761359ms step_avg:94.45ms +[2025-08-22 10:54:06] [Rank 0] step:8081/10000 train_time:763322ms step_avg:94.46ms +[2025-08-22 10:54:06] [Rank 0] step:8081/10000 train_time:763322ms step_avg:94.46ms +[2025-08-22 10:54:08] [Rank 0] step:8101/10000 train_time:765303ms step_avg:94.47ms +[2025-08-22 10:54:08] [Rank 0] step:8101/10000 train_time:765303ms step_avg:94.47ms +[2025-08-22 10:54:10] [Rank 0] step:8121/10000 train_time:767278ms step_avg:94.48ms +[2025-08-22 10:54:10] [Rank 0] step:8121/10000 train_time:767278ms step_avg:94.48ms +[2025-08-22 
10:54:12] [Rank 0] step:8141/10000 train_time:769491ms step_avg:94.52ms +[2025-08-22 10:54:12] [Rank 0] step:8141/10000 train_time:769491ms step_avg:94.52ms +[2025-08-22 10:54:14] [Rank 0] step:8161/10000 train_time:771482ms step_avg:94.53ms +[2025-08-22 10:54:14] [Rank 0] step:8161/10000 train_time:771482ms step_avg:94.53ms +[2025-08-22 10:54:16] [Rank 0] step:8181/10000 train_time:773492ms step_avg:94.55ms +[2025-08-22 10:54:16] [Rank 0] step:8181/10000 train_time:773492ms step_avg:94.55ms +[2025-08-22 10:54:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:54:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:54:32] [Rank 0] PRINT: step:8200/10000 val_loss:3.8507 svd_entropy: attn_qk:H=0.6113,top10E=0.48,eRank=74.9,q75/q25=122.66 attn_vo:H=0.6368,top10E=0.45,eRank=114.0,q75/q25=59.81 mlp_w1:H=0.7350,top10E=0.22,eRank=173.7,q75/q25=39.75 mlp_w2:H=0.7413,top10E=0.22,eRank=181.9,q75/q25=42.42 vo_prod:H=0.4867,top10E=0.68,eRank=39.8,q75/q25=2577.96 train_time:775529ms step_avg:94.58ms +[2025-08-22 10:54:32] [Rank 0] PRINT: step:8200/10000 val_loss:3.8507 svd_entropy: attn_qk:H=0.6113,top10E=0.48,eRank=74.9,q75/q25=122.66 attn_vo:H=0.6368,top10E=0.45,eRank=114.0,q75/q25=59.81 mlp_w1:H=0.7350,top10E=0.22,eRank=173.7,q75/q25=39.75 mlp_w2:H=0.7413,top10E=0.22,eRank=181.9,q75/q25=42.42 vo_prod:H=0.4867,top10E=0.68,eRank=39.8,q75/q25=2577.96 train_time:775529ms step_avg:94.58ms +[2025-08-22 10:54:32] [Rank 0] step:8201/10000 train_time:775546ms step_avg:94.57ms +[2025-08-22 10:54:32] [Rank 0] step:8201/10000 train_time:775546ms step_avg:94.57ms +[2025-08-22 10:54:34] [Rank 0] step:8221/10000 train_time:777530ms step_avg:94.58ms +[2025-08-22 10:54:34] [Rank 0] step:8221/10000 train_time:777530ms step_avg:94.58ms +[2025-08-22 10:54:36] [Rank 0] step:8241/10000 train_time:779528ms 
step_avg:94.59ms +[2025-08-22 10:54:36] [Rank 0] step:8241/10000 train_time:779528ms step_avg:94.59ms +[2025-08-22 10:54:38] [Rank 0] step:8261/10000 train_time:781530ms step_avg:94.60ms +[2025-08-22 10:54:38] [Rank 0] step:8261/10000 train_time:781530ms step_avg:94.60ms +[2025-08-22 10:54:40] [Rank 0] step:8281/10000 train_time:783524ms step_avg:94.62ms +[2025-08-22 10:54:40] [Rank 0] step:8281/10000 train_time:783524ms step_avg:94.62ms +[2025-08-22 10:54:42] [Rank 0] step:8301/10000 train_time:785518ms step_avg:94.63ms +[2025-08-22 10:54:42] [Rank 0] step:8301/10000 train_time:785518ms step_avg:94.63ms +[2025-08-22 10:54:44] [Rank 0] step:8321/10000 train_time:787510ms step_avg:94.64ms +[2025-08-22 10:54:44] [Rank 0] step:8321/10000 train_time:787510ms step_avg:94.64ms +[2025-08-22 10:54:46] [Rank 0] step:8341/10000 train_time:789513ms step_avg:94.65ms +[2025-08-22 10:54:46] [Rank 0] step:8341/10000 train_time:789513ms step_avg:94.65ms +[2025-08-22 10:54:48] [Rank 0] step:8361/10000 train_time:791509ms step_avg:94.67ms +[2025-08-22 10:54:48] [Rank 0] step:8361/10000 train_time:791509ms step_avg:94.67ms +[2025-08-22 10:54:50] [Rank 0] step:8381/10000 train_time:793503ms step_avg:94.68ms +[2025-08-22 10:54:50] [Rank 0] step:8381/10000 train_time:793503ms step_avg:94.68ms +[2025-08-22 10:54:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:54:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:55:05] [Rank 0] PRINT: step:8400/10000 val_loss:3.8373 svd_entropy: attn_qk:H=0.6124,top10E=0.48,eRank=75.4,q75/q25=122.15 attn_vo:H=0.6382,top10E=0.45,eRank=114.8,q75/q25=60.34 mlp_w1:H=0.7358,top10E=0.22,eRank=174.5,q75/q25=39.57 mlp_w2:H=0.7421,top10E=0.22,eRank=182.5,q75/q25=43.36 vo_prod:H=0.4883,top10E=0.68,eRank=40.2,q75/q25=2703.22 train_time:795511ms step_avg:94.70ms +[2025-08-22 10:55:05] [Rank 0] PRINT: step:8400/10000 val_loss:3.8373 svd_entropy: attn_qk:H=0.6124,top10E=0.48,eRank=75.4,q75/q25=122.15 attn_vo:H=0.6382,top10E=0.45,eRank=114.8,q75/q25=60.34 mlp_w1:H=0.7358,top10E=0.22,eRank=174.5,q75/q25=39.57 mlp_w2:H=0.7421,top10E=0.22,eRank=182.5,q75/q25=43.36 vo_prod:H=0.4883,top10E=0.68,eRank=40.2,q75/q25=2703.22 train_time:795511ms step_avg:94.70ms +[2025-08-22 10:55:06] [Rank 0] step:8401/10000 train_time:795529ms step_avg:94.69ms +[2025-08-22 10:55:06] [Rank 0] step:8401/10000 train_time:795529ms step_avg:94.69ms +[2025-08-22 10:55:08] [Rank 0] step:8421/10000 train_time:797513ms step_avg:94.71ms +[2025-08-22 10:55:08] [Rank 0] step:8421/10000 train_time:797513ms step_avg:94.71ms +[2025-08-22 10:55:10] [Rank 0] step:8441/10000 train_time:799504ms step_avg:94.72ms +[2025-08-22 10:55:10] [Rank 0] step:8441/10000 train_time:799504ms step_avg:94.72ms +[2025-08-22 10:55:12] [Rank 0] step:8461/10000 train_time:801494ms step_avg:94.73ms +[2025-08-22 10:55:12] [Rank 0] step:8461/10000 train_time:801494ms step_avg:94.73ms +[2025-08-22 10:55:14] [Rank 0] step:8481/10000 train_time:803493ms step_avg:94.74ms +[2025-08-22 10:55:14] [Rank 0] step:8481/10000 train_time:803493ms step_avg:94.74ms +[2025-08-22 10:55:16] [Rank 0] step:8501/10000 train_time:805509ms step_avg:94.75ms +[2025-08-22 10:55:16] [Rank 0] step:8501/10000 train_time:805509ms step_avg:94.75ms +[2025-08-22 10:55:18] [Rank 0] step:8521/10000 train_time:807505ms step_avg:94.77ms +[2025-08-22 10:55:18] [Rank 0] step:8521/10000 train_time:807505ms step_avg:94.77ms +[2025-08-22 
10:55:20] [Rank 0] step:8541/10000 train_time:809516ms step_avg:94.78ms +[2025-08-22 10:55:20] [Rank 0] step:8541/10000 train_time:809516ms step_avg:94.78ms +[2025-08-22 10:55:22] [Rank 0] step:8561/10000 train_time:811515ms step_avg:94.79ms +[2025-08-22 10:55:22] [Rank 0] step:8561/10000 train_time:811515ms step_avg:94.79ms +[2025-08-22 10:55:24] [Rank 0] step:8581/10000 train_time:813513ms step_avg:94.80ms +[2025-08-22 10:55:24] [Rank 0] step:8581/10000 train_time:813513ms step_avg:94.80ms +[2025-08-22 10:55:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:55:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:55:39] [Rank 0] PRINT: step:8600/10000 val_loss:3.8278 svd_entropy: attn_qk:H=0.6134,top10E=0.48,eRank=75.8,q75/q25=121.06 attn_vo:H=0.6392,top10E=0.44,eRank=115.3,q75/q25=60.35 mlp_w1:H=0.7365,top10E=0.22,eRank=175.1,q75/q25=39.51 mlp_w2:H=0.7427,top10E=0.22,eRank=183.1,q75/q25=44.40 vo_prod:H=0.4892,top10E=0.68,eRank=40.4,q75/q25=2687.69 train_time:815520ms step_avg:94.83ms +[2025-08-22 10:55:39] [Rank 0] PRINT: step:8600/10000 val_loss:3.8278 svd_entropy: attn_qk:H=0.6134,top10E=0.48,eRank=75.8,q75/q25=121.06 attn_vo:H=0.6392,top10E=0.44,eRank=115.3,q75/q25=60.35 mlp_w1:H=0.7365,top10E=0.22,eRank=175.1,q75/q25=39.51 mlp_w2:H=0.7427,top10E=0.22,eRank=183.1,q75/q25=44.40 vo_prod:H=0.4892,top10E=0.68,eRank=40.4,q75/q25=2687.69 train_time:815520ms step_avg:94.83ms +[2025-08-22 10:55:39] [Rank 0] step:8601/10000 train_time:815537ms step_avg:94.82ms +[2025-08-22 10:55:39] [Rank 0] step:8601/10000 train_time:815537ms step_avg:94.82ms +[2025-08-22 10:55:41] [Rank 0] step:8621/10000 train_time:817514ms step_avg:94.83ms +[2025-08-22 10:55:41] [Rank 0] step:8621/10000 train_time:817514ms step_avg:94.83ms +[2025-08-22 10:55:43] [Rank 0] step:8641/10000 train_time:819502ms 
step_avg:94.84ms +[2025-08-22 10:55:43] [Rank 0] step:8641/10000 train_time:819502ms step_avg:94.84ms +[2025-08-22 10:55:45] [Rank 0] step:8661/10000 train_time:821495ms step_avg:94.85ms +[2025-08-22 10:55:45] [Rank 0] step:8661/10000 train_time:821495ms step_avg:94.85ms +[2025-08-22 10:55:47] [Rank 0] step:8681/10000 train_time:823495ms step_avg:94.86ms +[2025-08-22 10:55:47] [Rank 0] step:8681/10000 train_time:823495ms step_avg:94.86ms +[2025-08-22 10:55:49] [Rank 0] step:8701/10000 train_time:825484ms step_avg:94.87ms +[2025-08-22 10:55:49] [Rank 0] step:8701/10000 train_time:825484ms step_avg:94.87ms +[2025-08-22 10:55:51] [Rank 0] step:8721/10000 train_time:827483ms step_avg:94.88ms +[2025-08-22 10:55:51] [Rank 0] step:8721/10000 train_time:827483ms step_avg:94.88ms +[2025-08-22 10:55:53] [Rank 0] step:8741/10000 train_time:829473ms step_avg:94.89ms +[2025-08-22 10:55:53] [Rank 0] step:8741/10000 train_time:829473ms step_avg:94.89ms +[2025-08-22 10:55:55] [Rank 0] step:8761/10000 train_time:831473ms step_avg:94.91ms +[2025-08-22 10:55:55] [Rank 0] step:8761/10000 train_time:831473ms step_avg:94.91ms +[2025-08-22 10:55:57] [Rank 0] step:8781/10000 train_time:833475ms step_avg:94.92ms +[2025-08-22 10:55:57] [Rank 0] step:8781/10000 train_time:833475ms step_avg:94.92ms +[2025-08-22 10:55:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:55:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:56:13] [Rank 0] PRINT: step:8800/10000 val_loss:3.8161 svd_entropy: attn_qk:H=0.6142,top10E=0.48,eRank=76.2,q75/q25=121.30 attn_vo:H=0.6402,top10E=0.44,eRank=115.9,q75/q25=60.67 mlp_w1:H=0.7372,top10E=0.22,eRank=175.7,q75/q25=39.82 mlp_w2:H=0.7434,top10E=0.22,eRank=183.7,q75/q25=45.06 vo_prod:H=0.4901,top10E=0.67,eRank=40.7,q75/q25=2784.83 train_time:835490ms step_avg:94.94ms +[2025-08-22 10:56:13] [Rank 0] PRINT: step:8800/10000 val_loss:3.8161 svd_entropy: attn_qk:H=0.6142,top10E=0.48,eRank=76.2,q75/q25=121.30 attn_vo:H=0.6402,top10E=0.44,eRank=115.9,q75/q25=60.67 mlp_w1:H=0.7372,top10E=0.22,eRank=175.7,q75/q25=39.82 mlp_w2:H=0.7434,top10E=0.22,eRank=183.7,q75/q25=45.06 vo_prod:H=0.4901,top10E=0.67,eRank=40.7,q75/q25=2784.83 train_time:835490ms step_avg:94.94ms +[2025-08-22 10:56:13] [Rank 0] step:8801/10000 train_time:835507ms step_avg:94.93ms +[2025-08-22 10:56:13] [Rank 0] step:8801/10000 train_time:835507ms step_avg:94.93ms +[2025-08-22 10:56:15] [Rank 0] step:8821/10000 train_time:837479ms step_avg:94.94ms +[2025-08-22 10:56:15] [Rank 0] step:8821/10000 train_time:837479ms step_avg:94.94ms +[2025-08-22 10:56:17] [Rank 0] step:8841/10000 train_time:839495ms step_avg:94.95ms +[2025-08-22 10:56:17] [Rank 0] step:8841/10000 train_time:839495ms step_avg:94.95ms +[2025-08-22 10:56:19] [Rank 0] step:8861/10000 train_time:841487ms step_avg:94.97ms +[2025-08-22 10:56:19] [Rank 0] step:8861/10000 train_time:841487ms step_avg:94.97ms +[2025-08-22 10:56:21] [Rank 0] step:8881/10000 train_time:843509ms step_avg:94.98ms +[2025-08-22 10:56:21] [Rank 0] step:8881/10000 train_time:843509ms step_avg:94.98ms +[2025-08-22 10:56:23] [Rank 0] step:8901/10000 train_time:845511ms step_avg:94.99ms +[2025-08-22 10:56:23] [Rank 0] step:8901/10000 train_time:845511ms step_avg:94.99ms +[2025-08-22 10:56:25] [Rank 0] step:8921/10000 train_time:847526ms step_avg:95.00ms +[2025-08-22 10:56:25] [Rank 0] step:8921/10000 train_time:847526ms step_avg:95.00ms +[2025-08-22 
10:56:27] [Rank 0] step:8941/10000 train_time:849533ms step_avg:95.02ms +[2025-08-22 10:56:27] [Rank 0] step:8941/10000 train_time:849533ms step_avg:95.02ms +[2025-08-22 10:56:29] [Rank 0] step:8961/10000 train_time:851536ms step_avg:95.03ms +[2025-08-22 10:56:29] [Rank 0] step:8961/10000 train_time:851536ms step_avg:95.03ms +[2025-08-22 10:56:31] [Rank 0] step:8981/10000 train_time:853539ms step_avg:95.04ms +[2025-08-22 10:56:31] [Rank 0] step:8981/10000 train_time:853539ms step_avg:95.04ms +[2025-08-22 10:56:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:56:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:56:47] [Rank 0] PRINT: step:9000/10000 val_loss:3.8059 svd_entropy: attn_qk:H=0.6150,top10E=0.48,eRank=76.5,q75/q25=120.94 attn_vo:H=0.6412,top10E=0.44,eRank=116.4,q75/q25=61.02 mlp_w1:H=0.7378,top10E=0.21,eRank=176.3,q75/q25=39.76 mlp_w2:H=0.7439,top10E=0.22,eRank=184.2,q75/q25=45.75 vo_prod:H=0.4912,top10E=0.67,eRank=40.9,q75/q25=2832.00 train_time:855593ms step_avg:95.07ms +[2025-08-22 10:56:47] [Rank 0] PRINT: step:9000/10000 val_loss:3.8059 svd_entropy: attn_qk:H=0.6150,top10E=0.48,eRank=76.5,q75/q25=120.94 attn_vo:H=0.6412,top10E=0.44,eRank=116.4,q75/q25=61.02 mlp_w1:H=0.7378,top10E=0.21,eRank=176.3,q75/q25=39.76 mlp_w2:H=0.7439,top10E=0.22,eRank=184.2,q75/q25=45.75 vo_prod:H=0.4912,top10E=0.67,eRank=40.9,q75/q25=2832.00 train_time:855593ms step_avg:95.07ms +[2025-08-22 10:56:47] [Rank 0] step:9001/10000 train_time:855611ms step_avg:95.06ms +[2025-08-22 10:56:47] [Rank 0] step:9001/10000 train_time:855611ms step_avg:95.06ms +[2025-08-22 10:56:49] [Rank 0] step:9021/10000 train_time:857592ms step_avg:95.07ms +[2025-08-22 10:56:49] [Rank 0] step:9021/10000 train_time:857592ms step_avg:95.07ms +[2025-08-22 10:56:51] [Rank 0] step:9041/10000 train_time:859589ms 
step_avg:95.08ms +[2025-08-22 10:56:51] [Rank 0] step:9041/10000 train_time:859589ms step_avg:95.08ms +[2025-08-22 10:56:53] [Rank 0] step:9061/10000 train_time:861599ms step_avg:95.09ms +[2025-08-22 10:56:53] [Rank 0] step:9061/10000 train_time:861599ms step_avg:95.09ms +[2025-08-22 10:56:55] [Rank 0] step:9081/10000 train_time:863604ms step_avg:95.10ms +[2025-08-22 10:56:55] [Rank 0] step:9081/10000 train_time:863604ms step_avg:95.10ms +[2025-08-22 10:56:57] [Rank 0] step:9101/10000 train_time:865618ms step_avg:95.11ms +[2025-08-22 10:56:57] [Rank 0] step:9101/10000 train_time:865618ms step_avg:95.11ms +[2025-08-22 10:56:59] [Rank 0] step:9121/10000 train_time:867620ms step_avg:95.12ms +[2025-08-22 10:56:59] [Rank 0] step:9121/10000 train_time:867620ms step_avg:95.12ms +[2025-08-22 10:57:01] [Rank 0] step:9141/10000 train_time:869611ms step_avg:95.13ms +[2025-08-22 10:57:01] [Rank 0] step:9141/10000 train_time:869611ms step_avg:95.13ms +[2025-08-22 10:57:03] [Rank 0] step:9161/10000 train_time:871602ms step_avg:95.14ms +[2025-08-22 10:57:03] [Rank 0] step:9161/10000 train_time:871602ms step_avg:95.14ms +[2025-08-22 10:57:05] [Rank 0] step:9181/10000 train_time:873636ms step_avg:95.16ms +[2025-08-22 10:57:05] [Rank 0] step:9181/10000 train_time:873636ms step_avg:95.16ms +[2025-08-22 10:57:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:57:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:57:20] [Rank 0] PRINT: step:9200/10000 val_loss:3.7976 svd_entropy: attn_qk:H=0.6156,top10E=0.48,eRank=76.8,q75/q25=121.06 attn_vo:H=0.6420,top10E=0.44,eRank=116.9,q75/q25=60.89 mlp_w1:H=0.7383,top10E=0.21,eRank=176.7,q75/q25=39.82 mlp_w2:H=0.7444,top10E=0.22,eRank=184.6,q75/q25=46.55 vo_prod:H=0.4922,top10E=0.67,eRank=41.2,q75/q25=2875.74 train_time:875646ms step_avg:95.18ms +[2025-08-22 10:57:20] [Rank 0] PRINT: step:9200/10000 val_loss:3.7976 svd_entropy: attn_qk:H=0.6156,top10E=0.48,eRank=76.8,q75/q25=121.06 attn_vo:H=0.6420,top10E=0.44,eRank=116.9,q75/q25=60.89 mlp_w1:H=0.7383,top10E=0.21,eRank=176.7,q75/q25=39.82 mlp_w2:H=0.7444,top10E=0.22,eRank=184.6,q75/q25=46.55 vo_prod:H=0.4922,top10E=0.67,eRank=41.2,q75/q25=2875.74 train_time:875646ms step_avg:95.18ms +[2025-08-22 10:57:20] [Rank 0] step:9201/10000 train_time:875664ms step_avg:95.17ms +[2025-08-22 10:57:20] [Rank 0] step:9201/10000 train_time:875664ms step_avg:95.17ms +[2025-08-22 10:57:22] [Rank 0] step:9221/10000 train_time:877654ms step_avg:95.18ms +[2025-08-22 10:57:22] [Rank 0] step:9221/10000 train_time:877654ms step_avg:95.18ms +[2025-08-22 10:57:24] [Rank 0] step:9241/10000 train_time:879660ms step_avg:95.19ms +[2025-08-22 10:57:24] [Rank 0] step:9241/10000 train_time:879660ms step_avg:95.19ms +[2025-08-22 10:57:26] [Rank 0] step:9261/10000 train_time:881666ms step_avg:95.20ms +[2025-08-22 10:57:26] [Rank 0] step:9261/10000 train_time:881666ms step_avg:95.20ms +[2025-08-22 10:57:28] [Rank 0] step:9281/10000 train_time:883656ms step_avg:95.21ms +[2025-08-22 10:57:28] [Rank 0] step:9281/10000 train_time:883656ms step_avg:95.21ms +[2025-08-22 10:57:30] [Rank 0] step:9301/10000 train_time:885648ms step_avg:95.22ms +[2025-08-22 10:57:30] [Rank 0] step:9301/10000 train_time:885648ms step_avg:95.22ms +[2025-08-22 10:57:32] [Rank 0] step:9321/10000 train_time:887651ms step_avg:95.23ms +[2025-08-22 10:57:32] [Rank 0] step:9321/10000 train_time:887651ms step_avg:95.23ms +[2025-08-22 
10:57:34] [Rank 0] step:9341/10000 train_time:889649ms step_avg:95.24ms +[2025-08-22 10:57:34] [Rank 0] step:9341/10000 train_time:889649ms step_avg:95.24ms +[2025-08-22 10:57:36] [Rank 0] step:9361/10000 train_time:891652ms step_avg:95.25ms +[2025-08-22 10:57:36] [Rank 0] step:9361/10000 train_time:891652ms step_avg:95.25ms +[2025-08-22 10:57:38] [Rank 0] step:9381/10000 train_time:893757ms step_avg:95.27ms +[2025-08-22 10:57:38] [Rank 0] step:9381/10000 train_time:893757ms step_avg:95.27ms +[2025-08-22 10:57:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:57:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:57:54] [Rank 0] PRINT: step:9400/10000 val_loss:3.7892 svd_entropy: attn_qk:H=0.6161,top10E=0.48,eRank=77.0,q75/q25=121.45 attn_vo:H=0.6427,top10E=0.44,eRank=117.2,q75/q25=61.27 mlp_w1:H=0.7387,top10E=0.21,eRank=177.0,q75/q25=39.97 mlp_w2:H=0.7448,top10E=0.22,eRank=184.9,q75/q25=47.14 vo_prod:H=0.4929,top10E=0.67,eRank=41.4,q75/q25=2930.65 train_time:895827ms step_avg:95.30ms +[2025-08-22 10:57:54] [Rank 0] PRINT: step:9400/10000 val_loss:3.7892 svd_entropy: attn_qk:H=0.6161,top10E=0.48,eRank=77.0,q75/q25=121.45 attn_vo:H=0.6427,top10E=0.44,eRank=117.2,q75/q25=61.27 mlp_w1:H=0.7387,top10E=0.21,eRank=177.0,q75/q25=39.97 mlp_w2:H=0.7448,top10E=0.22,eRank=184.9,q75/q25=47.14 vo_prod:H=0.4929,top10E=0.67,eRank=41.4,q75/q25=2930.65 train_time:895827ms step_avg:95.30ms +[2025-08-22 10:57:54] [Rank 0] step:9401/10000 train_time:895844ms step_avg:95.29ms +[2025-08-22 10:57:54] [Rank 0] step:9401/10000 train_time:895844ms step_avg:95.29ms +[2025-08-22 10:57:56] [Rank 0] step:9421/10000 train_time:897821ms step_avg:95.30ms +[2025-08-22 10:57:56] [Rank 0] step:9421/10000 train_time:897821ms step_avg:95.30ms +[2025-08-22 10:57:58] [Rank 0] step:9441/10000 train_time:899820ms 
step_avg:95.31ms +[2025-08-22 10:57:58] [Rank 0] step:9441/10000 train_time:899820ms step_avg:95.31ms +[2025-08-22 10:58:00] [Rank 0] step:9461/10000 train_time:901825ms step_avg:95.32ms +[2025-08-22 10:58:00] [Rank 0] step:9461/10000 train_time:901825ms step_avg:95.32ms +[2025-08-22 10:58:02] [Rank 0] step:9481/10000 train_time:903831ms step_avg:95.33ms +[2025-08-22 10:58:02] [Rank 0] step:9481/10000 train_time:903831ms step_avg:95.33ms +[2025-08-22 10:58:04] [Rank 0] step:9501/10000 train_time:905840ms step_avg:95.34ms +[2025-08-22 10:58:04] [Rank 0] step:9501/10000 train_time:905840ms step_avg:95.34ms +[2025-08-22 10:58:06] [Rank 0] step:9521/10000 train_time:907835ms step_avg:95.35ms +[2025-08-22 10:58:06] [Rank 0] step:9521/10000 train_time:907835ms step_avg:95.35ms +[2025-08-22 10:58:08] [Rank 0] step:9541/10000 train_time:909835ms step_avg:95.36ms +[2025-08-22 10:58:08] [Rank 0] step:9541/10000 train_time:909835ms step_avg:95.36ms +[2025-08-22 10:58:10] [Rank 0] step:9561/10000 train_time:911830ms step_avg:95.37ms +[2025-08-22 10:58:10] [Rank 0] step:9561/10000 train_time:911830ms step_avg:95.37ms +[2025-08-22 10:58:12] [Rank 0] step:9581/10000 train_time:913837ms step_avg:95.38ms +[2025-08-22 10:58:12] [Rank 0] step:9581/10000 train_time:913837ms step_avg:95.38ms +[2025-08-22 10:58:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:58:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:58:27] [Rank 0] PRINT: step:9600/10000 val_loss:3.7808 svd_entropy: attn_qk:H=0.6165,top10E=0.48,eRank=77.2,q75/q25=121.80 attn_vo:H=0.6433,top10E=0.44,eRank=117.6,q75/q25=61.41 mlp_w1:H=0.7391,top10E=0.21,eRank=177.3,q75/q25=39.96 mlp_w2:H=0.7451,top10E=0.22,eRank=185.3,q75/q25=47.54 vo_prod:H=0.4938,top10E=0.67,eRank=41.6,q75/q25=2982.67 train_time:915874ms step_avg:95.40ms +[2025-08-22 10:58:27] [Rank 0] PRINT: step:9600/10000 val_loss:3.7808 svd_entropy: attn_qk:H=0.6165,top10E=0.48,eRank=77.2,q75/q25=121.80 attn_vo:H=0.6433,top10E=0.44,eRank=117.6,q75/q25=61.41 mlp_w1:H=0.7391,top10E=0.21,eRank=177.3,q75/q25=39.96 mlp_w2:H=0.7451,top10E=0.22,eRank=185.3,q75/q25=47.54 vo_prod:H=0.4938,top10E=0.67,eRank=41.6,q75/q25=2982.67 train_time:915874ms step_avg:95.40ms +[2025-08-22 10:58:27] [Rank 0] step:9601/10000 train_time:915892ms step_avg:95.40ms +[2025-08-22 10:58:27] [Rank 0] step:9601/10000 train_time:915892ms step_avg:95.40ms +[2025-08-22 10:58:29] [Rank 0] step:9621/10000 train_time:917897ms step_avg:95.41ms +[2025-08-22 10:58:29] [Rank 0] step:9621/10000 train_time:917897ms step_avg:95.41ms +[2025-08-22 10:58:31] [Rank 0] step:9641/10000 train_time:919901ms step_avg:95.42ms +[2025-08-22 10:58:31] [Rank 0] step:9641/10000 train_time:919901ms step_avg:95.42ms +[2025-08-22 10:58:33] [Rank 0] step:9661/10000 train_time:921932ms step_avg:95.43ms +[2025-08-22 10:58:33] [Rank 0] step:9661/10000 train_time:921932ms step_avg:95.43ms +[2025-08-22 10:58:35] [Rank 0] step:9681/10000 train_time:923959ms step_avg:95.44ms +[2025-08-22 10:58:35] [Rank 0] step:9681/10000 train_time:923959ms step_avg:95.44ms +[2025-08-22 10:58:37] [Rank 0] step:9701/10000 train_time:925997ms step_avg:95.45ms +[2025-08-22 10:58:37] [Rank 0] step:9701/10000 train_time:925997ms step_avg:95.45ms +[2025-08-22 10:58:39] [Rank 0] step:9721/10000 train_time:928120ms step_avg:95.48ms +[2025-08-22 10:58:39] [Rank 0] step:9721/10000 train_time:928120ms step_avg:95.48ms +[2025-08-22 
10:58:41] [Rank 0] step:9741/10000 train_time:930115ms step_avg:95.48ms +[2025-08-22 10:58:41] [Rank 0] step:9741/10000 train_time:930115ms step_avg:95.48ms +[2025-08-22 10:58:44] [Rank 0] step:9761/10000 train_time:932205ms step_avg:95.50ms +[2025-08-22 10:58:44] [Rank 0] step:9761/10000 train_time:932205ms step_avg:95.50ms +[2025-08-22 10:58:46] [Rank 0] step:9781/10000 train_time:934246ms step_avg:95.52ms +[2025-08-22 10:58:46] [Rank 0] step:9781/10000 train_time:934246ms step_avg:95.52ms +[2025-08-22 10:58:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:58:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:59:01] [Rank 0] PRINT: step:9800/10000 val_loss:3.7728 svd_entropy: attn_qk:H=0.6168,top10E=0.47,eRank=77.4,q75/q25=121.93 attn_vo:H=0.6437,top10E=0.44,eRank=117.8,q75/q25=61.70 mlp_w1:H=0.7393,top10E=0.21,eRank=177.6,q75/q25=39.95 mlp_w2:H=0.7453,top10E=0.22,eRank=185.5,q75/q25=48.02 vo_prod:H=0.4942,top10E=0.67,eRank=41.7,q75/q25=3014.88 train_time:936308ms step_avg:95.54ms +[2025-08-22 10:59:01] [Rank 0] PRINT: step:9800/10000 val_loss:3.7728 svd_entropy: attn_qk:H=0.6168,top10E=0.47,eRank=77.4,q75/q25=121.93 attn_vo:H=0.6437,top10E=0.44,eRank=117.8,q75/q25=61.70 mlp_w1:H=0.7393,top10E=0.21,eRank=177.6,q75/q25=39.95 mlp_w2:H=0.7453,top10E=0.22,eRank=185.5,q75/q25=48.02 vo_prod:H=0.4942,top10E=0.67,eRank=41.7,q75/q25=3014.88 train_time:936308ms step_avg:95.54ms +[2025-08-22 10:59:01] [Rank 0] step:9801/10000 train_time:936325ms step_avg:95.53ms +[2025-08-22 10:59:01] [Rank 0] step:9801/10000 train_time:936325ms step_avg:95.53ms +[2025-08-22 10:59:03] [Rank 0] step:9821/10000 train_time:938353ms step_avg:95.55ms +[2025-08-22 10:59:03] [Rank 0] step:9821/10000 train_time:938353ms step_avg:95.55ms +[2025-08-22 10:59:05] [Rank 0] step:9841/10000 train_time:940380ms 
step_avg:95.56ms +[2025-08-22 10:59:05] [Rank 0] step:9841/10000 train_time:940380ms step_avg:95.56ms +[2025-08-22 10:59:07] [Rank 0] step:9861/10000 train_time:942391ms step_avg:95.57ms +[2025-08-22 10:59:07] [Rank 0] step:9861/10000 train_time:942391ms step_avg:95.57ms +[2025-08-22 10:59:09] [Rank 0] step:9881/10000 train_time:944404ms step_avg:95.58ms +[2025-08-22 10:59:09] [Rank 0] step:9881/10000 train_time:944404ms step_avg:95.58ms +[2025-08-22 10:59:11] [Rank 0] step:9901/10000 train_time:946447ms step_avg:95.59ms +[2025-08-22 10:59:11] [Rank 0] step:9901/10000 train_time:946447ms step_avg:95.59ms +[2025-08-22 10:59:13] [Rank 0] step:9921/10000 train_time:948468ms step_avg:95.60ms +[2025-08-22 10:59:13] [Rank 0] step:9921/10000 train_time:948468ms step_avg:95.60ms +[2025-08-22 10:59:15] [Rank 0] step:9941/10000 train_time:950502ms step_avg:95.61ms +[2025-08-22 10:59:15] [Rank 0] step:9941/10000 train_time:950502ms step_avg:95.61ms +[2025-08-22 10:59:17] [Rank 0] step:9961/10000 train_time:952519ms step_avg:95.62ms +[2025-08-22 10:59:17] [Rank 0] step:9961/10000 train_time:952519ms step_avg:95.62ms +[2025-08-22 10:59:19] [Rank 0] step:9981/10000 train_time:954549ms step_avg:95.64ms +[2025-08-22 10:59:19] [Rank 0] step:9981/10000 train_time:954549ms step_avg:95.64ms +[2025-08-22 10:59:21] [Rank 0] step:10000/10000 train_time:956481ms step_avg:95.65ms +[2025-08-22 10:59:21] [Rank 0] step:10000/10000 train_time:956481ms step_avg:95.65ms +[2025-08-22 10:59:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 10:59:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 10:59:35] [Rank 0] PRINT: step:10000/10000 val_loss:3.7664 svd_entropy: attn_qk:H=0.6170,top10E=0.47,eRank=77.5,q75/q25=122.00 attn_vo:H=0.6440,top10E=0.44,eRank=117.9,q75/q25=61.74 mlp_w1:H=0.7395,top10E=0.21,eRank=177.7,q75/q25=39.98 mlp_w2:H=0.7455,top10E=0.22,eRank=185.7,q75/q25=48.36 vo_prod:H=0.4945,top10E=0.67,eRank=41.8,q75/q25=3035.60 train_time:956601ms step_avg:95.66ms +[2025-08-22 10:59:35] [Rank 0] PRINT: step:10000/10000 val_loss:3.7664 svd_entropy: attn_qk:H=0.6170,top10E=0.47,eRank=77.5,q75/q25=122.00 attn_vo:H=0.6440,top10E=0.44,eRank=117.9,q75/q25=61.74 mlp_w1:H=0.7395,top10E=0.21,eRank=177.7,q75/q25=39.98 mlp_w2:H=0.7455,top10E=0.22,eRank=185.7,q75/q25=48.36 vo_prod:H=0.4945,top10E=0.67,eRank=41.8,q75/q25=3035.60 train_time:956601ms step_avg:95.66ms +[2025-08-22 10:59:35] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 10:59:35 2025 --- +[2025-08-22 10:59:35] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 10:59:35 2025 --- +[2025-08-22 10:59:35] [Rank 0] PRINT: Peak memory allocated: 11208 MiB reserved: 17156 MiB +[2025-08-22 10:59:35] [Rank 0] PRINT: Peak memory allocated: 11208 MiB reserved: 17156 MiB diff --git a/logs_svd_gated/mode_4_param_gated_seed_42/config.json b/logs_svd_gated/mode_4_param_gated_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..f9e28a31056a215514eb5c2a5b300adf4fa218f5 --- /dev/null +++ b/logs_svd_gated/mode_4_param_gated_seed_42/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 4, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "c28d866a-9804-4aa0-8fde-8feb805eaa4b", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_4_param_gated_seed_42/training_log_c28d866a-9804-4aa0-8fde-8feb805eaa4b.txt b/logs_svd_gated/mode_4_param_gated_seed_42/training_log_c28d866a-9804-4aa0-8fde-8feb805eaa4b.txt new file mode 100644 index 0000000000000000000000000000000000000000..46d7dadee77a3191a31eb626b7b6cfec57170084 --- /dev/null +++ b/logs_svd_gated/mode_4_param_gated_seed_42/training_log_c28d866a-9804-4aa0-8fde-8feb805eaa4b.txt @@ -0,0 +1,2926 @@ +[2025-08-22 15:45:12] [Rank 0] PRINT: --- Script Start: Fri Aug 22 15:45:12 2025 --- +[2025-08-22 15:45:12] [Rank 0] PRINT: --- Script Start: Fri Aug 22 15:45:12 2025 --- +[2025-08-22 15:45:12] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=4, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 15:45:12] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=4, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 15:45:12] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 15:45:12] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 15:45:12] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 15:45:12] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 15:45:12] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_4_param_gated_seed_42 +[2025-08-22 15:45:12] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_4_param_gated_seed_42 +[2025-08-22 15:45:12] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 15:45:12] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 15:45:12] [Rank 0] PRINT: Constructing model... +[2025-08-22 15:45:12] [Rank 0] PRINT: Constructing model... +[2025-08-22 15:45:14] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 15:45:14] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 15:45:14] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 15:45:14] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 15:45:14] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 15:45:14] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 15:45:14] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 4 +[2025-08-22 15:45:14] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 4 +[2025-08-22 15:45:14] [Rank 0] PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: 0.05). +[2025-08-22 15:45:14] [Rank 0] PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: 0.05). +[2025-08-22 15:45:14] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 15:45:14] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 15:45:14] [Rank 0] PRINT: Muon optimizer is active with 36 parameters. +[2025-08-22 15:45:14] [Rank 0] PRINT: Muon optimizer is active with 36 parameters. +[2025-08-22 15:45:14] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 15:45:14] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 15:45:14] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 15:45:14] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 15:45:14] [Rank 0] PRINT: Starting warmup... +[2025-08-22 15:45:14] [Rank 0] PRINT: Starting warmup... +[2025-08-22 15:45:58] [Rank 0] PRINT: Warmup complete. +[2025-08-22 15:45:58] [Rank 0] PRINT: Warmup complete. +[2025-08-22 15:45:58] [Rank 0] PRINT: Starting training... +[2025-08-22 15:45:58] [Rank 0] PRINT: Starting training... 
+[2025-08-22 15:45:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:45:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:46:16] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 15:46:16] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 15:46:17] [Rank 0] step:21/10000 train_time:1803ms step_avg:85.84ms +[2025-08-22 15:46:17] [Rank 0] step:21/10000 train_time:1803ms step_avg:85.84ms +[2025-08-22 15:46:19] [Rank 0] step:41/10000 train_time:3569ms step_avg:87.04ms +[2025-08-22 15:46:19] [Rank 0] step:41/10000 train_time:3569ms step_avg:87.04ms +[2025-08-22 15:46:21] [Rank 0] step:61/10000 train_time:5332ms step_avg:87.41ms +[2025-08-22 15:46:21] [Rank 0] step:61/10000 train_time:5332ms step_avg:87.41ms +[2025-08-22 15:46:23] [Rank 0] step:81/10000 train_time:7097ms step_avg:87.62ms +[2025-08-22 15:46:23] [Rank 0] step:81/10000 train_time:7097ms step_avg:87.62ms +[2025-08-22 15:46:25] [Rank 0] step:101/10000 train_time:8864ms step_avg:87.76ms +[2025-08-22 15:46:25] [Rank 0] step:101/10000 train_time:8864ms step_avg:87.76ms +[2025-08-22 15:46:26] [Rank 0] step:121/10000 train_time:10688ms step_avg:88.33ms +[2025-08-22 15:46:26] [Rank 0] step:121/10000 
train_time:10688ms step_avg:88.33ms +[2025-08-22 15:46:28] [Rank 0] step:141/10000 train_time:12515ms step_avg:88.76ms +[2025-08-22 15:46:28] [Rank 0] step:141/10000 train_time:12515ms step_avg:88.76ms +[2025-08-22 15:46:30] [Rank 0] step:161/10000 train_time:14289ms step_avg:88.75ms +[2025-08-22 15:46:30] [Rank 0] step:161/10000 train_time:14289ms step_avg:88.75ms +[2025-08-22 15:46:32] [Rank 0] step:181/10000 train_time:16064ms step_avg:88.75ms +[2025-08-22 15:46:32] [Rank 0] step:181/10000 train_time:16064ms step_avg:88.75ms +[2025-08-22 15:46:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:46:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:46:47] [Rank 0] PRINT: step:200/10000 val_loss:7.2163 svd_entropy: attn_qk:H=0.3004,top10E=0.88,eRank=12.2,q75/q25=19.74 attn_vo:H=0.2079,top10E=0.97,eRank=4.7,q75/q25=76.80 mlp_w1:H=0.8206,top10E=0.36,eRank=235.4,q75/q25=2.48 mlp_w2:H=0.7503,top10E=0.35,eRank=175.1,q75/q25=10.54 vo_prod:H=0.0605,top10E=1.00,eRank=1.5,q75/q25=540.14 train_time:17852ms step_avg:89.26ms +[2025-08-22 15:46:47] [Rank 0] PRINT: step:200/10000 val_loss:7.2163 svd_entropy: attn_qk:H=0.3004,top10E=0.88,eRank=12.2,q75/q25=19.74 attn_vo:H=0.2079,top10E=0.97,eRank=4.7,q75/q25=76.80 mlp_w1:H=0.8206,top10E=0.36,eRank=235.4,q75/q25=2.48 mlp_w2:H=0.7503,top10E=0.35,eRank=175.1,q75/q25=10.54 vo_prod:H=0.0605,top10E=1.00,eRank=1.5,q75/q25=540.14 train_time:17852ms step_avg:89.26ms +[2025-08-22 15:46:48] [Rank 0] step:201/10000 train_time:17871ms step_avg:88.91ms +[2025-08-22 15:46:48] [Rank 0] step:201/10000 train_time:17871ms step_avg:88.91ms +[2025-08-22 15:46:49] [Rank 0] step:221/10000 train_time:19644ms step_avg:88.89ms +[2025-08-22 15:46:49] [Rank 0] step:221/10000 train_time:19644ms step_avg:88.89ms +[2025-08-22 15:46:51] [Rank 0] step:241/10000 
train_time:21418ms step_avg:88.87ms +[2025-08-22 15:46:51] [Rank 0] step:241/10000 train_time:21418ms step_avg:88.87ms +[2025-08-22 15:46:53] [Rank 0] step:261/10000 train_time:23193ms step_avg:88.86ms +[2025-08-22 15:46:53] [Rank 0] step:261/10000 train_time:23193ms step_avg:88.86ms +[2025-08-22 15:46:55] [Rank 0] step:281/10000 train_time:24971ms step_avg:88.86ms +[2025-08-22 15:46:55] [Rank 0] step:281/10000 train_time:24971ms step_avg:88.86ms +[2025-08-22 15:46:56] [Rank 0] step:301/10000 train_time:26748ms step_avg:88.86ms +[2025-08-22 15:46:56] [Rank 0] step:301/10000 train_time:26748ms step_avg:88.86ms +[2025-08-22 15:46:58] [Rank 0] step:321/10000 train_time:28528ms step_avg:88.87ms +[2025-08-22 15:46:58] [Rank 0] step:321/10000 train_time:28528ms step_avg:88.87ms +[2025-08-22 15:47:00] [Rank 0] step:341/10000 train_time:30308ms step_avg:88.88ms +[2025-08-22 15:47:00] [Rank 0] step:341/10000 train_time:30308ms step_avg:88.88ms +[2025-08-22 15:47:02] [Rank 0] step:361/10000 train_time:32088ms step_avg:88.89ms +[2025-08-22 15:47:02] [Rank 0] step:361/10000 train_time:32088ms step_avg:88.89ms +[2025-08-22 15:47:04] [Rank 0] step:381/10000 train_time:33868ms step_avg:88.89ms +[2025-08-22 15:47:04] [Rank 0] step:381/10000 train_time:33868ms step_avg:88.89ms +[2025-08-22 15:47:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:47:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:47:19] [Rank 0] PRINT: step:400/10000 val_loss:5.7263 svd_entropy: attn_qk:H=0.3537,top10E=0.81,eRank=18.9,q75/q25=26.46 attn_vo:H=0.3017,top10E=0.86,eRank=15.7,q75/q25=57.46 mlp_w1:H=0.7391,top10E=0.41,eRank=138.9,q75/q25=2.55 mlp_w2:H=0.6916,top10E=0.41,eRank=115.1,q75/q25=11.39 vo_prod:H=0.1413,top10E=0.97,eRank=3.6,q75/q25=442.58 train_time:35663ms step_avg:89.16ms +[2025-08-22 15:47:19] [Rank 0] PRINT: step:400/10000 val_loss:5.7263 svd_entropy: attn_qk:H=0.3537,top10E=0.81,eRank=18.9,q75/q25=26.46 attn_vo:H=0.3017,top10E=0.86,eRank=15.7,q75/q25=57.46 mlp_w1:H=0.7391,top10E=0.41,eRank=138.9,q75/q25=2.55 mlp_w2:H=0.6916,top10E=0.41,eRank=115.1,q75/q25=11.39 vo_prod:H=0.1413,top10E=0.97,eRank=3.6,q75/q25=442.58 train_time:35663ms step_avg:89.16ms +[2025-08-22 15:47:19] [Rank 0] step:401/10000 train_time:35682ms step_avg:88.98ms +[2025-08-22 15:47:19] [Rank 0] step:401/10000 train_time:35682ms step_avg:88.98ms +[2025-08-22 15:47:21] [Rank 0] step:421/10000 train_time:37449ms step_avg:88.95ms +[2025-08-22 15:47:21] [Rank 0] step:421/10000 train_time:37449ms step_avg:88.95ms +[2025-08-22 15:47:23] [Rank 0] step:441/10000 train_time:39226ms step_avg:88.95ms +[2025-08-22 15:47:23] [Rank 0] step:441/10000 train_time:39226ms step_avg:88.95ms +[2025-08-22 15:47:24] [Rank 0] step:461/10000 train_time:41005ms step_avg:88.95ms +[2025-08-22 15:47:24] [Rank 0] step:461/10000 train_time:41005ms step_avg:88.95ms +[2025-08-22 15:47:26] [Rank 0] step:481/10000 train_time:42786ms step_avg:88.95ms +[2025-08-22 15:47:26] [Rank 0] step:481/10000 train_time:42786ms step_avg:88.95ms +[2025-08-22 15:47:28] [Rank 0] step:501/10000 train_time:44566ms step_avg:88.95ms +[2025-08-22 15:47:28] [Rank 0] step:501/10000 train_time:44566ms step_avg:88.95ms +[2025-08-22 15:47:30] [Rank 0] step:521/10000 train_time:46421ms step_avg:89.10ms +[2025-08-22 15:47:30] [Rank 0] step:521/10000 train_time:46421ms step_avg:89.10ms +[2025-08-22 15:47:32] [Rank 0] step:541/10000 
train_time:48284ms step_avg:89.25ms +[2025-08-22 15:47:32] [Rank 0] step:541/10000 train_time:48284ms step_avg:89.25ms +[2025-08-22 15:47:33] [Rank 0] step:561/10000 train_time:50065ms step_avg:89.24ms +[2025-08-22 15:47:33] [Rank 0] step:561/10000 train_time:50065ms step_avg:89.24ms +[2025-08-22 15:47:35] [Rank 0] step:581/10000 train_time:51846ms step_avg:89.24ms +[2025-08-22 15:47:35] [Rank 0] step:581/10000 train_time:51846ms step_avg:89.24ms +[2025-08-22 15:47:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:47:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:47:51] [Rank 0] PRINT: step:600/10000 val_loss:5.3741 svd_entropy: attn_qk:H=0.4630,top10E=0.74,eRank=29.2,q75/q25=43.84 attn_vo:H=0.3388,top10E=0.83,eRank=23.0,q75/q25=47.33 mlp_w1:H=0.6971,top10E=0.42,eRank=109.3,q75/q25=3.20 mlp_w2:H=0.6665,top10E=0.42,eRank=100.7,q75/q25=13.10 vo_prod:H=0.1897,top10E=0.94,eRank=5.9,q75/q25=430.78 train_time:53641ms step_avg:89.40ms +[2025-08-22 15:47:51] [Rank 0] PRINT: step:600/10000 val_loss:5.3741 svd_entropy: attn_qk:H=0.4630,top10E=0.74,eRank=29.2,q75/q25=43.84 attn_vo:H=0.3388,top10E=0.83,eRank=23.0,q75/q25=47.33 mlp_w1:H=0.6971,top10E=0.42,eRank=109.3,q75/q25=3.20 mlp_w2:H=0.6665,top10E=0.42,eRank=100.7,q75/q25=13.10 vo_prod:H=0.1897,top10E=0.94,eRank=5.9,q75/q25=430.78 train_time:53641ms step_avg:89.40ms +[2025-08-22 15:47:51] [Rank 0] step:601/10000 train_time:53660ms step_avg:89.28ms +[2025-08-22 15:47:51] [Rank 0] step:601/10000 train_time:53660ms step_avg:89.28ms +[2025-08-22 15:47:53] [Rank 0] step:621/10000 train_time:55424ms step_avg:89.25ms +[2025-08-22 15:47:53] [Rank 0] step:621/10000 train_time:55424ms step_avg:89.25ms +[2025-08-22 15:47:54] [Rank 0] step:641/10000 train_time:57199ms step_avg:89.23ms +[2025-08-22 15:47:54] [Rank 0] step:641/10000 
train_time:57199ms step_avg:89.23ms +[2025-08-22 15:47:56] [Rank 0] step:661/10000 train_time:58975ms step_avg:89.22ms +[2025-08-22 15:47:56] [Rank 0] step:661/10000 train_time:58975ms step_avg:89.22ms +[2025-08-22 15:47:58] [Rank 0] step:681/10000 train_time:60753ms step_avg:89.21ms +[2025-08-22 15:47:58] [Rank 0] step:681/10000 train_time:60753ms step_avg:89.21ms +[2025-08-22 15:48:00] [Rank 0] step:701/10000 train_time:62531ms step_avg:89.20ms +[2025-08-22 15:48:00] [Rank 0] step:701/10000 train_time:62531ms step_avg:89.20ms +[2025-08-22 15:48:02] [Rank 0] step:721/10000 train_time:64310ms step_avg:89.20ms +[2025-08-22 15:48:02] [Rank 0] step:721/10000 train_time:64310ms step_avg:89.20ms +[2025-08-22 15:48:03] [Rank 0] step:741/10000 train_time:66091ms step_avg:89.19ms +[2025-08-22 15:48:03] [Rank 0] step:741/10000 train_time:66091ms step_avg:89.19ms +[2025-08-22 15:48:05] [Rank 0] step:761/10000 train_time:67884ms step_avg:89.20ms +[2025-08-22 15:48:05] [Rank 0] step:761/10000 train_time:67884ms step_avg:89.20ms +[2025-08-22 15:48:07] [Rank 0] step:781/10000 train_time:69678ms step_avg:89.22ms +[2025-08-22 15:48:07] [Rank 0] step:781/10000 train_time:69678ms step_avg:89.22ms +[2025-08-22 15:48:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:48:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:48:23] [Rank 0] PRINT: step:800/10000 val_loss:5.0942 svd_entropy: attn_qk:H=0.4740,top10E=0.72,eRank=32.2,q75/q25=58.81 attn_vo:H=0.3464,top10E=0.82,eRank=28.3,q75/q25=44.53 mlp_w1:H=0.6751,top10E=0.44,eRank=101.0,q75/q25=6.07 mlp_w2:H=0.6537,top10E=0.43,eRank=100.0,q75/q25=17.48 vo_prod:H=0.2120,top10E=0.91,eRank=8.5,q75/q25=475.06 train_time:71485ms step_avg:89.36ms +[2025-08-22 15:48:23] [Rank 0] PRINT: step:800/10000 val_loss:5.0942 svd_entropy: attn_qk:H=0.4740,top10E=0.72,eRank=32.2,q75/q25=58.81 attn_vo:H=0.3464,top10E=0.82,eRank=28.3,q75/q25=44.53 mlp_w1:H=0.6751,top10E=0.44,eRank=101.0,q75/q25=6.07 mlp_w2:H=0.6537,top10E=0.43,eRank=100.0,q75/q25=17.48 vo_prod:H=0.2120,top10E=0.91,eRank=8.5,q75/q25=475.06 train_time:71485ms step_avg:89.36ms +[2025-08-22 15:48:23] [Rank 0] step:801/10000 train_time:71504ms step_avg:89.27ms +[2025-08-22 15:48:23] [Rank 0] step:801/10000 train_time:71504ms step_avg:89.27ms +[2025-08-22 15:48:24] [Rank 0] step:821/10000 train_time:73274ms step_avg:89.25ms +[2025-08-22 15:48:24] [Rank 0] step:821/10000 train_time:73274ms step_avg:89.25ms +[2025-08-22 15:48:26] [Rank 0] step:841/10000 train_time:75059ms step_avg:89.25ms +[2025-08-22 15:48:26] [Rank 0] step:841/10000 train_time:75059ms step_avg:89.25ms +[2025-08-22 15:48:28] [Rank 0] step:861/10000 train_time:76847ms step_avg:89.25ms +[2025-08-22 15:48:28] [Rank 0] step:861/10000 train_time:76847ms step_avg:89.25ms +[2025-08-22 15:48:30] [Rank 0] step:881/10000 train_time:78635ms step_avg:89.26ms +[2025-08-22 15:48:30] [Rank 0] step:881/10000 train_time:78635ms step_avg:89.26ms +[2025-08-22 15:48:32] [Rank 0] step:901/10000 train_time:80426ms step_avg:89.26ms +[2025-08-22 15:48:32] [Rank 0] step:901/10000 train_time:80426ms step_avg:89.26ms +[2025-08-22 15:48:34] [Rank 0] step:921/10000 train_time:82295ms step_avg:89.35ms +[2025-08-22 15:48:34] [Rank 0] step:921/10000 train_time:82295ms step_avg:89.35ms +[2025-08-22 15:48:35] [Rank 0] step:941/10000 
train_time:84157ms step_avg:89.43ms +[2025-08-22 15:48:35] [Rank 0] step:941/10000 train_time:84157ms step_avg:89.43ms +[2025-08-22 15:48:37] [Rank 0] step:961/10000 train_time:85950ms step_avg:89.44ms +[2025-08-22 15:48:37] [Rank 0] step:961/10000 train_time:85950ms step_avg:89.44ms +[2025-08-22 15:48:39] [Rank 0] step:981/10000 train_time:87743ms step_avg:89.44ms +[2025-08-22 15:48:39] [Rank 0] step:981/10000 train_time:87743ms step_avg:89.44ms +[2025-08-22 15:48:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:48:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:48:55] [Rank 0] PRINT: step:1000/10000 val_loss:4.9144 svd_entropy: attn_qk:H=0.4864,top10E=0.71,eRank=36.0,q75/q25=72.89 attn_vo:H=0.3696,top10E=0.80,eRank=33.0,q75/q25=57.44 mlp_w1:H=0.6704,top10E=0.44,eRank=104.1,q75/q25=9.79 mlp_w2:H=0.6478,top10E=0.44,eRank=103.9,q75/q25=20.72 vo_prod:H=0.2420,top10E=0.89,eRank=10.8,q75/q25=719.01 train_time:89551ms step_avg:89.55ms +[2025-08-22 15:48:55] [Rank 0] PRINT: step:1000/10000 val_loss:4.9144 svd_entropy: attn_qk:H=0.4864,top10E=0.71,eRank=36.0,q75/q25=72.89 attn_vo:H=0.3696,top10E=0.80,eRank=33.0,q75/q25=57.44 mlp_w1:H=0.6704,top10E=0.44,eRank=104.1,q75/q25=9.79 mlp_w2:H=0.6478,top10E=0.44,eRank=103.9,q75/q25=20.72 vo_prod:H=0.2420,top10E=0.89,eRank=10.8,q75/q25=719.01 train_time:89551ms step_avg:89.55ms +[2025-08-22 15:48:55] [Rank 0] step:1001/10000 train_time:89569ms step_avg:89.48ms +[2025-08-22 15:48:55] [Rank 0] step:1001/10000 train_time:89569ms step_avg:89.48ms +[2025-08-22 15:48:56] [Rank 0] step:1021/10000 train_time:91349ms step_avg:89.47ms +[2025-08-22 15:48:56] [Rank 0] step:1021/10000 train_time:91349ms step_avg:89.47ms +[2025-08-22 15:48:58] [Rank 0] step:1041/10000 train_time:93136ms step_avg:89.47ms +[2025-08-22 15:48:58] [Rank 0] step:1041/10000 
train_time:93136ms step_avg:89.47ms +[2025-08-22 15:49:00] [Rank 0] step:1061/10000 train_time:94926ms step_avg:89.47ms +[2025-08-22 15:49:00] [Rank 0] step:1061/10000 train_time:94926ms step_avg:89.47ms +[2025-08-22 15:49:02] [Rank 0] step:1081/10000 train_time:96719ms step_avg:89.47ms +[2025-08-22 15:49:02] [Rank 0] step:1081/10000 train_time:96719ms step_avg:89.47ms +[2025-08-22 15:49:04] [Rank 0] step:1101/10000 train_time:98511ms step_avg:89.47ms +[2025-08-22 15:49:04] [Rank 0] step:1101/10000 train_time:98511ms step_avg:89.47ms +[2025-08-22 15:49:05] [Rank 0] step:1121/10000 train_time:100304ms step_avg:89.48ms +[2025-08-22 15:49:05] [Rank 0] step:1121/10000 train_time:100304ms step_avg:89.48ms +[2025-08-22 15:49:07] [Rank 0] step:1141/10000 train_time:102098ms step_avg:89.48ms +[2025-08-22 15:49:07] [Rank 0] step:1141/10000 train_time:102098ms step_avg:89.48ms +[2025-08-22 15:49:09] [Rank 0] step:1161/10000 train_time:103893ms step_avg:89.49ms +[2025-08-22 15:49:09] [Rank 0] step:1161/10000 train_time:103893ms step_avg:89.49ms +[2025-08-22 15:49:11] [Rank 0] step:1181/10000 train_time:105688ms step_avg:89.49ms +[2025-08-22 15:49:11] [Rank 0] step:1181/10000 train_time:105688ms step_avg:89.49ms +[2025-08-22 15:49:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:49:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:49:26] [Rank 0] PRINT: step:1200/10000 val_loss:4.7568 svd_entropy: attn_qk:H=0.4900,top10E=0.70,eRank=38.1,q75/q25=86.10 attn_vo:H=0.3755,top10E=0.79,eRank=36.7,q75/q25=59.13 mlp_w1:H=0.6594,top10E=0.45,eRank=103.7,q75/q25=12.84 mlp_w2:H=0.6393,top10E=0.45,eRank=105.5,q75/q25=22.49 vo_prod:H=0.2471,top10E=0.88,eRank=12.7,q75/q25=838.13 train_time:107499ms step_avg:89.58ms +[2025-08-22 15:49:26] [Rank 0] PRINT: step:1200/10000 val_loss:4.7568 svd_entropy: attn_qk:H=0.4900,top10E=0.70,eRank=38.1,q75/q25=86.10 attn_vo:H=0.3755,top10E=0.79,eRank=36.7,q75/q25=59.13 mlp_w1:H=0.6594,top10E=0.45,eRank=103.7,q75/q25=12.84 mlp_w2:H=0.6393,top10E=0.45,eRank=105.5,q75/q25=22.49 vo_prod:H=0.2471,top10E=0.88,eRank=12.7,q75/q25=838.13 train_time:107499ms step_avg:89.58ms +[2025-08-22 15:49:27] [Rank 0] step:1201/10000 train_time:107518ms step_avg:89.52ms +[2025-08-22 15:49:27] [Rank 0] step:1201/10000 train_time:107518ms step_avg:89.52ms +[2025-08-22 15:49:28] [Rank 0] step:1221/10000 train_time:109290ms step_avg:89.51ms +[2025-08-22 15:49:28] [Rank 0] step:1221/10000 train_time:109290ms step_avg:89.51ms +[2025-08-22 15:49:30] [Rank 0] step:1241/10000 train_time:111083ms step_avg:89.51ms +[2025-08-22 15:49:30] [Rank 0] step:1241/10000 train_time:111083ms step_avg:89.51ms +[2025-08-22 15:49:32] [Rank 0] step:1261/10000 train_time:112880ms step_avg:89.52ms +[2025-08-22 15:49:32] [Rank 0] step:1261/10000 train_time:112880ms step_avg:89.52ms +[2025-08-22 15:49:34] [Rank 0] step:1281/10000 train_time:114676ms step_avg:89.52ms +[2025-08-22 15:49:34] [Rank 0] step:1281/10000 train_time:114676ms step_avg:89.52ms +[2025-08-22 15:49:36] [Rank 0] step:1301/10000 train_time:116474ms step_avg:89.53ms +[2025-08-22 15:49:36] [Rank 0] step:1301/10000 train_time:116474ms step_avg:89.53ms +[2025-08-22 15:49:37] [Rank 0] step:1321/10000 train_time:118325ms step_avg:89.57ms +[2025-08-22 15:49:37] [Rank 0] step:1321/10000 train_time:118325ms step_avg:89.57ms +[2025-08-22 15:49:39] 
[Rank 0] step:1341/10000 train_time:120227ms step_avg:89.65ms +[2025-08-22 15:49:39] [Rank 0] step:1341/10000 train_time:120227ms step_avg:89.65ms +[2025-08-22 15:49:41] [Rank 0] step:1361/10000 train_time:122025ms step_avg:89.66ms +[2025-08-22 15:49:41] [Rank 0] step:1361/10000 train_time:122025ms step_avg:89.66ms +[2025-08-22 15:49:43] [Rank 0] step:1381/10000 train_time:123824ms step_avg:89.66ms +[2025-08-22 15:49:43] [Rank 0] step:1381/10000 train_time:123824ms step_avg:89.66ms +[2025-08-22 15:49:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:49:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:49:58] [Rank 0] PRINT: step:1400/10000 val_loss:4.6697 svd_entropy: attn_qk:H=0.4867,top10E=0.70,eRank=39.4,q75/q25=95.64 attn_vo:H=0.3860,top10E=0.79,eRank=40.3,q75/q25=61.88 mlp_w1:H=0.6573,top10E=0.45,eRank=106.7,q75/q25=14.61 mlp_w2:H=0.6383,top10E=0.45,eRank=109.3,q75/q25=23.39 vo_prod:H=0.2662,top10E=0.87,eRank=14.7,q75/q25=1012.05 train_time:125637ms step_avg:89.74ms +[2025-08-22 15:49:58] [Rank 0] PRINT: step:1400/10000 val_loss:4.6697 svd_entropy: attn_qk:H=0.4867,top10E=0.70,eRank=39.4,q75/q25=95.64 attn_vo:H=0.3860,top10E=0.79,eRank=40.3,q75/q25=61.88 mlp_w1:H=0.6573,top10E=0.45,eRank=106.7,q75/q25=14.61 mlp_w2:H=0.6383,top10E=0.45,eRank=109.3,q75/q25=23.39 vo_prod:H=0.2662,top10E=0.87,eRank=14.7,q75/q25=1012.05 train_time:125637ms step_avg:89.74ms +[2025-08-22 15:49:58] [Rank 0] step:1401/10000 train_time:125655ms step_avg:89.69ms +[2025-08-22 15:49:58] [Rank 0] step:1401/10000 train_time:125655ms step_avg:89.69ms +[2025-08-22 15:50:00] [Rank 0] step:1421/10000 train_time:127438ms step_avg:89.68ms +[2025-08-22 15:50:00] [Rank 0] step:1421/10000 train_time:127438ms step_avg:89.68ms +[2025-08-22 15:50:02] [Rank 0] step:1441/10000 train_time:129231ms step_avg:89.68ms 
+[2025-08-22 15:50:02] [Rank 0] step:1441/10000 train_time:129231ms step_avg:89.68ms +[2025-08-22 15:50:04] [Rank 0] step:1461/10000 train_time:131025ms step_avg:89.68ms +[2025-08-22 15:50:04] [Rank 0] step:1461/10000 train_time:131025ms step_avg:89.68ms +[2025-08-22 15:50:06] [Rank 0] step:1481/10000 train_time:132820ms step_avg:89.68ms +[2025-08-22 15:50:06] [Rank 0] step:1481/10000 train_time:132820ms step_avg:89.68ms +[2025-08-22 15:50:07] [Rank 0] step:1501/10000 train_time:134623ms step_avg:89.69ms +[2025-08-22 15:50:07] [Rank 0] step:1501/10000 train_time:134623ms step_avg:89.69ms +[2025-08-22 15:50:09] [Rank 0] step:1521/10000 train_time:136428ms step_avg:89.70ms +[2025-08-22 15:50:09] [Rank 0] step:1521/10000 train_time:136428ms step_avg:89.70ms +[2025-08-22 15:50:11] [Rank 0] step:1541/10000 train_time:138233ms step_avg:89.70ms +[2025-08-22 15:50:11] [Rank 0] step:1541/10000 train_time:138233ms step_avg:89.70ms +[2025-08-22 15:50:13] [Rank 0] step:1561/10000 train_time:140040ms step_avg:89.71ms +[2025-08-22 15:50:13] [Rank 0] step:1561/10000 train_time:140040ms step_avg:89.71ms +[2025-08-22 15:50:15] [Rank 0] step:1581/10000 train_time:141846ms step_avg:89.72ms +[2025-08-22 15:50:15] [Rank 0] step:1581/10000 train_time:141846ms step_avg:89.72ms +[2025-08-22 15:50:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:50:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:50:30] [Rank 0] PRINT: step:1600/10000 val_loss:4.5617 svd_entropy: attn_qk:H=0.4808,top10E=0.70,eRank=40.3,q75/q25=102.81 attn_vo:H=0.3879,top10E=0.78,eRank=43.1,q75/q25=64.43 mlp_w1:H=0.6521,top10E=0.46,eRank=107.9,q75/q25=15.63 mlp_w2:H=0.6367,top10E=0.45,eRank=112.2,q75/q25=23.65 vo_prod:H=0.2653,top10E=0.86,eRank=16.0,q75/q25=1196.93 train_time:143667ms step_avg:89.79ms +[2025-08-22 15:50:30] [Rank 0] PRINT: step:1600/10000 val_loss:4.5617 svd_entropy: attn_qk:H=0.4808,top10E=0.70,eRank=40.3,q75/q25=102.81 attn_vo:H=0.3879,top10E=0.78,eRank=43.1,q75/q25=64.43 mlp_w1:H=0.6521,top10E=0.46,eRank=107.9,q75/q25=15.63 mlp_w2:H=0.6367,top10E=0.45,eRank=112.2,q75/q25=23.65 vo_prod:H=0.2653,top10E=0.86,eRank=16.0,q75/q25=1196.93 train_time:143667ms step_avg:89.79ms +[2025-08-22 15:50:30] [Rank 0] step:1601/10000 train_time:143684ms step_avg:89.75ms +[2025-08-22 15:50:30] [Rank 0] step:1601/10000 train_time:143684ms step_avg:89.75ms +[2025-08-22 15:50:32] [Rank 0] step:1621/10000 train_time:145482ms step_avg:89.75ms +[2025-08-22 15:50:32] [Rank 0] step:1621/10000 train_time:145482ms step_avg:89.75ms +[2025-08-22 15:50:34] [Rank 0] step:1641/10000 train_time:147283ms step_avg:89.75ms +[2025-08-22 15:50:34] [Rank 0] step:1641/10000 train_time:147283ms step_avg:89.75ms +[2025-08-22 15:50:36] [Rank 0] step:1661/10000 train_time:149085ms step_avg:89.76ms +[2025-08-22 15:50:36] [Rank 0] step:1661/10000 train_time:149085ms step_avg:89.76ms +[2025-08-22 15:50:38] [Rank 0] step:1681/10000 train_time:150889ms step_avg:89.76ms +[2025-08-22 15:50:38] [Rank 0] step:1681/10000 train_time:150889ms step_avg:89.76ms +[2025-08-22 15:50:39] [Rank 0] step:1701/10000 train_time:152768ms step_avg:89.81ms +[2025-08-22 15:50:39] [Rank 0] step:1701/10000 train_time:152768ms step_avg:89.81ms +[2025-08-22 15:50:41] [Rank 0] step:1721/10000 train_time:154640ms step_avg:89.85ms +[2025-08-22 15:50:41] [Rank 0] step:1721/10000 train_time:154640ms step_avg:89.85ms +[2025-08-22 
15:50:43] [Rank 0] step:1741/10000 train_time:156446ms step_avg:89.86ms +[2025-08-22 15:50:43] [Rank 0] step:1741/10000 train_time:156446ms step_avg:89.86ms +[2025-08-22 15:50:45] [Rank 0] step:1761/10000 train_time:158252ms step_avg:89.86ms +[2025-08-22 15:50:45] [Rank 0] step:1761/10000 train_time:158252ms step_avg:89.86ms +[2025-08-22 15:50:47] [Rank 0] step:1781/10000 train_time:160058ms step_avg:89.87ms +[2025-08-22 15:50:47] [Rank 0] step:1781/10000 train_time:160058ms step_avg:89.87ms +[2025-08-22 15:50:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:50:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:51:02] [Rank 0] PRINT: step:1800/10000 val_loss:4.5005 svd_entropy: attn_qk:H=0.4852,top10E=0.70,eRank=41.9,q75/q25=110.53 attn_vo:H=0.3950,top10E=0.78,eRank=45.3,q75/q25=69.55 mlp_w1:H=0.6467,top10E=0.47,eRank=108.9,q75/q25=16.52 mlp_w2:H=0.6322,top10E=0.46,eRank=113.6,q75/q25=23.68 vo_prod:H=0.2699,top10E=0.86,eRank=16.7,q75/q25=1488.98 train_time:161879ms step_avg:89.93ms +[2025-08-22 15:51:02] [Rank 0] PRINT: step:1800/10000 val_loss:4.5005 svd_entropy: attn_qk:H=0.4852,top10E=0.70,eRank=41.9,q75/q25=110.53 attn_vo:H=0.3950,top10E=0.78,eRank=45.3,q75/q25=69.55 mlp_w1:H=0.6467,top10E=0.47,eRank=108.9,q75/q25=16.52 mlp_w2:H=0.6322,top10E=0.46,eRank=113.6,q75/q25=23.68 vo_prod:H=0.2699,top10E=0.86,eRank=16.7,q75/q25=1488.98 train_time:161879ms step_avg:89.93ms +[2025-08-22 15:51:02] [Rank 0] step:1801/10000 train_time:161898ms step_avg:89.89ms +[2025-08-22 15:51:02] [Rank 0] step:1801/10000 train_time:161898ms step_avg:89.89ms +[2025-08-22 15:51:04] [Rank 0] step:1821/10000 train_time:163702ms step_avg:89.90ms +[2025-08-22 15:51:04] [Rank 0] step:1821/10000 train_time:163702ms step_avg:89.90ms +[2025-08-22 15:51:06] [Rank 0] step:1841/10000 train_time:165504ms 
step_avg:89.90ms +[2025-08-22 15:51:06] [Rank 0] step:1841/10000 train_time:165504ms step_avg:89.90ms +[2025-08-22 15:51:08] [Rank 0] step:1861/10000 train_time:167307ms step_avg:89.90ms +[2025-08-22 15:51:08] [Rank 0] step:1861/10000 train_time:167307ms step_avg:89.90ms +[2025-08-22 15:51:10] [Rank 0] step:1881/10000 train_time:169110ms step_avg:89.90ms +[2025-08-22 15:51:10] [Rank 0] step:1881/10000 train_time:169110ms step_avg:89.90ms +[2025-08-22 15:51:11] [Rank 0] step:1901/10000 train_time:170913ms step_avg:89.91ms +[2025-08-22 15:51:11] [Rank 0] step:1901/10000 train_time:170913ms step_avg:89.91ms +[2025-08-22 15:51:13] [Rank 0] step:1921/10000 train_time:172718ms step_avg:89.91ms +[2025-08-22 15:51:13] [Rank 0] step:1921/10000 train_time:172718ms step_avg:89.91ms +[2025-08-22 15:51:15] [Rank 0] step:1941/10000 train_time:174525ms step_avg:89.91ms +[2025-08-22 15:51:15] [Rank 0] step:1941/10000 train_time:174525ms step_avg:89.91ms +[2025-08-22 15:51:17] [Rank 0] step:1961/10000 train_time:176331ms step_avg:89.92ms +[2025-08-22 15:51:17] [Rank 0] step:1961/10000 train_time:176331ms step_avg:89.92ms +[2025-08-22 15:51:19] [Rank 0] step:1981/10000 train_time:178140ms step_avg:89.92ms +[2025-08-22 15:51:19] [Rank 0] step:1981/10000 train_time:178140ms step_avg:89.92ms +[2025-08-22 15:51:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:51:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:51:34] [Rank 0] PRINT: step:2000/10000 val_loss:4.4604 svd_entropy: attn_qk:H=0.4865,top10E=0.69,eRank=43.1,q75/q25=118.12 attn_vo:H=0.3943,top10E=0.77,eRank=47.2,q75/q25=70.63 mlp_w1:H=0.6410,top10E=0.47,eRank=109.4,q75/q25=17.14 mlp_w2:H=0.6293,top10E=0.46,eRank=114.9,q75/q25=23.52 vo_prod:H=0.2702,top10E=0.85,eRank=17.7,q75/q25=1659.58 train_time:179962ms step_avg:89.98ms +[2025-08-22 15:51:34] [Rank 0] PRINT: step:2000/10000 val_loss:4.4604 svd_entropy: attn_qk:H=0.4865,top10E=0.69,eRank=43.1,q75/q25=118.12 attn_vo:H=0.3943,top10E=0.77,eRank=47.2,q75/q25=70.63 mlp_w1:H=0.6410,top10E=0.47,eRank=109.4,q75/q25=17.14 mlp_w2:H=0.6293,top10E=0.46,eRank=114.9,q75/q25=23.52 vo_prod:H=0.2702,top10E=0.85,eRank=17.7,q75/q25=1659.58 train_time:179962ms step_avg:89.98ms +[2025-08-22 15:51:34] [Rank 0] step:2001/10000 train_time:179980ms step_avg:89.95ms +[2025-08-22 15:51:34] [Rank 0] step:2001/10000 train_time:179980ms step_avg:89.95ms +[2025-08-22 15:51:36] [Rank 0] step:2021/10000 train_time:181777ms step_avg:89.94ms +[2025-08-22 15:51:36] [Rank 0] step:2021/10000 train_time:181777ms step_avg:89.94ms +[2025-08-22 15:51:39] [Rank 0] step:2041/10000 train_time:184232ms step_avg:90.27ms +[2025-08-22 15:51:39] [Rank 0] step:2041/10000 train_time:184232ms step_avg:90.27ms +[2025-08-22 15:51:40] [Rank 0] step:2061/10000 train_time:186037ms step_avg:90.27ms +[2025-08-22 15:51:40] [Rank 0] step:2061/10000 train_time:186037ms step_avg:90.27ms +[2025-08-22 15:51:42] [Rank 0] step:2081/10000 train_time:187911ms step_avg:90.30ms +[2025-08-22 15:51:42] [Rank 0] step:2081/10000 train_time:187911ms step_avg:90.30ms +[2025-08-22 15:51:44] [Rank 0] step:2101/10000 train_time:189799ms step_avg:90.34ms +[2025-08-22 15:51:44] [Rank 0] step:2101/10000 train_time:189799ms step_avg:90.34ms +[2025-08-22 15:51:46] [Rank 0] step:2121/10000 train_time:191608ms step_avg:90.34ms +[2025-08-22 15:51:46] [Rank 0] step:2121/10000 train_time:191608ms step_avg:90.34ms +[2025-08-22 
15:51:48] [Rank 0] step:2141/10000 train_time:193418ms step_avg:90.34ms +[2025-08-22 15:51:48] [Rank 0] step:2141/10000 train_time:193418ms step_avg:90.34ms +[2025-08-22 15:51:50] [Rank 0] step:2161/10000 train_time:195230ms step_avg:90.34ms +[2025-08-22 15:51:50] [Rank 0] step:2161/10000 train_time:195230ms step_avg:90.34ms +[2025-08-22 15:51:51] [Rank 0] step:2181/10000 train_time:197041ms step_avg:90.34ms +[2025-08-22 15:51:51] [Rank 0] step:2181/10000 train_time:197041ms step_avg:90.34ms +[2025-08-22 15:51:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:51:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:52:07] [Rank 0] PRINT: step:2200/10000 val_loss:4.4171 svd_entropy: attn_qk:H=0.4899,top10E=0.69,eRank=44.2,q75/q25=114.72 attn_vo:H=0.3976,top10E=0.77,eRank=48.7,q75/q25=75.01 mlp_w1:H=0.6370,top10E=0.48,eRank=110.1,q75/q25=17.50 mlp_w2:H=0.6253,top10E=0.46,eRank=115.7,q75/q25=23.30 vo_prod:H=0.2711,top10E=0.85,eRank=18.0,q75/q25=2195.39 train_time:198867ms step_avg:90.39ms +[2025-08-22 15:52:07] [Rank 0] PRINT: step:2200/10000 val_loss:4.4171 svd_entropy: attn_qk:H=0.4899,top10E=0.69,eRank=44.2,q75/q25=114.72 attn_vo:H=0.3976,top10E=0.77,eRank=48.7,q75/q25=75.01 mlp_w1:H=0.6370,top10E=0.48,eRank=110.1,q75/q25=17.50 mlp_w2:H=0.6253,top10E=0.46,eRank=115.7,q75/q25=23.30 vo_prod:H=0.2711,top10E=0.85,eRank=18.0,q75/q25=2195.39 train_time:198867ms step_avg:90.39ms +[2025-08-22 15:52:07] [Rank 0] step:2201/10000 train_time:198884ms step_avg:90.36ms +[2025-08-22 15:52:07] [Rank 0] step:2201/10000 train_time:198884ms step_avg:90.36ms +[2025-08-22 15:52:09] [Rank 0] step:2221/10000 train_time:200679ms step_avg:90.36ms +[2025-08-22 15:52:09] [Rank 0] step:2221/10000 train_time:200679ms step_avg:90.36ms +[2025-08-22 15:52:11] [Rank 0] step:2241/10000 train_time:202521ms 
step_avg:90.37ms +[2025-08-22 15:52:11] [Rank 0] step:2241/10000 train_time:202521ms step_avg:90.37ms +[2025-08-22 15:52:13] [Rank 0] step:2261/10000 train_time:204370ms step_avg:90.39ms +[2025-08-22 15:52:13] [Rank 0] step:2261/10000 train_time:204370ms step_avg:90.39ms +[2025-08-22 15:52:14] [Rank 0] step:2281/10000 train_time:206219ms step_avg:90.41ms +[2025-08-22 15:52:14] [Rank 0] step:2281/10000 train_time:206219ms step_avg:90.41ms +[2025-08-22 15:52:16] [Rank 0] step:2301/10000 train_time:208068ms step_avg:90.42ms +[2025-08-22 15:52:16] [Rank 0] step:2301/10000 train_time:208068ms step_avg:90.42ms +[2025-08-22 15:52:18] [Rank 0] step:2321/10000 train_time:209918ms step_avg:90.44ms +[2025-08-22 15:52:18] [Rank 0] step:2321/10000 train_time:209918ms step_avg:90.44ms +[2025-08-22 15:52:20] [Rank 0] step:2341/10000 train_time:211769ms step_avg:90.46ms +[2025-08-22 15:52:20] [Rank 0] step:2341/10000 train_time:211769ms step_avg:90.46ms +[2025-08-22 15:52:22] [Rank 0] step:2361/10000 train_time:213621ms step_avg:90.48ms +[2025-08-22 15:52:22] [Rank 0] step:2361/10000 train_time:213621ms step_avg:90.48ms +[2025-08-22 15:52:24] [Rank 0] step:2381/10000 train_time:215474ms step_avg:90.50ms +[2025-08-22 15:52:24] [Rank 0] step:2381/10000 train_time:215474ms step_avg:90.50ms +[2025-08-22 15:52:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:52:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:52:39] [Rank 0] PRINT: step:2400/10000 val_loss:4.3497 svd_entropy: attn_qk:H=0.4941,top10E=0.68,eRank=45.3,q75/q25=119.31 attn_vo:H=0.3995,top10E=0.77,eRank=50.1,q75/q25=76.18 mlp_w1:H=0.6338,top10E=0.48,eRank=111.0,q75/q25=17.55 mlp_w2:H=0.6214,top10E=0.47,eRank=116.1,q75/q25=22.93 vo_prod:H=0.2707,top10E=0.85,eRank=18.2,q75/q25=2543.85 train_time:217340ms step_avg:90.56ms +[2025-08-22 15:52:39] [Rank 0] PRINT: step:2400/10000 val_loss:4.3497 svd_entropy: attn_qk:H=0.4941,top10E=0.68,eRank=45.3,q75/q25=119.31 attn_vo:H=0.3995,top10E=0.77,eRank=50.1,q75/q25=76.18 mlp_w1:H=0.6338,top10E=0.48,eRank=111.0,q75/q25=17.55 mlp_w2:H=0.6214,top10E=0.47,eRank=116.1,q75/q25=22.93 vo_prod:H=0.2707,top10E=0.85,eRank=18.2,q75/q25=2543.85 train_time:217340ms step_avg:90.56ms +[2025-08-22 15:52:39] [Rank 0] step:2401/10000 train_time:217359ms step_avg:90.53ms +[2025-08-22 15:52:39] [Rank 0] step:2401/10000 train_time:217359ms step_avg:90.53ms +[2025-08-22 15:52:41] [Rank 0] step:2421/10000 train_time:219188ms step_avg:90.54ms +[2025-08-22 15:52:41] [Rank 0] step:2421/10000 train_time:219188ms step_avg:90.54ms +[2025-08-22 15:52:43] [Rank 0] step:2441/10000 train_time:221032ms step_avg:90.55ms +[2025-08-22 15:52:43] [Rank 0] step:2441/10000 train_time:221032ms step_avg:90.55ms +[2025-08-22 15:52:45] [Rank 0] step:2461/10000 train_time:222878ms step_avg:90.56ms +[2025-08-22 15:52:45] [Rank 0] step:2461/10000 train_time:222878ms step_avg:90.56ms +[2025-08-22 15:52:47] [Rank 0] step:2481/10000 train_time:224826ms step_avg:90.62ms +[2025-08-22 15:52:47] [Rank 0] step:2481/10000 train_time:224826ms step_avg:90.62ms +[2025-08-22 15:52:49] [Rank 0] step:2501/10000 train_time:226745ms step_avg:90.66ms +[2025-08-22 15:52:49] [Rank 0] step:2501/10000 train_time:226745ms step_avg:90.66ms +[2025-08-22 15:52:51] [Rank 0] step:2521/10000 train_time:228596ms step_avg:90.68ms +[2025-08-22 15:52:51] [Rank 0] step:2521/10000 train_time:228596ms step_avg:90.68ms +[2025-08-22 
15:52:53] [Rank 0] step:2541/10000 train_time:230445ms step_avg:90.69ms +[2025-08-22 15:52:53] [Rank 0] step:2541/10000 train_time:230445ms step_avg:90.69ms +[2025-08-22 15:52:54] [Rank 0] step:2561/10000 train_time:232294ms step_avg:90.70ms +[2025-08-22 15:52:54] [Rank 0] step:2561/10000 train_time:232294ms step_avg:90.70ms +[2025-08-22 15:52:56] [Rank 0] step:2581/10000 train_time:234145ms step_avg:90.72ms +[2025-08-22 15:52:56] [Rank 0] step:2581/10000 train_time:234145ms step_avg:90.72ms +[2025-08-22 15:52:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:52:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:53:12] [Rank 0] PRINT: step:2600/10000 val_loss:4.3159 svd_entropy: attn_qk:H=0.4958,top10E=0.68,eRank=46.0,q75/q25=124.12 attn_vo:H=0.4023,top10E=0.77,eRank=51.7,q75/q25=80.62 mlp_w1:H=0.6326,top10E=0.48,eRank=112.2,q75/q25=17.74 mlp_w2:H=0.6209,top10E=0.46,eRank=117.2,q75/q25=22.77 vo_prod:H=0.2730,top10E=0.85,eRank=19.1,q75/q25=3484.43 train_time:236011ms step_avg:90.77ms +[2025-08-22 15:53:12] [Rank 0] PRINT: step:2600/10000 val_loss:4.3159 svd_entropy: attn_qk:H=0.4958,top10E=0.68,eRank=46.0,q75/q25=124.12 attn_vo:H=0.4023,top10E=0.77,eRank=51.7,q75/q25=80.62 mlp_w1:H=0.6326,top10E=0.48,eRank=112.2,q75/q25=17.74 mlp_w2:H=0.6209,top10E=0.46,eRank=117.2,q75/q25=22.77 vo_prod:H=0.2730,top10E=0.85,eRank=19.1,q75/q25=3484.43 train_time:236011ms step_avg:90.77ms +[2025-08-22 15:53:12] [Rank 0] step:2601/10000 train_time:236030ms step_avg:90.75ms +[2025-08-22 15:53:12] [Rank 0] step:2601/10000 train_time:236030ms step_avg:90.75ms +[2025-08-22 15:53:14] [Rank 0] step:2621/10000 train_time:237853ms step_avg:90.75ms +[2025-08-22 15:53:14] [Rank 0] step:2621/10000 train_time:237853ms step_avg:90.75ms +[2025-08-22 15:53:16] [Rank 0] step:2641/10000 train_time:239698ms 
step_avg:90.76ms +[2025-08-22 15:53:16] [Rank 0] step:2641/10000 train_time:239698ms step_avg:90.76ms +[2025-08-22 15:53:18] [Rank 0] step:2661/10000 train_time:241545ms step_avg:90.77ms +[2025-08-22 15:53:18] [Rank 0] step:2661/10000 train_time:241545ms step_avg:90.77ms +[2025-08-22 15:53:19] [Rank 0] step:2681/10000 train_time:243393ms step_avg:90.78ms +[2025-08-22 15:53:19] [Rank 0] step:2681/10000 train_time:243393ms step_avg:90.78ms +[2025-08-22 15:53:21] [Rank 0] step:2701/10000 train_time:245241ms step_avg:90.80ms +[2025-08-22 15:53:21] [Rank 0] step:2701/10000 train_time:245241ms step_avg:90.80ms +[2025-08-22 15:53:23] [Rank 0] step:2721/10000 train_time:247091ms step_avg:90.81ms +[2025-08-22 15:53:23] [Rank 0] step:2721/10000 train_time:247091ms step_avg:90.81ms +[2025-08-22 15:53:25] [Rank 0] step:2741/10000 train_time:248941ms step_avg:90.82ms +[2025-08-22 15:53:25] [Rank 0] step:2741/10000 train_time:248941ms step_avg:90.82ms +[2025-08-22 15:53:27] [Rank 0] step:2761/10000 train_time:250794ms step_avg:90.83ms +[2025-08-22 15:53:27] [Rank 0] step:2761/10000 train_time:250794ms step_avg:90.83ms +[2025-08-22 15:53:29] [Rank 0] step:2781/10000 train_time:252647ms step_avg:90.85ms +[2025-08-22 15:53:29] [Rank 0] step:2781/10000 train_time:252647ms step_avg:90.85ms +[2025-08-22 15:53:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:53:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:53:44] [Rank 0] PRINT: step:2800/10000 val_loss:4.2967 svd_entropy: attn_qk:H=0.4978,top10E=0.68,eRank=47.1,q75/q25=127.19 attn_vo:H=0.4061,top10E=0.76,eRank=53.3,q75/q25=83.39 mlp_w1:H=0.6313,top10E=0.48,eRank=113.2,q75/q25=17.99 mlp_w2:H=0.6200,top10E=0.46,eRank=118.1,q75/q25=22.86 vo_prod:H=0.2762,top10E=0.84,eRank=20.1,q75/q25=4146.61 train_time:254513ms step_avg:90.90ms +[2025-08-22 15:53:44] [Rank 0] PRINT: step:2800/10000 val_loss:4.2967 svd_entropy: attn_qk:H=0.4978,top10E=0.68,eRank=47.1,q75/q25=127.19 attn_vo:H=0.4061,top10E=0.76,eRank=53.3,q75/q25=83.39 mlp_w1:H=0.6313,top10E=0.48,eRank=113.2,q75/q25=17.99 mlp_w2:H=0.6200,top10E=0.46,eRank=118.1,q75/q25=22.86 vo_prod:H=0.2762,top10E=0.84,eRank=20.1,q75/q25=4146.61 train_time:254513ms step_avg:90.90ms +[2025-08-22 15:53:44] [Rank 0] step:2801/10000 train_time:254531ms step_avg:90.87ms +[2025-08-22 15:53:44] [Rank 0] step:2801/10000 train_time:254531ms step_avg:90.87ms +[2025-08-22 15:53:46] [Rank 0] step:2821/10000 train_time:256383ms step_avg:90.88ms +[2025-08-22 15:53:46] [Rank 0] step:2821/10000 train_time:256383ms step_avg:90.88ms +[2025-08-22 15:53:48] [Rank 0] step:2841/10000 train_time:258232ms step_avg:90.89ms +[2025-08-22 15:53:48] [Rank 0] step:2841/10000 train_time:258232ms step_avg:90.89ms +[2025-08-22 15:53:50] [Rank 0] step:2861/10000 train_time:260157ms step_avg:90.93ms +[2025-08-22 15:53:50] [Rank 0] step:2861/10000 train_time:260157ms step_avg:90.93ms +[2025-08-22 15:53:52] [Rank 0] step:2881/10000 train_time:262094ms step_avg:90.97ms +[2025-08-22 15:53:52] [Rank 0] step:2881/10000 train_time:262094ms step_avg:90.97ms +[2025-08-22 15:53:54] [Rank 0] step:2901/10000 train_time:263945ms step_avg:90.98ms +[2025-08-22 15:53:54] [Rank 0] step:2901/10000 train_time:263945ms step_avg:90.98ms +[2025-08-22 15:53:56] [Rank 0] step:2921/10000 train_time:265799ms step_avg:91.00ms +[2025-08-22 15:53:56] [Rank 0] step:2921/10000 train_time:265799ms step_avg:91.00ms +[2025-08-22 
15:53:58] [Rank 0] step:2941/10000 train_time:267655ms step_avg:91.01ms +[2025-08-22 15:53:58] [Rank 0] step:2941/10000 train_time:267655ms step_avg:91.01ms +[2025-08-22 15:53:59] [Rank 0] step:2961/10000 train_time:269509ms step_avg:91.02ms +[2025-08-22 15:53:59] [Rank 0] step:2961/10000 train_time:269509ms step_avg:91.02ms +[2025-08-22 15:54:01] [Rank 0] step:2981/10000 train_time:271370ms step_avg:91.03ms +[2025-08-22 15:54:01] [Rank 0] step:2981/10000 train_time:271370ms step_avg:91.03ms +[2025-08-22 15:54:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:54:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:54:17] [Rank 0] PRINT: step:3000/10000 val_loss:4.2613 svd_entropy: attn_qk:H=0.5003,top10E=0.67,eRank=48.1,q75/q25=118.54 attn_vo:H=0.4056,top10E=0.76,eRank=54.5,q75/q25=98.41 mlp_w1:H=0.6297,top10E=0.48,eRank=114.1,q75/q25=18.49 mlp_w2:H=0.6194,top10E=0.46,eRank=119.0,q75/q25=22.70 vo_prod:H=0.2688,top10E=0.84,eRank=20.3,q75/q25=5715.52 train_time:273249ms step_avg:91.08ms +[2025-08-22 15:54:17] [Rank 0] PRINT: step:3000/10000 val_loss:4.2613 svd_entropy: attn_qk:H=0.5003,top10E=0.67,eRank=48.1,q75/q25=118.54 attn_vo:H=0.4056,top10E=0.76,eRank=54.5,q75/q25=98.41 mlp_w1:H=0.6297,top10E=0.48,eRank=114.1,q75/q25=18.49 mlp_w2:H=0.6194,top10E=0.46,eRank=119.0,q75/q25=22.70 vo_prod:H=0.2688,top10E=0.84,eRank=20.3,q75/q25=5715.52 train_time:273249ms step_avg:91.08ms +[2025-08-22 15:54:17] [Rank 0] step:3001/10000 train_time:273267ms step_avg:91.06ms +[2025-08-22 15:54:17] [Rank 0] step:3001/10000 train_time:273267ms step_avg:91.06ms +[2025-08-22 15:54:19] [Rank 0] step:3021/10000 train_time:275113ms step_avg:91.07ms +[2025-08-22 15:54:19] [Rank 0] step:3021/10000 train_time:275113ms step_avg:91.07ms +[2025-08-22 15:54:21] [Rank 0] step:3041/10000 train_time:276966ms 
step_avg:91.08ms +[2025-08-22 15:54:21] [Rank 0] step:3041/10000 train_time:276966ms step_avg:91.08ms +[2025-08-22 15:54:23] [Rank 0] step:3061/10000 train_time:278820ms step_avg:91.09ms +[2025-08-22 15:54:23] [Rank 0] step:3061/10000 train_time:278820ms step_avg:91.09ms +[2025-08-22 15:54:24] [Rank 0] step:3081/10000 train_time:280675ms step_avg:91.10ms +[2025-08-22 15:54:24] [Rank 0] step:3081/10000 train_time:280675ms step_avg:91.10ms +[2025-08-22 15:54:26] [Rank 0] step:3101/10000 train_time:282531ms step_avg:91.11ms +[2025-08-22 15:54:26] [Rank 0] step:3101/10000 train_time:282531ms step_avg:91.11ms +[2025-08-22 15:54:28] [Rank 0] step:3121/10000 train_time:284388ms step_avg:91.12ms +[2025-08-22 15:54:28] [Rank 0] step:3121/10000 train_time:284388ms step_avg:91.12ms +[2025-08-22 15:54:30] [Rank 0] step:3141/10000 train_time:286245ms step_avg:91.13ms +[2025-08-22 15:54:30] [Rank 0] step:3141/10000 train_time:286245ms step_avg:91.13ms +[2025-08-22 15:54:32] [Rank 0] step:3161/10000 train_time:288105ms step_avg:91.14ms +[2025-08-22 15:54:32] [Rank 0] step:3161/10000 train_time:288105ms step_avg:91.14ms +[2025-08-22 15:54:34] [Rank 0] step:3181/10000 train_time:289963ms step_avg:91.15ms +[2025-08-22 15:54:34] [Rank 0] step:3181/10000 train_time:289963ms step_avg:91.15ms +[2025-08-22 15:54:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:54:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:54:49] [Rank 0] PRINT: step:3200/10000 val_loss:4.2365 svd_entropy: attn_qk:H=0.5022,top10E=0.67,eRank=49.0,q75/q25=119.99 attn_vo:H=0.4084,top10E=0.76,eRank=55.7,q75/q25=100.50 mlp_w1:H=0.6255,top10E=0.48,eRank=114.1,q75/q25=19.00 mlp_w2:H=0.6178,top10E=0.47,eRank=119.5,q75/q25=22.64 vo_prod:H=0.2766,top10E=0.84,eRank=21.0,q75/q25=6413.95 train_time:291837ms step_avg:91.20ms +[2025-08-22 15:54:49] [Rank 0] PRINT: step:3200/10000 val_loss:4.2365 svd_entropy: attn_qk:H=0.5022,top10E=0.67,eRank=49.0,q75/q25=119.99 attn_vo:H=0.4084,top10E=0.76,eRank=55.7,q75/q25=100.50 mlp_w1:H=0.6255,top10E=0.48,eRank=114.1,q75/q25=19.00 mlp_w2:H=0.6178,top10E=0.47,eRank=119.5,q75/q25=22.64 vo_prod:H=0.2766,top10E=0.84,eRank=21.0,q75/q25=6413.95 train_time:291837ms step_avg:91.20ms +[2025-08-22 15:54:49] [Rank 0] step:3201/10000 train_time:291854ms step_avg:91.18ms +[2025-08-22 15:54:49] [Rank 0] step:3201/10000 train_time:291854ms step_avg:91.18ms +[2025-08-22 15:54:51] [Rank 0] step:3221/10000 train_time:293699ms step_avg:91.18ms +[2025-08-22 15:54:51] [Rank 0] step:3221/10000 train_time:293699ms step_avg:91.18ms +[2025-08-22 15:54:53] [Rank 0] step:3241/10000 train_time:295622ms step_avg:91.21ms +[2025-08-22 15:54:53] [Rank 0] step:3241/10000 train_time:295622ms step_avg:91.21ms +[2025-08-22 15:54:55] [Rank 0] step:3261/10000 train_time:297528ms step_avg:91.24ms +[2025-08-22 15:54:55] [Rank 0] step:3261/10000 train_time:297528ms step_avg:91.24ms +[2025-08-22 15:54:57] [Rank 0] step:3281/10000 train_time:299382ms step_avg:91.25ms +[2025-08-22 15:54:57] [Rank 0] step:3281/10000 train_time:299382ms step_avg:91.25ms +[2025-08-22 15:54:59] [Rank 0] step:3301/10000 train_time:301239ms step_avg:91.26ms +[2025-08-22 15:54:59] [Rank 0] step:3301/10000 train_time:301239ms step_avg:91.26ms +[2025-08-22 15:55:01] [Rank 0] step:3321/10000 train_time:303096ms step_avg:91.27ms +[2025-08-22 15:55:01] [Rank 0] step:3321/10000 train_time:303096ms step_avg:91.27ms +[2025-08-22 
15:55:03] [Rank 0] step:3341/10000 train_time:304954ms step_avg:91.28ms +[2025-08-22 15:55:03] [Rank 0] step:3341/10000 train_time:304954ms step_avg:91.28ms +[2025-08-22 15:55:04] [Rank 0] step:3361/10000 train_time:306812ms step_avg:91.29ms +[2025-08-22 15:55:04] [Rank 0] step:3361/10000 train_time:306812ms step_avg:91.29ms +[2025-08-22 15:55:06] [Rank 0] step:3381/10000 train_time:308671ms step_avg:91.30ms +[2025-08-22 15:55:06] [Rank 0] step:3381/10000 train_time:308671ms step_avg:91.30ms +[2025-08-22 15:55:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:55:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:55:22] [Rank 0] PRINT: step:3400/10000 val_loss:4.2117 svd_entropy: attn_qk:H=0.5048,top10E=0.67,eRank=50.0,q75/q25=120.65 attn_vo:H=0.4123,top10E=0.75,eRank=57.0,q75/q25=101.48 mlp_w1:H=0.6246,top10E=0.48,eRank=114.9,q75/q25=19.63 mlp_w2:H=0.6162,top10E=0.47,eRank=120.0,q75/q25=22.71 vo_prod:H=0.2830,top10E=0.84,eRank=21.5,q75/q25=6966.85 train_time:310544ms step_avg:91.34ms +[2025-08-22 15:55:22] [Rank 0] PRINT: step:3400/10000 val_loss:4.2117 svd_entropy: attn_qk:H=0.5048,top10E=0.67,eRank=50.0,q75/q25=120.65 attn_vo:H=0.4123,top10E=0.75,eRank=57.0,q75/q25=101.48 mlp_w1:H=0.6246,top10E=0.48,eRank=114.9,q75/q25=19.63 mlp_w2:H=0.6162,top10E=0.47,eRank=120.0,q75/q25=22.71 vo_prod:H=0.2830,top10E=0.84,eRank=21.5,q75/q25=6966.85 train_time:310544ms step_avg:91.34ms +[2025-08-22 15:55:22] [Rank 0] step:3401/10000 train_time:310563ms step_avg:91.32ms +[2025-08-22 15:55:22] [Rank 0] step:3401/10000 train_time:310563ms step_avg:91.32ms +[2025-08-22 15:55:24] [Rank 0] step:3421/10000 train_time:312403ms step_avg:91.32ms +[2025-08-22 15:55:24] [Rank 0] step:3421/10000 train_time:312403ms step_avg:91.32ms +[2025-08-22 15:55:26] [Rank 0] step:3441/10000 train_time:314260ms 
step_avg:91.33ms +[2025-08-22 15:55:26] [Rank 0] step:3441/10000 train_time:314260ms step_avg:91.33ms +[2025-08-22 15:55:28] [Rank 0] step:3461/10000 train_time:316120ms step_avg:91.34ms +[2025-08-22 15:55:28] [Rank 0] step:3461/10000 train_time:316120ms step_avg:91.34ms +[2025-08-22 15:55:29] [Rank 0] step:3481/10000 train_time:317978ms step_avg:91.35ms +[2025-08-22 15:55:29] [Rank 0] step:3481/10000 train_time:317978ms step_avg:91.35ms +[2025-08-22 15:55:31] [Rank 0] step:3501/10000 train_time:319840ms step_avg:91.36ms +[2025-08-22 15:55:31] [Rank 0] step:3501/10000 train_time:319840ms step_avg:91.36ms +[2025-08-22 15:55:33] [Rank 0] step:3521/10000 train_time:321702ms step_avg:91.37ms +[2025-08-22 15:55:33] [Rank 0] step:3521/10000 train_time:321702ms step_avg:91.37ms +[2025-08-22 15:55:35] [Rank 0] step:3541/10000 train_time:323562ms step_avg:91.38ms +[2025-08-22 15:55:35] [Rank 0] step:3541/10000 train_time:323562ms step_avg:91.38ms +[2025-08-22 15:55:37] [Rank 0] step:3561/10000 train_time:325424ms step_avg:91.39ms +[2025-08-22 15:55:37] [Rank 0] step:3561/10000 train_time:325424ms step_avg:91.39ms +[2025-08-22 15:55:39] [Rank 0] step:3581/10000 train_time:327285ms step_avg:91.39ms +[2025-08-22 15:55:39] [Rank 0] step:3581/10000 train_time:327285ms step_avg:91.39ms +[2025-08-22 15:55:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:55:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:55:54] [Rank 0] PRINT: step:3600/10000 val_loss:4.2096 svd_entropy: attn_qk:H=0.5069,top10E=0.67,eRank=50.8,q75/q25=121.87 attn_vo:H=0.4163,top10E=0.75,eRank=58.1,q75/q25=103.62 mlp_w1:H=0.6241,top10E=0.48,eRank=115.8,q75/q25=20.27 mlp_w2:H=0.6152,top10E=0.47,eRank=120.5,q75/q25=22.51 vo_prod:H=0.2904,top10E=0.84,eRank=21.7,q75/q25=8771.52 train_time:329163ms step_avg:91.43ms +[2025-08-22 15:55:54] [Rank 0] PRINT: step:3600/10000 val_loss:4.2096 svd_entropy: attn_qk:H=0.5069,top10E=0.67,eRank=50.8,q75/q25=121.87 attn_vo:H=0.4163,top10E=0.75,eRank=58.1,q75/q25=103.62 mlp_w1:H=0.6241,top10E=0.48,eRank=115.8,q75/q25=20.27 mlp_w2:H=0.6152,top10E=0.47,eRank=120.5,q75/q25=22.51 vo_prod:H=0.2904,top10E=0.84,eRank=21.7,q75/q25=8771.52 train_time:329163ms step_avg:91.43ms +[2025-08-22 15:55:55] [Rank 0] step:3601/10000 train_time:329182ms step_avg:91.41ms +[2025-08-22 15:55:55] [Rank 0] step:3601/10000 train_time:329182ms step_avg:91.41ms +[2025-08-22 15:55:56] [Rank 0] step:3621/10000 train_time:331065ms step_avg:91.43ms +[2025-08-22 15:55:56] [Rank 0] step:3621/10000 train_time:331065ms step_avg:91.43ms +[2025-08-22 15:55:58] [Rank 0] step:3641/10000 train_time:332974ms step_avg:91.45ms +[2025-08-22 15:55:58] [Rank 0] step:3641/10000 train_time:332974ms step_avg:91.45ms +[2025-08-22 15:56:00] [Rank 0] step:3661/10000 train_time:334827ms step_avg:91.46ms +[2025-08-22 15:56:00] [Rank 0] step:3661/10000 train_time:334827ms step_avg:91.46ms +[2025-08-22 15:56:02] [Rank 0] step:3681/10000 train_time:336682ms step_avg:91.46ms +[2025-08-22 15:56:02] [Rank 0] step:3681/10000 train_time:336682ms step_avg:91.46ms +[2025-08-22 15:56:04] [Rank 0] step:3701/10000 train_time:338537ms step_avg:91.47ms +[2025-08-22 15:56:04] [Rank 0] step:3701/10000 train_time:338537ms step_avg:91.47ms +[2025-08-22 15:56:06] [Rank 0] step:3721/10000 train_time:340423ms step_avg:91.49ms +[2025-08-22 15:56:06] [Rank 0] step:3721/10000 train_time:340423ms step_avg:91.49ms +[2025-08-22 
15:56:08] [Rank 0] step:3741/10000 train_time:342314ms step_avg:91.50ms +[2025-08-22 15:56:08] [Rank 0] step:3741/10000 train_time:342314ms step_avg:91.50ms +[2025-08-22 15:56:10] [Rank 0] step:3761/10000 train_time:344208ms step_avg:91.52ms +[2025-08-22 15:56:10] [Rank 0] step:3761/10000 train_time:344208ms step_avg:91.52ms +[2025-08-22 15:56:11] [Rank 0] step:3781/10000 train_time:346105ms step_avg:91.54ms +[2025-08-22 15:56:11] [Rank 0] step:3781/10000 train_time:346105ms step_avg:91.54ms +[2025-08-22 15:56:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:56:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:56:27] [Rank 0] PRINT: step:3800/10000 val_loss:4.1715 svd_entropy: attn_qk:H=0.5099,top10E=0.66,eRank=51.7,q75/q25=119.93 attn_vo:H=0.4203,top10E=0.75,eRank=59.1,q75/q25=108.04 mlp_w1:H=0.6255,top10E=0.48,eRank=117.2,q75/q25=20.78 mlp_w2:H=0.6155,top10E=0.47,eRank=121.3,q75/q25=22.42 vo_prod:H=0.2941,top10E=0.84,eRank=22.1,q75/q25=10351.57 train_time:348016ms step_avg:91.58ms +[2025-08-22 15:56:27] [Rank 0] PRINT: step:3800/10000 val_loss:4.1715 svd_entropy: attn_qk:H=0.5099,top10E=0.66,eRank=51.7,q75/q25=119.93 attn_vo:H=0.4203,top10E=0.75,eRank=59.1,q75/q25=108.04 mlp_w1:H=0.6255,top10E=0.48,eRank=117.2,q75/q25=20.78 mlp_w2:H=0.6155,top10E=0.47,eRank=121.3,q75/q25=22.42 vo_prod:H=0.2941,top10E=0.84,eRank=22.1,q75/q25=10351.57 train_time:348016ms step_avg:91.58ms +[2025-08-22 15:56:27] [Rank 0] step:3801/10000 train_time:348035ms step_avg:91.56ms +[2025-08-22 15:56:27] [Rank 0] step:3801/10000 train_time:348035ms step_avg:91.56ms +[2025-08-22 15:56:29] [Rank 0] step:3821/10000 train_time:349907ms step_avg:91.57ms +[2025-08-22 15:56:29] [Rank 0] step:3821/10000 train_time:349907ms step_avg:91.57ms +[2025-08-22 15:56:31] [Rank 0] step:3841/10000 train_time:351794ms 
step_avg:91.59ms +[2025-08-22 15:56:31] [Rank 0] step:3841/10000 train_time:351794ms step_avg:91.59ms +[2025-08-22 15:56:33] [Rank 0] step:3861/10000 train_time:353681ms step_avg:91.60ms +[2025-08-22 15:56:33] [Rank 0] step:3861/10000 train_time:353681ms step_avg:91.60ms +[2025-08-22 15:56:35] [Rank 0] step:3881/10000 train_time:355569ms step_avg:91.62ms +[2025-08-22 15:56:35] [Rank 0] step:3881/10000 train_time:355569ms step_avg:91.62ms +[2025-08-22 15:56:37] [Rank 0] step:3901/10000 train_time:357458ms step_avg:91.63ms +[2025-08-22 15:56:37] [Rank 0] step:3901/10000 train_time:357458ms step_avg:91.63ms +[2025-08-22 15:56:39] [Rank 0] step:3921/10000 train_time:359348ms step_avg:91.65ms +[2025-08-22 15:56:39] [Rank 0] step:3921/10000 train_time:359348ms step_avg:91.65ms +[2025-08-22 15:56:41] [Rank 0] step:3941/10000 train_time:361238ms step_avg:91.66ms +[2025-08-22 15:56:41] [Rank 0] step:3941/10000 train_time:361238ms step_avg:91.66ms +[2025-08-22 15:56:42] [Rank 0] step:3961/10000 train_time:363126ms step_avg:91.68ms +[2025-08-22 15:56:42] [Rank 0] step:3961/10000 train_time:363126ms step_avg:91.68ms +[2025-08-22 15:56:44] [Rank 0] step:3981/10000 train_time:365015ms step_avg:91.69ms +[2025-08-22 15:56:44] [Rank 0] step:3981/10000 train_time:365015ms step_avg:91.69ms +[2025-08-22 15:56:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:56:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:57:00] [Rank 0] PRINT: step:4000/10000 val_loss:4.1532 svd_entropy: attn_qk:H=0.5113,top10E=0.66,eRank=52.4,q75/q25=118.87 attn_vo:H=0.4212,top10E=0.74,eRank=60.3,q75/q25=111.64 mlp_w1:H=0.6246,top10E=0.48,eRank=117.8,q75/q25=21.61 mlp_w2:H=0.6157,top10E=0.47,eRank=121.9,q75/q25=22.43 vo_prod:H=0.2932,top10E=0.84,eRank=22.7,q75/q25=12367.90 train_time:366919ms step_avg:91.73ms +[2025-08-22 15:57:00] [Rank 0] PRINT: step:4000/10000 val_loss:4.1532 svd_entropy: attn_qk:H=0.5113,top10E=0.66,eRank=52.4,q75/q25=118.87 attn_vo:H=0.4212,top10E=0.74,eRank=60.3,q75/q25=111.64 mlp_w1:H=0.6246,top10E=0.48,eRank=117.8,q75/q25=21.61 mlp_w2:H=0.6157,top10E=0.47,eRank=121.9,q75/q25=22.43 vo_prod:H=0.2932,top10E=0.84,eRank=22.7,q75/q25=12367.90 train_time:366919ms step_avg:91.73ms +[2025-08-22 15:57:00] [Rank 0] step:4001/10000 train_time:366937ms step_avg:91.71ms +[2025-08-22 15:57:00] [Rank 0] step:4001/10000 train_time:366937ms step_avg:91.71ms +[2025-08-22 15:57:02] [Rank 0] step:4021/10000 train_time:368895ms step_avg:91.74ms +[2025-08-22 15:57:02] [Rank 0] step:4021/10000 train_time:368895ms step_avg:91.74ms +[2025-08-22 15:57:04] [Rank 0] step:4041/10000 train_time:370781ms step_avg:91.75ms +[2025-08-22 15:57:04] [Rank 0] step:4041/10000 train_time:370781ms step_avg:91.75ms +[2025-08-22 15:57:06] [Rank 0] step:4061/10000 train_time:372667ms step_avg:91.77ms +[2025-08-22 15:57:06] [Rank 0] step:4061/10000 train_time:372667ms step_avg:91.77ms +[2025-08-22 15:57:08] [Rank 0] step:4081/10000 train_time:375217ms step_avg:91.94ms +[2025-08-22 15:57:08] [Rank 0] step:4081/10000 train_time:375217ms step_avg:91.94ms +[2025-08-22 15:57:10] [Rank 0] step:4101/10000 train_time:377104ms step_avg:91.95ms +[2025-08-22 15:57:10] [Rank 0] step:4101/10000 train_time:377104ms step_avg:91.95ms +[2025-08-22 15:57:12] [Rank 0] step:4121/10000 train_time:378992ms step_avg:91.97ms +[2025-08-22 15:57:12] [Rank 0] step:4121/10000 train_time:378992ms step_avg:91.97ms +[2025-08-22 
15:57:14] [Rank 0] step:4141/10000 train_time:380882ms step_avg:91.98ms +[2025-08-22 15:57:14] [Rank 0] step:4141/10000 train_time:380882ms step_avg:91.98ms +[2025-08-22 15:57:16] [Rank 0] step:4161/10000 train_time:382772ms step_avg:91.99ms +[2025-08-22 15:57:16] [Rank 0] step:4161/10000 train_time:382772ms step_avg:91.99ms +[2025-08-22 15:57:18] [Rank 0] step:4181/10000 train_time:384666ms step_avg:92.00ms +[2025-08-22 15:57:18] [Rank 0] step:4181/10000 train_time:384666ms step_avg:92.00ms +[2025-08-22 15:57:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:57:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:57:34] [Rank 0] PRINT: step:4200/10000 val_loss:4.1440 svd_entropy: attn_qk:H=0.5114,top10E=0.66,eRank=52.8,q75/q25=117.10 attn_vo:H=0.4241,top10E=0.74,eRank=61.2,q75/q25=112.06 mlp_w1:H=0.6233,top10E=0.48,eRank=118.2,q75/q25=22.33 mlp_w2:H=0.6148,top10E=0.47,eRank=122.2,q75/q25=22.14 vo_prod:H=0.2997,top10E=0.84,eRank=23.2,q75/q25=14040.82 train_time:386575ms step_avg:92.04ms +[2025-08-22 15:57:34] [Rank 0] PRINT: step:4200/10000 val_loss:4.1440 svd_entropy: attn_qk:H=0.5114,top10E=0.66,eRank=52.8,q75/q25=117.10 attn_vo:H=0.4241,top10E=0.74,eRank=61.2,q75/q25=112.06 mlp_w1:H=0.6233,top10E=0.48,eRank=118.2,q75/q25=22.33 mlp_w2:H=0.6148,top10E=0.47,eRank=122.2,q75/q25=22.14 vo_prod:H=0.2997,top10E=0.84,eRank=23.2,q75/q25=14040.82 train_time:386575ms step_avg:92.04ms +[2025-08-22 15:57:34] [Rank 0] step:4201/10000 train_time:386593ms step_avg:92.02ms +[2025-08-22 15:57:34] [Rank 0] step:4201/10000 train_time:386593ms step_avg:92.02ms +[2025-08-22 15:57:36] [Rank 0] step:4221/10000 train_time:388475ms step_avg:92.03ms +[2025-08-22 15:57:36] [Rank 0] step:4221/10000 train_time:388475ms step_avg:92.03ms +[2025-08-22 15:57:37] [Rank 0] step:4241/10000 train_time:390368ms 
step_avg:92.05ms +[2025-08-22 15:57:37] [Rank 0] step:4241/10000 train_time:390368ms step_avg:92.05ms +[2025-08-22 15:57:39] [Rank 0] step:4261/10000 train_time:392259ms step_avg:92.06ms +[2025-08-22 15:57:39] [Rank 0] step:4261/10000 train_time:392259ms step_avg:92.06ms +[2025-08-22 15:57:41] [Rank 0] step:4281/10000 train_time:394154ms step_avg:92.07ms +[2025-08-22 15:57:41] [Rank 0] step:4281/10000 train_time:394154ms step_avg:92.07ms +[2025-08-22 15:57:43] [Rank 0] step:4301/10000 train_time:396047ms step_avg:92.08ms +[2025-08-22 15:57:43] [Rank 0] step:4301/10000 train_time:396047ms step_avg:92.08ms +[2025-08-22 15:57:45] [Rank 0] step:4321/10000 train_time:397942ms step_avg:92.09ms +[2025-08-22 15:57:45] [Rank 0] step:4321/10000 train_time:397942ms step_avg:92.09ms +[2025-08-22 15:57:47] [Rank 0] step:4341/10000 train_time:399837ms step_avg:92.11ms +[2025-08-22 15:57:47] [Rank 0] step:4341/10000 train_time:399837ms step_avg:92.11ms +[2025-08-22 15:57:49] [Rank 0] step:4361/10000 train_time:401731ms step_avg:92.12ms +[2025-08-22 15:57:49] [Rank 0] step:4361/10000 train_time:401731ms step_avg:92.12ms +[2025-08-22 15:57:51] [Rank 0] step:4381/10000 train_time:403626ms step_avg:92.13ms +[2025-08-22 15:57:51] [Rank 0] step:4381/10000 train_time:403626ms step_avg:92.13ms +[2025-08-22 15:57:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:57:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:58:06] [Rank 0] PRINT: step:4400/10000 val_loss:4.1279 svd_entropy: attn_qk:H=0.5144,top10E=0.66,eRank=53.7,q75/q25=111.24 attn_vo:H=0.4293,top10E=0.74,eRank=62.2,q75/q25=114.33 mlp_w1:H=0.6213,top10E=0.48,eRank=118.3,q75/q25=22.97 mlp_w2:H=0.6134,top10E=0.47,eRank=122.3,q75/q25=22.05 vo_prod:H=0.3020,top10E=0.84,eRank=23.4,q75/q25=14928.34 train_time:405536ms step_avg:92.17ms +[2025-08-22 15:58:06] [Rank 0] PRINT: step:4400/10000 val_loss:4.1279 svd_entropy: attn_qk:H=0.5144,top10E=0.66,eRank=53.7,q75/q25=111.24 attn_vo:H=0.4293,top10E=0.74,eRank=62.2,q75/q25=114.33 mlp_w1:H=0.6213,top10E=0.48,eRank=118.3,q75/q25=22.97 mlp_w2:H=0.6134,top10E=0.47,eRank=122.3,q75/q25=22.05 vo_prod:H=0.3020,top10E=0.84,eRank=23.4,q75/q25=14928.34 train_time:405536ms step_avg:92.17ms +[2025-08-22 15:58:06] [Rank 0] step:4401/10000 train_time:405554ms step_avg:92.15ms +[2025-08-22 15:58:06] [Rank 0] step:4401/10000 train_time:405554ms step_avg:92.15ms +[2025-08-22 15:58:08] [Rank 0] step:4421/10000 train_time:407442ms step_avg:92.16ms +[2025-08-22 15:58:08] [Rank 0] step:4421/10000 train_time:407442ms step_avg:92.16ms +[2025-08-22 15:58:10] [Rank 0] step:4441/10000 train_time:409327ms step_avg:92.17ms +[2025-08-22 15:58:10] [Rank 0] step:4441/10000 train_time:409327ms step_avg:92.17ms +[2025-08-22 15:58:12] [Rank 0] step:4461/10000 train_time:411219ms step_avg:92.18ms +[2025-08-22 15:58:12] [Rank 0] step:4461/10000 train_time:411219ms step_avg:92.18ms +[2025-08-22 15:58:14] [Rank 0] step:4481/10000 train_time:413114ms step_avg:92.19ms +[2025-08-22 15:58:14] [Rank 0] step:4481/10000 train_time:413114ms step_avg:92.19ms +[2025-08-22 15:58:16] [Rank 0] step:4501/10000 train_time:415007ms step_avg:92.20ms +[2025-08-22 15:58:16] [Rank 0] step:4501/10000 train_time:415007ms step_avg:92.20ms +[2025-08-22 15:58:18] [Rank 0] step:4521/10000 train_time:416904ms step_avg:92.22ms +[2025-08-22 15:58:18] [Rank 0] step:4521/10000 train_time:416904ms step_avg:92.22ms +[2025-08-22 
15:58:20] [Rank 0] step:4541/10000 train_time:418801ms step_avg:92.23ms +[2025-08-22 15:58:20] [Rank 0] step:4541/10000 train_time:418801ms step_avg:92.23ms +[2025-08-22 15:58:22] [Rank 0] step:4561/10000 train_time:420698ms step_avg:92.24ms +[2025-08-22 15:58:22] [Rank 0] step:4561/10000 train_time:420698ms step_avg:92.24ms +[2025-08-22 15:58:24] [Rank 0] step:4581/10000 train_time:422602ms step_avg:92.25ms +[2025-08-22 15:58:24] [Rank 0] step:4581/10000 train_time:422602ms step_avg:92.25ms +[2025-08-22 15:58:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:58:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:58:39] [Rank 0] PRINT: step:4600/10000 val_loss:4.1098 svd_entropy: attn_qk:H=0.5160,top10E=0.66,eRank=54.4,q75/q25=109.52 attn_vo:H=0.4328,top10E=0.74,eRank=63.2,q75/q25=114.28 mlp_w1:H=0.6195,top10E=0.49,eRank=118.5,q75/q25=23.49 mlp_w2:H=0.6129,top10E=0.47,eRank=122.6,q75/q25=22.18 vo_prod:H=0.3038,top10E=0.84,eRank=23.9,q75/q25=16349.21 train_time:424516ms step_avg:92.29ms +[2025-08-22 15:58:39] [Rank 0] PRINT: step:4600/10000 val_loss:4.1098 svd_entropy: attn_qk:H=0.5160,top10E=0.66,eRank=54.4,q75/q25=109.52 attn_vo:H=0.4328,top10E=0.74,eRank=63.2,q75/q25=114.28 mlp_w1:H=0.6195,top10E=0.49,eRank=118.5,q75/q25=23.49 mlp_w2:H=0.6129,top10E=0.47,eRank=122.6,q75/q25=22.18 vo_prod:H=0.3038,top10E=0.84,eRank=23.9,q75/q25=16349.21 train_time:424516ms step_avg:92.29ms +[2025-08-22 15:58:39] [Rank 0] step:4601/10000 train_time:424533ms step_avg:92.27ms +[2025-08-22 15:58:39] [Rank 0] step:4601/10000 train_time:424533ms step_avg:92.27ms +[2025-08-22 15:58:41] [Rank 0] step:4621/10000 train_time:426412ms step_avg:92.28ms +[2025-08-22 15:58:41] [Rank 0] step:4621/10000 train_time:426412ms step_avg:92.28ms +[2025-08-22 15:58:43] [Rank 0] step:4641/10000 train_time:428305ms 
step_avg:92.29ms +[2025-08-22 15:58:43] [Rank 0] step:4641/10000 train_time:428305ms step_avg:92.29ms +[2025-08-22 15:58:45] [Rank 0] step:4661/10000 train_time:430197ms step_avg:92.30ms +[2025-08-22 15:58:45] [Rank 0] step:4661/10000 train_time:430197ms step_avg:92.30ms +[2025-08-22 15:58:47] [Rank 0] step:4681/10000 train_time:432094ms step_avg:92.31ms +[2025-08-22 15:58:47] [Rank 0] step:4681/10000 train_time:432094ms step_avg:92.31ms +[2025-08-22 15:58:49] [Rank 0] step:4701/10000 train_time:433989ms step_avg:92.32ms +[2025-08-22 15:58:49] [Rank 0] step:4701/10000 train_time:433989ms step_avg:92.32ms +[2025-08-22 15:58:51] [Rank 0] step:4721/10000 train_time:435886ms step_avg:92.33ms +[2025-08-22 15:58:51] [Rank 0] step:4721/10000 train_time:435886ms step_avg:92.33ms +[2025-08-22 15:58:53] [Rank 0] step:4741/10000 train_time:437782ms step_avg:92.34ms +[2025-08-22 15:58:53] [Rank 0] step:4741/10000 train_time:437782ms step_avg:92.34ms +[2025-08-22 15:58:54] [Rank 0] step:4761/10000 train_time:439681ms step_avg:92.35ms +[2025-08-22 15:58:54] [Rank 0] step:4761/10000 train_time:439681ms step_avg:92.35ms +[2025-08-22 15:58:56] [Rank 0] step:4781/10000 train_time:441578ms step_avg:92.36ms +[2025-08-22 15:58:56] [Rank 0] step:4781/10000 train_time:441578ms step_avg:92.36ms +[2025-08-22 15:58:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:58:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 15:59:12] [Rank 0] PRINT: step:4800/10000 val_loss:4.1095 svd_entropy: attn_qk:H=0.5182,top10E=0.66,eRank=55.1,q75/q25=88.58 attn_vo:H=0.4363,top10E=0.73,eRank=63.9,q75/q25=107.98 mlp_w1:H=0.6186,top10E=0.49,eRank=119.0,q75/q25=24.15 mlp_w2:H=0.6120,top10E=0.47,eRank=122.8,q75/q25=22.01 vo_prod:H=0.3015,top10E=0.84,eRank=23.6,q75/q25=15541.45 train_time:443490ms step_avg:92.39ms +[2025-08-22 15:59:12] [Rank 0] PRINT: step:4800/10000 val_loss:4.1095 svd_entropy: attn_qk:H=0.5182,top10E=0.66,eRank=55.1,q75/q25=88.58 attn_vo:H=0.4363,top10E=0.73,eRank=63.9,q75/q25=107.98 mlp_w1:H=0.6186,top10E=0.49,eRank=119.0,q75/q25=24.15 mlp_w2:H=0.6120,top10E=0.47,eRank=122.8,q75/q25=22.01 vo_prod:H=0.3015,top10E=0.84,eRank=23.6,q75/q25=15541.45 train_time:443490ms step_avg:92.39ms +[2025-08-22 15:59:12] [Rank 0] step:4801/10000 train_time:443509ms step_avg:92.38ms +[2025-08-22 15:59:12] [Rank 0] step:4801/10000 train_time:443509ms step_avg:92.38ms +[2025-08-22 15:59:14] [Rank 0] step:4821/10000 train_time:445389ms step_avg:92.39ms +[2025-08-22 15:59:14] [Rank 0] step:4821/10000 train_time:445389ms step_avg:92.39ms +[2025-08-22 15:59:16] [Rank 0] step:4841/10000 train_time:447280ms step_avg:92.39ms +[2025-08-22 15:59:16] [Rank 0] step:4841/10000 train_time:447280ms step_avg:92.39ms +[2025-08-22 15:59:18] [Rank 0] step:4861/10000 train_time:449176ms step_avg:92.40ms +[2025-08-22 15:59:18] [Rank 0] step:4861/10000 train_time:449176ms step_avg:92.40ms +[2025-08-22 15:59:20] [Rank 0] step:4881/10000 train_time:451068ms step_avg:92.41ms +[2025-08-22 15:59:20] [Rank 0] step:4881/10000 train_time:451068ms step_avg:92.41ms +[2025-08-22 15:59:22] [Rank 0] step:4901/10000 train_time:452960ms step_avg:92.42ms +[2025-08-22 15:59:22] [Rank 0] step:4901/10000 train_time:452960ms step_avg:92.42ms +[2025-08-22 15:59:24] [Rank 0] step:4921/10000 train_time:454857ms step_avg:92.43ms +[2025-08-22 15:59:24] [Rank 0] step:4921/10000 train_time:454857ms step_avg:92.43ms +[2025-08-22 
15:59:25] [Rank 0] step:4941/10000 train_time:456757ms step_avg:92.44ms +[2025-08-22 15:59:25] [Rank 0] step:4941/10000 train_time:456757ms step_avg:92.44ms +[2025-08-22 15:59:27] [Rank 0] step:4961/10000 train_time:458656ms step_avg:92.45ms +[2025-08-22 15:59:27] [Rank 0] step:4961/10000 train_time:458656ms step_avg:92.45ms +[2025-08-22 15:59:29] [Rank 0] step:4981/10000 train_time:460559ms step_avg:92.46ms +[2025-08-22 15:59:29] [Rank 0] step:4981/10000 train_time:460559ms step_avg:92.46ms +[2025-08-22 15:59:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:59:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 15:59:45] [Rank 0] PRINT: step:5000/10000 val_loss:4.0918 svd_entropy: attn_qk:H=0.5199,top10E=0.65,eRank=55.8,q75/q25=85.64 attn_vo:H=0.4368,top10E=0.73,eRank=64.7,q75/q25=111.19 mlp_w1:H=0.6142,top10E=0.49,eRank=118.2,q75/q25=24.75 mlp_w2:H=0.6104,top10E=0.47,eRank=122.7,q75/q25=22.03 vo_prod:H=0.2995,top10E=0.84,eRank=24.1,q75/q25=17520.07 train_time:462476ms step_avg:92.50ms +[2025-08-22 15:59:45] [Rank 0] PRINT: step:5000/10000 val_loss:4.0918 svd_entropy: attn_qk:H=0.5199,top10E=0.65,eRank=55.8,q75/q25=85.64 attn_vo:H=0.4368,top10E=0.73,eRank=64.7,q75/q25=111.19 mlp_w1:H=0.6142,top10E=0.49,eRank=118.2,q75/q25=24.75 mlp_w2:H=0.6104,top10E=0.47,eRank=122.7,q75/q25=22.03 vo_prod:H=0.2995,top10E=0.84,eRank=24.1,q75/q25=17520.07 train_time:462476ms step_avg:92.50ms +[2025-08-22 15:59:45] [Rank 0] step:5001/10000 train_time:462494ms step_avg:92.48ms +[2025-08-22 15:59:45] [Rank 0] step:5001/10000 train_time:462494ms step_avg:92.48ms +[2025-08-22 15:59:47] [Rank 0] step:5021/10000 train_time:464380ms step_avg:92.49ms +[2025-08-22 15:59:47] [Rank 0] step:5021/10000 train_time:464380ms step_avg:92.49ms +[2025-08-22 15:59:49] [Rank 0] step:5041/10000 train_time:466279ms 
step_avg:92.50ms +[2025-08-22 15:59:49] [Rank 0] step:5041/10000 train_time:466279ms step_avg:92.50ms +[2025-08-22 15:59:51] [Rank 0] step:5061/10000 train_time:468174ms step_avg:92.51ms +[2025-08-22 15:59:51] [Rank 0] step:5061/10000 train_time:468174ms step_avg:92.51ms +[2025-08-22 15:59:53] [Rank 0] step:5081/10000 train_time:470075ms step_avg:92.52ms +[2025-08-22 15:59:53] [Rank 0] step:5081/10000 train_time:470075ms step_avg:92.52ms +[2025-08-22 15:59:55] [Rank 0] step:5101/10000 train_time:471974ms step_avg:92.53ms +[2025-08-22 15:59:55] [Rank 0] step:5101/10000 train_time:471974ms step_avg:92.53ms +[2025-08-22 15:59:56] [Rank 0] step:5121/10000 train_time:473878ms step_avg:92.54ms +[2025-08-22 15:59:56] [Rank 0] step:5121/10000 train_time:473878ms step_avg:92.54ms +[2025-08-22 15:59:58] [Rank 0] step:5141/10000 train_time:475783ms step_avg:92.55ms +[2025-08-22 15:59:58] [Rank 0] step:5141/10000 train_time:475783ms step_avg:92.55ms +[2025-08-22 16:00:00] [Rank 0] step:5161/10000 train_time:477685ms step_avg:92.56ms +[2025-08-22 16:00:00] [Rank 0] step:5161/10000 train_time:477685ms step_avg:92.56ms +[2025-08-22 16:00:02] [Rank 0] step:5181/10000 train_time:479590ms step_avg:92.57ms +[2025-08-22 16:00:02] [Rank 0] step:5181/10000 train_time:479590ms step_avg:92.57ms +[2025-08-22 16:00:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:00:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:00:18] [Rank 0] PRINT: step:5200/10000 val_loss:4.0815 svd_entropy: attn_qk:H=0.5209,top10E=0.65,eRank=56.4,q75/q25=84.75 attn_vo:H=0.4381,top10E=0.73,eRank=65.5,q75/q25=112.99 mlp_w1:H=0.6136,top10E=0.49,eRank=118.5,q75/q25=25.27 mlp_w2:H=0.6103,top10E=0.47,eRank=123.0,q75/q25=21.67 vo_prod:H=0.3013,top10E=0.84,eRank=24.4,q75/q25=17996.52 train_time:481533ms step_avg:92.60ms +[2025-08-22 16:00:18] [Rank 0] PRINT: step:5200/10000 val_loss:4.0815 svd_entropy: attn_qk:H=0.5209,top10E=0.65,eRank=56.4,q75/q25=84.75 attn_vo:H=0.4381,top10E=0.73,eRank=65.5,q75/q25=112.99 mlp_w1:H=0.6136,top10E=0.49,eRank=118.5,q75/q25=25.27 mlp_w2:H=0.6103,top10E=0.47,eRank=123.0,q75/q25=21.67 vo_prod:H=0.3013,top10E=0.84,eRank=24.4,q75/q25=17996.52 train_time:481533ms step_avg:92.60ms +[2025-08-22 16:00:18] [Rank 0] step:5201/10000 train_time:481552ms step_avg:92.59ms +[2025-08-22 16:00:18] [Rank 0] step:5201/10000 train_time:481552ms step_avg:92.59ms +[2025-08-22 16:00:20] [Rank 0] step:5221/10000 train_time:483463ms step_avg:92.60ms +[2025-08-22 16:00:20] [Rank 0] step:5221/10000 train_time:483463ms step_avg:92.60ms +[2025-08-22 16:00:22] [Rank 0] step:5241/10000 train_time:485389ms step_avg:92.61ms +[2025-08-22 16:00:22] [Rank 0] step:5241/10000 train_time:485389ms step_avg:92.61ms +[2025-08-22 16:00:24] [Rank 0] step:5261/10000 train_time:487315ms step_avg:92.63ms +[2025-08-22 16:00:24] [Rank 0] step:5261/10000 train_time:487315ms step_avg:92.63ms +[2025-08-22 16:00:26] [Rank 0] step:5281/10000 train_time:489242ms step_avg:92.64ms +[2025-08-22 16:00:26] [Rank 0] step:5281/10000 train_time:489242ms step_avg:92.64ms +[2025-08-22 16:00:28] [Rank 0] step:5301/10000 train_time:491179ms step_avg:92.66ms +[2025-08-22 16:00:28] [Rank 0] step:5301/10000 train_time:491179ms step_avg:92.66ms +[2025-08-22 16:00:30] [Rank 0] step:5321/10000 train_time:493108ms step_avg:92.67ms +[2025-08-22 16:00:30] [Rank 0] step:5321/10000 train_time:493108ms step_avg:92.67ms +[2025-08-22 
16:00:32] [Rank 0] step:5341/10000 train_time:495040ms step_avg:92.69ms +[2025-08-22 16:00:32] [Rank 0] step:5341/10000 train_time:495040ms step_avg:92.69ms +[2025-08-22 16:00:33] [Rank 0] step:5361/10000 train_time:496974ms step_avg:92.70ms +[2025-08-22 16:00:33] [Rank 0] step:5361/10000 train_time:496974ms step_avg:92.70ms +[2025-08-22 16:00:35] [Rank 0] step:5381/10000 train_time:498906ms step_avg:92.72ms +[2025-08-22 16:00:35] [Rank 0] step:5381/10000 train_time:498906ms step_avg:92.72ms +[2025-08-22 16:00:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:00:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:00:51] [Rank 0] PRINT: step:5400/10000 val_loss:4.0692 svd_entropy: attn_qk:H=0.5214,top10E=0.65,eRank=56.8,q75/q25=84.54 attn_vo:H=0.4410,top10E=0.73,eRank=66.4,q75/q25=95.92 mlp_w1:H=0.6115,top10E=0.49,eRank=118.6,q75/q25=25.58 mlp_w2:H=0.6097,top10E=0.47,eRank=123.2,q75/q25=21.43 vo_prod:H=0.3088,top10E=0.84,eRank=25.0,q75/q25=8197.75 train_time:500849ms step_avg:92.75ms +[2025-08-22 16:00:51] [Rank 0] PRINT: step:5400/10000 val_loss:4.0692 svd_entropy: attn_qk:H=0.5214,top10E=0.65,eRank=56.8,q75/q25=84.54 attn_vo:H=0.4410,top10E=0.73,eRank=66.4,q75/q25=95.92 mlp_w1:H=0.6115,top10E=0.49,eRank=118.6,q75/q25=25.58 mlp_w2:H=0.6097,top10E=0.47,eRank=123.2,q75/q25=21.43 vo_prod:H=0.3088,top10E=0.84,eRank=25.0,q75/q25=8197.75 train_time:500849ms step_avg:92.75ms +[2025-08-22 16:00:51] [Rank 0] step:5401/10000 train_time:500867ms step_avg:92.74ms +[2025-08-22 16:00:51] [Rank 0] step:5401/10000 train_time:500867ms step_avg:92.74ms +[2025-08-22 16:00:53] [Rank 0] step:5421/10000 train_time:502795ms step_avg:92.75ms +[2025-08-22 16:00:53] [Rank 0] step:5421/10000 train_time:502795ms step_avg:92.75ms +[2025-08-22 16:00:55] [Rank 0] step:5441/10000 train_time:504718ms 
step_avg:92.76ms +[2025-08-22 16:00:55] [Rank 0] step:5441/10000 train_time:504718ms step_avg:92.76ms +[2025-08-22 16:00:57] [Rank 0] step:5461/10000 train_time:506646ms step_avg:92.78ms +[2025-08-22 16:00:57] [Rank 0] step:5461/10000 train_time:506646ms step_avg:92.78ms +[2025-08-22 16:00:59] [Rank 0] step:5481/10000 train_time:508572ms step_avg:92.79ms +[2025-08-22 16:00:59] [Rank 0] step:5481/10000 train_time:508572ms step_avg:92.79ms +[2025-08-22 16:01:01] [Rank 0] step:5501/10000 train_time:510506ms step_avg:92.80ms +[2025-08-22 16:01:01] [Rank 0] step:5501/10000 train_time:510506ms step_avg:92.80ms +[2025-08-22 16:01:03] [Rank 0] step:5521/10000 train_time:512439ms step_avg:92.82ms +[2025-08-22 16:01:03] [Rank 0] step:5521/10000 train_time:512439ms step_avg:92.82ms +[2025-08-22 16:01:05] [Rank 0] step:5541/10000 train_time:514370ms step_avg:92.83ms +[2025-08-22 16:01:05] [Rank 0] step:5541/10000 train_time:514370ms step_avg:92.83ms +[2025-08-22 16:01:07] [Rank 0] step:5561/10000 train_time:516301ms step_avg:92.84ms +[2025-08-22 16:01:07] [Rank 0] step:5561/10000 train_time:516301ms step_avg:92.84ms +[2025-08-22 16:01:09] [Rank 0] step:5581/10000 train_time:518231ms step_avg:92.86ms +[2025-08-22 16:01:09] [Rank 0] step:5581/10000 train_time:518231ms step_avg:92.86ms +[2025-08-22 16:01:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:01:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:01:24] [Rank 0] PRINT: step:5600/10000 val_loss:4.0635 svd_entropy: attn_qk:H=0.5222,top10E=0.65,eRank=57.2,q75/q25=82.30 attn_vo:H=0.4420,top10E=0.73,eRank=67.0,q75/q25=93.83 mlp_w1:H=0.6089,top10E=0.50,eRank=118.5,q75/q25=26.24 mlp_w2:H=0.6084,top10E=0.48,eRank=123.2,q75/q25=21.35 vo_prod:H=0.3077,top10E=0.83,eRank=25.2,q75/q25=7824.99 train_time:520178ms step_avg:92.89ms +[2025-08-22 16:01:24] [Rank 0] PRINT: step:5600/10000 val_loss:4.0635 svd_entropy: attn_qk:H=0.5222,top10E=0.65,eRank=57.2,q75/q25=82.30 attn_vo:H=0.4420,top10E=0.73,eRank=67.0,q75/q25=93.83 mlp_w1:H=0.6089,top10E=0.50,eRank=118.5,q75/q25=26.24 mlp_w2:H=0.6084,top10E=0.48,eRank=123.2,q75/q25=21.35 vo_prod:H=0.3077,top10E=0.83,eRank=25.2,q75/q25=7824.99 train_time:520178ms step_avg:92.89ms +[2025-08-22 16:01:24] [Rank 0] step:5601/10000 train_time:520195ms step_avg:92.88ms +[2025-08-22 16:01:24] [Rank 0] step:5601/10000 train_time:520195ms step_avg:92.88ms +[2025-08-22 16:01:26] [Rank 0] step:5621/10000 train_time:522111ms step_avg:92.89ms +[2025-08-22 16:01:26] [Rank 0] step:5621/10000 train_time:522111ms step_avg:92.89ms +[2025-08-22 16:01:28] [Rank 0] step:5641/10000 train_time:524037ms step_avg:92.90ms +[2025-08-22 16:01:28] [Rank 0] step:5641/10000 train_time:524037ms step_avg:92.90ms +[2025-08-22 16:01:30] [Rank 0] step:5661/10000 train_time:525962ms step_avg:92.91ms +[2025-08-22 16:01:30] [Rank 0] step:5661/10000 train_time:525962ms step_avg:92.91ms +[2025-08-22 16:01:32] [Rank 0] step:5681/10000 train_time:527892ms step_avg:92.92ms +[2025-08-22 16:01:32] [Rank 0] step:5681/10000 train_time:527892ms step_avg:92.92ms +[2025-08-22 16:01:34] [Rank 0] step:5701/10000 train_time:529820ms step_avg:92.93ms +[2025-08-22 16:01:34] [Rank 0] step:5701/10000 train_time:529820ms step_avg:92.93ms +[2025-08-22 16:01:36] [Rank 0] step:5721/10000 train_time:531755ms step_avg:92.95ms +[2025-08-22 16:01:36] [Rank 0] step:5721/10000 train_time:531755ms step_avg:92.95ms +[2025-08-22 16:01:38] 
[Rank 0] step:5741/10000 train_time:533684ms step_avg:92.96ms +[2025-08-22 16:01:38] [Rank 0] step:5741/10000 train_time:533684ms step_avg:92.96ms +[2025-08-22 16:01:40] [Rank 0] step:5761/10000 train_time:535617ms step_avg:92.97ms +[2025-08-22 16:01:40] [Rank 0] step:5761/10000 train_time:535617ms step_avg:92.97ms +[2025-08-22 16:01:42] [Rank 0] step:5781/10000 train_time:537550ms step_avg:92.99ms +[2025-08-22 16:01:42] [Rank 0] step:5781/10000 train_time:537550ms step_avg:92.99ms +[2025-08-22 16:01:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:01:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:01:57] [Rank 0] PRINT: step:5800/10000 val_loss:4.0599 svd_entropy: attn_qk:H=0.5219,top10E=0.65,eRank=57.6,q75/q25=81.84 attn_vo:H=0.4431,top10E=0.73,eRank=67.7,q75/q25=94.28 mlp_w1:H=0.6080,top10E=0.50,eRank=118.7,q75/q25=26.08 mlp_w2:H=0.6080,top10E=0.48,eRank=123.4,q75/q25=21.12 vo_prod:H=0.3083,top10E=0.83,eRank=25.6,q75/q25=7927.03 train_time:539500ms step_avg:93.02ms +[2025-08-22 16:01:57] [Rank 0] PRINT: step:5800/10000 val_loss:4.0599 svd_entropy: attn_qk:H=0.5219,top10E=0.65,eRank=57.6,q75/q25=81.84 attn_vo:H=0.4431,top10E=0.73,eRank=67.7,q75/q25=94.28 mlp_w1:H=0.6080,top10E=0.50,eRank=118.7,q75/q25=26.08 mlp_w2:H=0.6080,top10E=0.48,eRank=123.4,q75/q25=21.12 vo_prod:H=0.3083,top10E=0.83,eRank=25.6,q75/q25=7927.03 train_time:539500ms step_avg:93.02ms +[2025-08-22 16:01:57] [Rank 0] step:5801/10000 train_time:539518ms step_avg:93.00ms +[2025-08-22 16:01:57] [Rank 0] step:5801/10000 train_time:539518ms step_avg:93.00ms +[2025-08-22 16:01:59] [Rank 0] step:5821/10000 train_time:541422ms step_avg:93.01ms +[2025-08-22 16:01:59] [Rank 0] step:5821/10000 train_time:541422ms step_avg:93.01ms +[2025-08-22 16:02:01] [Rank 0] step:5841/10000 train_time:543350ms step_avg:93.02ms 
+[2025-08-22 16:02:01] [Rank 0] step:5841/10000 train_time:543350ms step_avg:93.02ms +[2025-08-22 16:02:03] [Rank 0] step:5861/10000 train_time:545284ms step_avg:93.04ms +[2025-08-22 16:02:03] [Rank 0] step:5861/10000 train_time:545284ms step_avg:93.04ms +[2025-08-22 16:02:05] [Rank 0] step:5881/10000 train_time:547218ms step_avg:93.05ms +[2025-08-22 16:02:05] [Rank 0] step:5881/10000 train_time:547218ms step_avg:93.05ms +[2025-08-22 16:02:07] [Rank 0] step:5901/10000 train_time:549150ms step_avg:93.06ms +[2025-08-22 16:02:07] [Rank 0] step:5901/10000 train_time:549150ms step_avg:93.06ms +[2025-08-22 16:02:09] [Rank 0] step:5921/10000 train_time:551083ms step_avg:93.07ms +[2025-08-22 16:02:09] [Rank 0] step:5921/10000 train_time:551083ms step_avg:93.07ms +[2025-08-22 16:02:11] [Rank 0] step:5941/10000 train_time:553020ms step_avg:93.09ms +[2025-08-22 16:02:11] [Rank 0] step:5941/10000 train_time:553020ms step_avg:93.09ms +[2025-08-22 16:02:13] [Rank 0] step:5961/10000 train_time:554955ms step_avg:93.10ms +[2025-08-22 16:02:13] [Rank 0] step:5961/10000 train_time:554955ms step_avg:93.10ms +[2025-08-22 16:02:15] [Rank 0] step:5981/10000 train_time:556890ms step_avg:93.11ms +[2025-08-22 16:02:15] [Rank 0] step:5981/10000 train_time:556890ms step_avg:93.11ms +[2025-08-22 16:02:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:02:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:02:30] [Rank 0] PRINT: step:6000/10000 val_loss:4.0398 svd_entropy: attn_qk:H=0.5215,top10E=0.65,eRank=57.9,q75/q25=81.26 attn_vo:H=0.4451,top10E=0.72,eRank=68.4,q75/q25=95.68 mlp_w1:H=0.6063,top10E=0.50,eRank=118.9,q75/q25=26.30 mlp_w2:H=0.6073,top10E=0.48,eRank=123.4,q75/q25=20.67 vo_prod:H=0.3124,top10E=0.83,eRank=25.9,q75/q25=8656.56 train_time:558857ms step_avg:93.14ms +[2025-08-22 16:02:30] [Rank 0] PRINT: step:6000/10000 val_loss:4.0398 svd_entropy: attn_qk:H=0.5215,top10E=0.65,eRank=57.9,q75/q25=81.26 attn_vo:H=0.4451,top10E=0.72,eRank=68.4,q75/q25=95.68 mlp_w1:H=0.6063,top10E=0.50,eRank=118.9,q75/q25=26.30 mlp_w2:H=0.6073,top10E=0.48,eRank=123.4,q75/q25=20.67 vo_prod:H=0.3124,top10E=0.83,eRank=25.9,q75/q25=8656.56 train_time:558857ms step_avg:93.14ms +[2025-08-22 16:02:30] [Rank 0] step:6001/10000 train_time:558875ms step_avg:93.13ms +[2025-08-22 16:02:30] [Rank 0] step:6001/10000 train_time:558875ms step_avg:93.13ms +[2025-08-22 16:02:32] [Rank 0] step:6021/10000 train_time:560796ms step_avg:93.14ms +[2025-08-22 16:02:32] [Rank 0] step:6021/10000 train_time:560796ms step_avg:93.14ms +[2025-08-22 16:02:34] [Rank 0] step:6041/10000 train_time:562732ms step_avg:93.15ms +[2025-08-22 16:02:34] [Rank 0] step:6041/10000 train_time:562732ms step_avg:93.15ms +[2025-08-22 16:02:36] [Rank 0] step:6061/10000 train_time:564669ms step_avg:93.16ms +[2025-08-22 16:02:36] [Rank 0] step:6061/10000 train_time:564669ms step_avg:93.16ms +[2025-08-22 16:02:38] [Rank 0] step:6081/10000 train_time:566603ms step_avg:93.18ms +[2025-08-22 16:02:38] [Rank 0] step:6081/10000 train_time:566603ms step_avg:93.18ms +[2025-08-22 16:02:40] [Rank 0] step:6101/10000 train_time:568546ms step_avg:93.19ms +[2025-08-22 16:02:40] [Rank 0] step:6101/10000 train_time:568546ms step_avg:93.19ms +[2025-08-22 16:02:42] [Rank 0] step:6121/10000 train_time:570750ms step_avg:93.24ms +[2025-08-22 16:02:42] [Rank 0] step:6121/10000 train_time:570750ms step_avg:93.24ms +[2025-08-22 16:02:44] 
[Rank 0] step:6141/10000 train_time:572698ms step_avg:93.26ms +[2025-08-22 16:02:44] [Rank 0] step:6141/10000 train_time:572698ms step_avg:93.26ms +[2025-08-22 16:02:46] [Rank 0] step:6161/10000 train_time:574637ms step_avg:93.27ms +[2025-08-22 16:02:46] [Rank 0] step:6161/10000 train_time:574637ms step_avg:93.27ms +[2025-08-22 16:02:48] [Rank 0] step:6181/10000 train_time:576574ms step_avg:93.28ms +[2025-08-22 16:02:48] [Rank 0] step:6181/10000 train_time:576574ms step_avg:93.28ms +[2025-08-22 16:02:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:02:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:03:04] [Rank 0] PRINT: step:6200/10000 val_loss:4.0284 svd_entropy: attn_qk:H=0.5216,top10E=0.65,eRank=58.2,q75/q25=80.35 attn_vo:H=0.4462,top10E=0.72,eRank=69.1,q75/q25=98.49 mlp_w1:H=0.6056,top10E=0.50,eRank=119.2,q75/q25=26.25 mlp_w2:H=0.6064,top10E=0.48,eRank=123.5,q75/q25=20.42 vo_prod:H=0.3167,top10E=0.83,eRank=26.3,q75/q25=9277.77 train_time:578528ms step_avg:93.31ms +[2025-08-22 16:03:04] [Rank 0] PRINT: step:6200/10000 val_loss:4.0284 svd_entropy: attn_qk:H=0.5216,top10E=0.65,eRank=58.2,q75/q25=80.35 attn_vo:H=0.4462,top10E=0.72,eRank=69.1,q75/q25=98.49 mlp_w1:H=0.6056,top10E=0.50,eRank=119.2,q75/q25=26.25 mlp_w2:H=0.6064,top10E=0.48,eRank=123.5,q75/q25=20.42 vo_prod:H=0.3167,top10E=0.83,eRank=26.3,q75/q25=9277.77 train_time:578528ms step_avg:93.31ms +[2025-08-22 16:03:04] [Rank 0] step:6201/10000 train_time:578547ms step_avg:93.30ms +[2025-08-22 16:03:04] [Rank 0] step:6201/10000 train_time:578547ms step_avg:93.30ms +[2025-08-22 16:03:06] [Rank 0] step:6221/10000 train_time:580458ms step_avg:93.31ms +[2025-08-22 16:03:06] [Rank 0] step:6221/10000 train_time:580458ms step_avg:93.31ms +[2025-08-22 16:03:08] [Rank 0] step:6241/10000 train_time:582386ms step_avg:93.32ms 
+[2025-08-22 16:03:08] [Rank 0] step:6241/10000 train_time:582386ms step_avg:93.32ms +[2025-08-22 16:03:10] [Rank 0] step:6261/10000 train_time:584319ms step_avg:93.33ms +[2025-08-22 16:03:10] [Rank 0] step:6261/10000 train_time:584319ms step_avg:93.33ms +[2025-08-22 16:03:12] [Rank 0] step:6281/10000 train_time:586256ms step_avg:93.34ms +[2025-08-22 16:03:12] [Rank 0] step:6281/10000 train_time:586256ms step_avg:93.34ms +[2025-08-22 16:03:14] [Rank 0] step:6301/10000 train_time:588193ms step_avg:93.35ms +[2025-08-22 16:03:14] [Rank 0] step:6301/10000 train_time:588193ms step_avg:93.35ms +[2025-08-22 16:03:15] [Rank 0] step:6321/10000 train_time:590131ms step_avg:93.36ms +[2025-08-22 16:03:15] [Rank 0] step:6321/10000 train_time:590131ms step_avg:93.36ms +[2025-08-22 16:03:17] [Rank 0] step:6341/10000 train_time:592065ms step_avg:93.37ms +[2025-08-22 16:03:17] [Rank 0] step:6341/10000 train_time:592065ms step_avg:93.37ms +[2025-08-22 16:03:19] [Rank 0] step:6361/10000 train_time:594058ms step_avg:93.39ms +[2025-08-22 16:03:19] [Rank 0] step:6361/10000 train_time:594058ms step_avg:93.39ms +[2025-08-22 16:03:21] [Rank 0] step:6381/10000 train_time:596066ms step_avg:93.41ms +[2025-08-22 16:03:21] [Rank 0] step:6381/10000 train_time:596066ms step_avg:93.41ms +[2025-08-22 16:03:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:03:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:03:37] [Rank 0] PRINT: step:6400/10000 val_loss:4.0188 svd_entropy: attn_qk:H=0.5221,top10E=0.65,eRank=58.6,q75/q25=78.40 attn_vo:H=0.4489,top10E=0.72,eRank=69.8,q75/q25=98.46 mlp_w1:H=0.6048,top10E=0.50,eRank=119.3,q75/q25=26.11 mlp_w2:H=0.6051,top10E=0.48,eRank=123.4,q75/q25=20.28 vo_prod:H=0.3176,top10E=0.83,eRank=26.5,q75/q25=9223.95 train_time:598013ms step_avg:93.44ms +[2025-08-22 16:03:37] [Rank 0] PRINT: step:6400/10000 val_loss:4.0188 svd_entropy: attn_qk:H=0.5221,top10E=0.65,eRank=58.6,q75/q25=78.40 attn_vo:H=0.4489,top10E=0.72,eRank=69.8,q75/q25=98.46 mlp_w1:H=0.6048,top10E=0.50,eRank=119.3,q75/q25=26.11 mlp_w2:H=0.6051,top10E=0.48,eRank=123.4,q75/q25=20.28 vo_prod:H=0.3176,top10E=0.83,eRank=26.5,q75/q25=9223.95 train_time:598013ms step_avg:93.44ms +[2025-08-22 16:03:37] [Rank 0] step:6401/10000 train_time:598032ms step_avg:93.43ms +[2025-08-22 16:03:37] [Rank 0] step:6401/10000 train_time:598032ms step_avg:93.43ms +[2025-08-22 16:03:39] [Rank 0] step:6421/10000 train_time:599955ms step_avg:93.44ms +[2025-08-22 16:03:39] [Rank 0] step:6421/10000 train_time:599955ms step_avg:93.44ms +[2025-08-22 16:03:41] [Rank 0] step:6441/10000 train_time:601887ms step_avg:93.45ms +[2025-08-22 16:03:41] [Rank 0] step:6441/10000 train_time:601887ms step_avg:93.45ms +[2025-08-22 16:03:43] [Rank 0] step:6461/10000 train_time:603824ms step_avg:93.46ms +[2025-08-22 16:03:43] [Rank 0] step:6461/10000 train_time:603824ms step_avg:93.46ms +[2025-08-22 16:03:45] [Rank 0] step:6481/10000 train_time:605764ms step_avg:93.47ms +[2025-08-22 16:03:45] [Rank 0] step:6481/10000 train_time:605764ms step_avg:93.47ms +[2025-08-22 16:03:47] [Rank 0] step:6501/10000 train_time:607695ms step_avg:93.48ms +[2025-08-22 16:03:47] [Rank 0] step:6501/10000 train_time:607695ms step_avg:93.48ms +[2025-08-22 16:03:49] [Rank 0] step:6521/10000 train_time:609625ms step_avg:93.49ms +[2025-08-22 16:03:49] [Rank 0] step:6521/10000 train_time:609625ms step_avg:93.49ms +[2025-08-22 16:03:51] 
[Rank 0] step:6541/10000 train_time:611560ms step_avg:93.50ms +[2025-08-22 16:03:51] [Rank 0] step:6541/10000 train_time:611560ms step_avg:93.50ms +[2025-08-22 16:03:53] [Rank 0] step:6561/10000 train_time:613499ms step_avg:93.51ms +[2025-08-22 16:03:53] [Rank 0] step:6561/10000 train_time:613499ms step_avg:93.51ms +[2025-08-22 16:03:55] [Rank 0] step:6581/10000 train_time:615432ms step_avg:93.52ms +[2025-08-22 16:03:55] [Rank 0] step:6581/10000 train_time:615432ms step_avg:93.52ms +[2025-08-22 16:03:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:03:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:04:10] [Rank 0] PRINT: step:6600/10000 val_loss:4.0043 svd_entropy: attn_qk:H=0.5232,top10E=0.65,eRank=59.1,q75/q25=77.42 attn_vo:H=0.4506,top10E=0.72,eRank=70.3,q75/q25=98.17 mlp_w1:H=0.6035,top10E=0.50,eRank=119.4,q75/q25=26.20 mlp_w2:H=0.6045,top10E=0.48,eRank=123.4,q75/q25=20.00 vo_prod:H=0.3205,top10E=0.83,eRank=26.8,q75/q25=9633.33 train_time:617383ms step_avg:93.54ms +[2025-08-22 16:04:10] [Rank 0] PRINT: step:6600/10000 val_loss:4.0043 svd_entropy: attn_qk:H=0.5232,top10E=0.65,eRank=59.1,q75/q25=77.42 attn_vo:H=0.4506,top10E=0.72,eRank=70.3,q75/q25=98.17 mlp_w1:H=0.6035,top10E=0.50,eRank=119.4,q75/q25=26.20 mlp_w2:H=0.6045,top10E=0.48,eRank=123.4,q75/q25=20.00 vo_prod:H=0.3205,top10E=0.83,eRank=26.8,q75/q25=9633.33 train_time:617383ms step_avg:93.54ms +[2025-08-22 16:04:11] [Rank 0] step:6601/10000 train_time:617402ms step_avg:93.53ms +[2025-08-22 16:04:11] [Rank 0] step:6601/10000 train_time:617402ms step_avg:93.53ms +[2025-08-22 16:04:12] [Rank 0] step:6621/10000 train_time:619330ms step_avg:93.54ms +[2025-08-22 16:04:12] [Rank 0] step:6621/10000 train_time:619330ms step_avg:93.54ms +[2025-08-22 16:04:14] [Rank 0] step:6641/10000 train_time:621269ms step_avg:93.55ms 
+[2025-08-22 16:04:14] [Rank 0] step:6641/10000 train_time:621269ms step_avg:93.55ms +[2025-08-22 16:04:16] [Rank 0] step:6661/10000 train_time:623203ms step_avg:93.56ms +[2025-08-22 16:04:16] [Rank 0] step:6661/10000 train_time:623203ms step_avg:93.56ms +[2025-08-22 16:04:18] [Rank 0] step:6681/10000 train_time:625153ms step_avg:93.57ms +[2025-08-22 16:04:18] [Rank 0] step:6681/10000 train_time:625153ms step_avg:93.57ms +[2025-08-22 16:04:20] [Rank 0] step:6701/10000 train_time:627125ms step_avg:93.59ms +[2025-08-22 16:04:20] [Rank 0] step:6701/10000 train_time:627125ms step_avg:93.59ms +[2025-08-22 16:04:22] [Rank 0] step:6721/10000 train_time:629145ms step_avg:93.61ms +[2025-08-22 16:04:22] [Rank 0] step:6721/10000 train_time:629145ms step_avg:93.61ms +[2025-08-22 16:04:24] [Rank 0] step:6741/10000 train_time:631182ms step_avg:93.63ms +[2025-08-22 16:04:24] [Rank 0] step:6741/10000 train_time:631182ms step_avg:93.63ms +[2025-08-22 16:04:26] [Rank 0] step:6761/10000 train_time:633144ms step_avg:93.65ms +[2025-08-22 16:04:26] [Rank 0] step:6761/10000 train_time:633144ms step_avg:93.65ms +[2025-08-22 16:04:28] [Rank 0] step:6781/10000 train_time:635111ms step_avg:93.66ms +[2025-08-22 16:04:28] [Rank 0] step:6781/10000 train_time:635111ms step_avg:93.66ms +[2025-08-22 16:04:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:04:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:04:44] [Rank 0] PRINT: step:6800/10000 val_loss:3.9913 svd_entropy: attn_qk:H=0.5236,top10E=0.65,eRank=59.4,q75/q25=76.79 attn_vo:H=0.4516,top10E=0.72,eRank=70.8,q75/q25=99.73 mlp_w1:H=0.6017,top10E=0.51,eRank=119.4,q75/q25=26.51 mlp_w2:H=0.6038,top10E=0.48,eRank=123.5,q75/q25=19.53 vo_prod:H=0.3219,top10E=0.83,eRank=27.0,q75/q25=9494.61 train_time:637094ms step_avg:93.69ms +[2025-08-22 16:04:44] [Rank 0] PRINT: step:6800/10000 val_loss:3.9913 svd_entropy: attn_qk:H=0.5236,top10E=0.65,eRank=59.4,q75/q25=76.79 attn_vo:H=0.4516,top10E=0.72,eRank=70.8,q75/q25=99.73 mlp_w1:H=0.6017,top10E=0.51,eRank=119.4,q75/q25=26.51 mlp_w2:H=0.6038,top10E=0.48,eRank=123.5,q75/q25=19.53 vo_prod:H=0.3219,top10E=0.83,eRank=27.0,q75/q25=9494.61 train_time:637094ms step_avg:93.69ms +[2025-08-22 16:04:44] [Rank 0] step:6801/10000 train_time:637113ms step_avg:93.68ms +[2025-08-22 16:04:44] [Rank 0] step:6801/10000 train_time:637113ms step_avg:93.68ms +[2025-08-22 16:04:46] [Rank 0] step:6821/10000 train_time:639056ms step_avg:93.69ms +[2025-08-22 16:04:46] [Rank 0] step:6821/10000 train_time:639056ms step_avg:93.69ms +[2025-08-22 16:04:48] [Rank 0] step:6841/10000 train_time:641021ms step_avg:93.70ms +[2025-08-22 16:04:48] [Rank 0] step:6841/10000 train_time:641021ms step_avg:93.70ms +[2025-08-22 16:04:50] [Rank 0] step:6861/10000 train_time:642981ms step_avg:93.72ms +[2025-08-22 16:04:50] [Rank 0] step:6861/10000 train_time:642981ms step_avg:93.72ms +[2025-08-22 16:04:52] [Rank 0] step:6881/10000 train_time:644952ms step_avg:93.73ms +[2025-08-22 16:04:52] [Rank 0] step:6881/10000 train_time:644952ms step_avg:93.73ms +[2025-08-22 16:04:54] [Rank 0] step:6901/10000 train_time:646915ms step_avg:93.74ms +[2025-08-22 16:04:54] [Rank 0] step:6901/10000 train_time:646915ms step_avg:93.74ms +[2025-08-22 16:04:56] [Rank 0] step:6921/10000 train_time:648877ms step_avg:93.75ms +[2025-08-22 16:04:56] [Rank 0] step:6921/10000 train_time:648877ms step_avg:93.75ms +[2025-08-22 16:04:58] 
[Rank 0] step:6941/10000 train_time:650851ms step_avg:93.77ms +[2025-08-22 16:04:58] [Rank 0] step:6941/10000 train_time:650851ms step_avg:93.77ms +[2025-08-22 16:05:00] [Rank 0] step:6961/10000 train_time:652832ms step_avg:93.78ms +[2025-08-22 16:05:00] [Rank 0] step:6961/10000 train_time:652832ms step_avg:93.78ms +[2025-08-22 16:05:02] [Rank 0] step:6981/10000 train_time:654807ms step_avg:93.80ms +[2025-08-22 16:05:02] [Rank 0] step:6981/10000 train_time:654807ms step_avg:93.80ms +[2025-08-22 16:05:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:05:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:05:18] [Rank 0] PRINT: step:7000/10000 val_loss:3.9782 svd_entropy: attn_qk:H=0.5237,top10E=0.65,eRank=59.6,q75/q25=76.40 attn_vo:H=0.4530,top10E=0.72,eRank=71.4,q75/q25=100.46 mlp_w1:H=0.6024,top10E=0.50,eRank=119.8,q75/q25=26.02 mlp_w2:H=0.6038,top10E=0.48,eRank=123.6,q75/q25=19.32 vo_prod:H=0.3258,top10E=0.83,eRank=27.3,q75/q25=9841.69 train_time:656798ms step_avg:93.83ms +[2025-08-22 16:05:18] [Rank 0] PRINT: step:7000/10000 val_loss:3.9782 svd_entropy: attn_qk:H=0.5237,top10E=0.65,eRank=59.6,q75/q25=76.40 attn_vo:H=0.4530,top10E=0.72,eRank=71.4,q75/q25=100.46 mlp_w1:H=0.6024,top10E=0.50,eRank=119.8,q75/q25=26.02 mlp_w2:H=0.6038,top10E=0.48,eRank=123.6,q75/q25=19.32 vo_prod:H=0.3258,top10E=0.83,eRank=27.3,q75/q25=9841.69 train_time:656798ms step_avg:93.83ms +[2025-08-22 16:05:18] [Rank 0] step:7001/10000 train_time:656816ms step_avg:93.82ms +[2025-08-22 16:05:18] [Rank 0] step:7001/10000 train_time:656816ms step_avg:93.82ms +[2025-08-22 16:05:20] [Rank 0] step:7021/10000 train_time:658776ms step_avg:93.83ms +[2025-08-22 16:05:20] [Rank 0] step:7021/10000 train_time:658776ms step_avg:93.83ms +[2025-08-22 16:05:22] [Rank 0] step:7041/10000 train_time:660737ms step_avg:93.84ms 
+[2025-08-22 16:05:22] [Rank 0] step:7041/10000 train_time:660737ms step_avg:93.84ms +[2025-08-22 16:05:24] [Rank 0] step:7061/10000 train_time:662700ms step_avg:93.85ms +[2025-08-22 16:05:24] [Rank 0] step:7061/10000 train_time:662700ms step_avg:93.85ms +[2025-08-22 16:05:26] [Rank 0] step:7081/10000 train_time:664718ms step_avg:93.87ms +[2025-08-22 16:05:26] [Rank 0] step:7081/10000 train_time:664718ms step_avg:93.87ms +[2025-08-22 16:05:28] [Rank 0] step:7101/10000 train_time:666759ms step_avg:93.90ms +[2025-08-22 16:05:28] [Rank 0] step:7101/10000 train_time:666759ms step_avg:93.90ms +[2025-08-22 16:05:30] [Rank 0] step:7121/10000 train_time:668719ms step_avg:93.91ms +[2025-08-22 16:05:30] [Rank 0] step:7121/10000 train_time:668719ms step_avg:93.91ms +[2025-08-22 16:05:32] [Rank 0] step:7141/10000 train_time:670680ms step_avg:93.92ms +[2025-08-22 16:05:32] [Rank 0] step:7141/10000 train_time:670680ms step_avg:93.92ms +[2025-08-22 16:05:34] [Rank 0] step:7161/10000 train_time:672647ms step_avg:93.93ms +[2025-08-22 16:05:34] [Rank 0] step:7161/10000 train_time:672647ms step_avg:93.93ms +[2025-08-22 16:05:36] [Rank 0] step:7181/10000 train_time:674614ms step_avg:93.94ms +[2025-08-22 16:05:36] [Rank 0] step:7181/10000 train_time:674614ms step_avg:93.94ms +[2025-08-22 16:05:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:05:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:05:51] [Rank 0] PRINT: step:7200/10000 val_loss:3.9690 svd_entropy: attn_qk:H=0.5244,top10E=0.65,eRank=60.0,q75/q25=75.78 attn_vo:H=0.4528,top10E=0.72,eRank=71.7,q75/q25=100.24 mlp_w1:H=0.6023,top10E=0.50,eRank=120.1,q75/q25=26.16 mlp_w2:H=0.6036,top10E=0.48,eRank=123.6,q75/q25=19.18 vo_prod:H=0.3266,top10E=0.83,eRank=27.4,q75/q25=9927.23 train_time:676597ms step_avg:93.97ms +[2025-08-22 16:05:51] [Rank 0] PRINT: step:7200/10000 val_loss:3.9690 svd_entropy: attn_qk:H=0.5244,top10E=0.65,eRank=60.0,q75/q25=75.78 attn_vo:H=0.4528,top10E=0.72,eRank=71.7,q75/q25=100.24 mlp_w1:H=0.6023,top10E=0.50,eRank=120.1,q75/q25=26.16 mlp_w2:H=0.6036,top10E=0.48,eRank=123.6,q75/q25=19.18 vo_prod:H=0.3266,top10E=0.83,eRank=27.4,q75/q25=9927.23 train_time:676597ms step_avg:93.97ms +[2025-08-22 16:05:52] [Rank 0] step:7201/10000 train_time:676616ms step_avg:93.96ms +[2025-08-22 16:05:52] [Rank 0] step:7201/10000 train_time:676616ms step_avg:93.96ms +[2025-08-22 16:05:53] [Rank 0] step:7221/10000 train_time:678578ms step_avg:93.97ms +[2025-08-22 16:05:53] [Rank 0] step:7221/10000 train_time:678578ms step_avg:93.97ms +[2025-08-22 16:05:55] [Rank 0] step:7241/10000 train_time:680535ms step_avg:93.98ms +[2025-08-22 16:05:55] [Rank 0] step:7241/10000 train_time:680535ms step_avg:93.98ms +[2025-08-22 16:05:57] [Rank 0] step:7261/10000 train_time:682492ms step_avg:93.99ms +[2025-08-22 16:05:57] [Rank 0] step:7261/10000 train_time:682492ms step_avg:93.99ms +[2025-08-22 16:05:59] [Rank 0] step:7281/10000 train_time:684462ms step_avg:94.01ms +[2025-08-22 16:05:59] [Rank 0] step:7281/10000 train_time:684462ms step_avg:94.01ms +[2025-08-22 16:06:01] [Rank 0] step:7301/10000 train_time:686423ms step_avg:94.02ms +[2025-08-22 16:06:01] [Rank 0] step:7301/10000 train_time:686423ms step_avg:94.02ms +[2025-08-22 16:06:03] [Rank 0] step:7321/10000 train_time:688402ms step_avg:94.03ms +[2025-08-22 16:06:03] [Rank 0] step:7321/10000 train_time:688402ms step_avg:94.03ms +[2025-08-22 
16:06:05] [Rank 0] step:7341/10000 train_time:690367ms step_avg:94.04ms +[2025-08-22 16:06:05] [Rank 0] step:7341/10000 train_time:690367ms step_avg:94.04ms +[2025-08-22 16:06:07] [Rank 0] step:7361/10000 train_time:692343ms step_avg:94.06ms +[2025-08-22 16:06:07] [Rank 0] step:7361/10000 train_time:692343ms step_avg:94.06ms +[2025-08-22 16:06:09] [Rank 0] step:7381/10000 train_time:694316ms step_avg:94.07ms +[2025-08-22 16:06:09] [Rank 0] step:7381/10000 train_time:694316ms step_avg:94.07ms +[2025-08-22 16:06:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:06:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:06:25] [Rank 0] PRINT: step:7400/10000 val_loss:3.9512 svd_entropy: attn_qk:H=0.5243,top10E=0.65,eRank=60.2,q75/q25=75.01 attn_vo:H=0.4526,top10E=0.71,eRank=72.0,q75/q25=100.01 mlp_w1:H=0.6017,top10E=0.50,eRank=120.3,q75/q25=25.53 mlp_w2:H=0.6034,top10E=0.49,eRank=123.8,q75/q25=19.12 vo_prod:H=0.3246,top10E=0.83,eRank=27.5,q75/q25=10125.36 train_time:696282ms step_avg:94.09ms +[2025-08-22 16:06:25] [Rank 0] PRINT: step:7400/10000 val_loss:3.9512 svd_entropy: attn_qk:H=0.5243,top10E=0.65,eRank=60.2,q75/q25=75.01 attn_vo:H=0.4526,top10E=0.71,eRank=72.0,q75/q25=100.01 mlp_w1:H=0.6017,top10E=0.50,eRank=120.3,q75/q25=25.53 mlp_w2:H=0.6034,top10E=0.49,eRank=123.8,q75/q25=19.12 vo_prod:H=0.3246,top10E=0.83,eRank=27.5,q75/q25=10125.36 train_time:696282ms step_avg:94.09ms +[2025-08-22 16:06:25] [Rank 0] step:7401/10000 train_time:696300ms step_avg:94.08ms +[2025-08-22 16:06:25] [Rank 0] step:7401/10000 train_time:696300ms step_avg:94.08ms +[2025-08-22 16:06:27] [Rank 0] step:7421/10000 train_time:698256ms step_avg:94.09ms +[2025-08-22 16:06:27] [Rank 0] step:7421/10000 train_time:698256ms step_avg:94.09ms +[2025-08-22 16:06:29] [Rank 0] step:7441/10000 train_time:700278ms 
step_avg:94.11ms +[2025-08-22 16:06:29] [Rank 0] step:7441/10000 train_time:700278ms step_avg:94.11ms +[2025-08-22 16:06:31] [Rank 0] step:7461/10000 train_time:702288ms step_avg:94.13ms +[2025-08-22 16:06:31] [Rank 0] step:7461/10000 train_time:702288ms step_avg:94.13ms +[2025-08-22 16:06:33] [Rank 0] step:7481/10000 train_time:704259ms step_avg:94.14ms +[2025-08-22 16:06:33] [Rank 0] step:7481/10000 train_time:704259ms step_avg:94.14ms +[2025-08-22 16:06:35] [Rank 0] step:7501/10000 train_time:706228ms step_avg:94.15ms +[2025-08-22 16:06:35] [Rank 0] step:7501/10000 train_time:706228ms step_avg:94.15ms +[2025-08-22 16:06:37] [Rank 0] step:7521/10000 train_time:708199ms step_avg:94.16ms +[2025-08-22 16:06:37] [Rank 0] step:7521/10000 train_time:708199ms step_avg:94.16ms +[2025-08-22 16:06:39] [Rank 0] step:7541/10000 train_time:710178ms step_avg:94.18ms +[2025-08-22 16:06:39] [Rank 0] step:7541/10000 train_time:710178ms step_avg:94.18ms +[2025-08-22 16:06:41] [Rank 0] step:7561/10000 train_time:712140ms step_avg:94.19ms +[2025-08-22 16:06:41] [Rank 0] step:7561/10000 train_time:712140ms step_avg:94.19ms +[2025-08-22 16:06:43] [Rank 0] step:7581/10000 train_time:714121ms step_avg:94.20ms +[2025-08-22 16:06:43] [Rank 0] step:7581/10000 train_time:714121ms step_avg:94.20ms +[2025-08-22 16:06:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:06:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:06:59] [Rank 0] PRINT: step:7600/10000 val_loss:3.9445 svd_entropy: attn_qk:H=0.5244,top10E=0.65,eRank=60.5,q75/q25=75.05 attn_vo:H=0.4530,top10E=0.71,eRank=72.4,q75/q25=99.63 mlp_w1:H=0.6023,top10E=0.50,eRank=120.6,q75/q25=25.64 mlp_w2:H=0.6037,top10E=0.48,eRank=124.0,q75/q25=18.77 vo_prod:H=0.3251,top10E=0.83,eRank=27.7,q75/q25=10041.12 train_time:716119ms step_avg:94.23ms +[2025-08-22 16:06:59] [Rank 0] PRINT: step:7600/10000 val_loss:3.9445 svd_entropy: attn_qk:H=0.5244,top10E=0.65,eRank=60.5,q75/q25=75.05 attn_vo:H=0.4530,top10E=0.71,eRank=72.4,q75/q25=99.63 mlp_w1:H=0.6023,top10E=0.50,eRank=120.6,q75/q25=25.64 mlp_w2:H=0.6037,top10E=0.48,eRank=124.0,q75/q25=18.77 vo_prod:H=0.3251,top10E=0.83,eRank=27.7,q75/q25=10041.12 train_time:716119ms step_avg:94.23ms +[2025-08-22 16:06:59] [Rank 0] step:7601/10000 train_time:716138ms step_avg:94.22ms +[2025-08-22 16:06:59] [Rank 0] step:7601/10000 train_time:716138ms step_avg:94.22ms +[2025-08-22 16:07:01] [Rank 0] step:7621/10000 train_time:718086ms step_avg:94.22ms +[2025-08-22 16:07:01] [Rank 0] step:7621/10000 train_time:718086ms step_avg:94.22ms +[2025-08-22 16:07:03] [Rank 0] step:7641/10000 train_time:720053ms step_avg:94.24ms +[2025-08-22 16:07:03] [Rank 0] step:7641/10000 train_time:720053ms step_avg:94.24ms +[2025-08-22 16:07:05] [Rank 0] step:7661/10000 train_time:722026ms step_avg:94.25ms +[2025-08-22 16:07:05] [Rank 0] step:7661/10000 train_time:722026ms step_avg:94.25ms +[2025-08-22 16:07:07] [Rank 0] step:7681/10000 train_time:723993ms step_avg:94.26ms +[2025-08-22 16:07:07] [Rank 0] step:7681/10000 train_time:723993ms step_avg:94.26ms +[2025-08-22 16:07:09] [Rank 0] step:7701/10000 train_time:725962ms step_avg:94.27ms +[2025-08-22 16:07:09] [Rank 0] step:7701/10000 train_time:725962ms step_avg:94.27ms +[2025-08-22 16:07:11] [Rank 0] step:7721/10000 train_time:727944ms step_avg:94.28ms +[2025-08-22 16:07:11] [Rank 0] step:7721/10000 train_time:727944ms step_avg:94.28ms +[2025-08-22 
16:07:13] [Rank 0] step:7741/10000 train_time:729920ms step_avg:94.29ms +[2025-08-22 16:07:13] [Rank 0] step:7741/10000 train_time:729920ms step_avg:94.29ms +[2025-08-22 16:07:15] [Rank 0] step:7761/10000 train_time:731904ms step_avg:94.31ms +[2025-08-22 16:07:15] [Rank 0] step:7761/10000 train_time:731904ms step_avg:94.31ms +[2025-08-22 16:07:17] [Rank 0] step:7781/10000 train_time:733881ms step_avg:94.32ms +[2025-08-22 16:07:17] [Rank 0] step:7781/10000 train_time:733881ms step_avg:94.32ms +[2025-08-22 16:07:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:07:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:07:32] [Rank 0] PRINT: step:7800/10000 val_loss:3.9342 svd_entropy: attn_qk:H=0.5245,top10E=0.65,eRank=60.6,q75/q25=74.61 attn_vo:H=0.4546,top10E=0.71,eRank=72.9,q75/q25=99.39 mlp_w1:H=0.6025,top10E=0.50,eRank=120.8,q75/q25=25.52 mlp_w2:H=0.6037,top10E=0.48,eRank=124.1,q75/q25=18.61 vo_prod:H=0.3248,top10E=0.83,eRank=27.8,q75/q25=10189.12 train_time:735882ms step_avg:94.34ms +[2025-08-22 16:07:32] [Rank 0] PRINT: step:7800/10000 val_loss:3.9342 svd_entropy: attn_qk:H=0.5245,top10E=0.65,eRank=60.6,q75/q25=74.61 attn_vo:H=0.4546,top10E=0.71,eRank=72.9,q75/q25=99.39 mlp_w1:H=0.6025,top10E=0.50,eRank=120.8,q75/q25=25.52 mlp_w2:H=0.6037,top10E=0.48,eRank=124.1,q75/q25=18.61 vo_prod:H=0.3248,top10E=0.83,eRank=27.8,q75/q25=10189.12 train_time:735882ms step_avg:94.34ms +[2025-08-22 16:07:33] [Rank 0] step:7801/10000 train_time:735899ms step_avg:94.33ms +[2025-08-22 16:07:33] [Rank 0] step:7801/10000 train_time:735899ms step_avg:94.33ms +[2025-08-22 16:07:35] [Rank 0] step:7821/10000 train_time:737903ms step_avg:94.35ms +[2025-08-22 16:07:35] [Rank 0] step:7821/10000 train_time:737903ms step_avg:94.35ms +[2025-08-22 16:07:36] [Rank 0] step:7841/10000 train_time:739863ms 
step_avg:94.36ms +[2025-08-22 16:07:36] [Rank 0] step:7841/10000 train_time:739863ms step_avg:94.36ms +[2025-08-22 16:07:38] [Rank 0] step:7861/10000 train_time:741837ms step_avg:94.37ms +[2025-08-22 16:07:38] [Rank 0] step:7861/10000 train_time:741837ms step_avg:94.37ms +[2025-08-22 16:07:40] [Rank 0] step:7881/10000 train_time:743811ms step_avg:94.38ms +[2025-08-22 16:07:40] [Rank 0] step:7881/10000 train_time:743811ms step_avg:94.38ms +[2025-08-22 16:07:42] [Rank 0] step:7901/10000 train_time:745774ms step_avg:94.39ms +[2025-08-22 16:07:42] [Rank 0] step:7901/10000 train_time:745774ms step_avg:94.39ms +[2025-08-22 16:07:44] [Rank 0] step:7921/10000 train_time:747745ms step_avg:94.40ms +[2025-08-22 16:07:44] [Rank 0] step:7921/10000 train_time:747745ms step_avg:94.40ms +[2025-08-22 16:07:46] [Rank 0] step:7941/10000 train_time:749722ms step_avg:94.41ms +[2025-08-22 16:07:46] [Rank 0] step:7941/10000 train_time:749722ms step_avg:94.41ms +[2025-08-22 16:07:48] [Rank 0] step:7961/10000 train_time:751695ms step_avg:94.42ms +[2025-08-22 16:07:48] [Rank 0] step:7961/10000 train_time:751695ms step_avg:94.42ms +[2025-08-22 16:07:50] [Rank 0] step:7981/10000 train_time:753659ms step_avg:94.43ms +[2025-08-22 16:07:50] [Rank 0] step:7981/10000 train_time:753659ms step_avg:94.43ms +[2025-08-22 16:07:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:07:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:08:06] [Rank 0] PRINT: step:8000/10000 val_loss:3.9169 svd_entropy: attn_qk:H=0.5248,top10E=0.65,eRank=60.9,q75/q25=74.47 attn_vo:H=0.4544,top10E=0.71,eRank=73.1,q75/q25=98.94 mlp_w1:H=0.6023,top10E=0.50,eRank=120.9,q75/q25=25.77 mlp_w2:H=0.6034,top10E=0.49,eRank=124.1,q75/q25=18.44 vo_prod:H=0.3229,top10E=0.83,eRank=27.9,q75/q25=10174.94 train_time:755647ms step_avg:94.46ms +[2025-08-22 16:08:06] [Rank 0] PRINT: step:8000/10000 val_loss:3.9169 svd_entropy: attn_qk:H=0.5248,top10E=0.65,eRank=60.9,q75/q25=74.47 attn_vo:H=0.4544,top10E=0.71,eRank=73.1,q75/q25=98.94 mlp_w1:H=0.6023,top10E=0.50,eRank=120.9,q75/q25=25.77 mlp_w2:H=0.6034,top10E=0.49,eRank=124.1,q75/q25=18.44 vo_prod:H=0.3229,top10E=0.83,eRank=27.9,q75/q25=10174.94 train_time:755647ms step_avg:94.46ms +[2025-08-22 16:08:06] [Rank 0] step:8001/10000 train_time:755665ms step_avg:94.45ms +[2025-08-22 16:08:06] [Rank 0] step:8001/10000 train_time:755665ms step_avg:94.45ms +[2025-08-22 16:08:08] [Rank 0] step:8021/10000 train_time:757628ms step_avg:94.46ms +[2025-08-22 16:08:08] [Rank 0] step:8021/10000 train_time:757628ms step_avg:94.46ms +[2025-08-22 16:08:10] [Rank 0] step:8041/10000 train_time:759603ms step_avg:94.47ms +[2025-08-22 16:08:10] [Rank 0] step:8041/10000 train_time:759603ms step_avg:94.47ms +[2025-08-22 16:08:12] [Rank 0] step:8061/10000 train_time:761572ms step_avg:94.48ms +[2025-08-22 16:08:12] [Rank 0] step:8061/10000 train_time:761572ms step_avg:94.48ms +[2025-08-22 16:08:14] [Rank 0] step:8081/10000 train_time:763529ms step_avg:94.48ms +[2025-08-22 16:08:14] [Rank 0] step:8081/10000 train_time:763529ms step_avg:94.48ms +[2025-08-22 16:08:16] [Rank 0] step:8101/10000 train_time:765502ms step_avg:94.49ms +[2025-08-22 16:08:16] [Rank 0] step:8101/10000 train_time:765502ms step_avg:94.49ms +[2025-08-22 16:08:18] [Rank 0] step:8121/10000 train_time:767468ms step_avg:94.50ms +[2025-08-22 16:08:18] [Rank 0] step:8121/10000 train_time:767468ms step_avg:94.50ms +[2025-08-22 
16:08:21] [Rank 0] step:8141/10000 train_time:770077ms step_avg:94.59ms +[2025-08-22 16:08:21] [Rank 0] step:8141/10000 train_time:770077ms step_avg:94.59ms +[2025-08-22 16:08:23] [Rank 0] step:8161/10000 train_time:772064ms step_avg:94.60ms +[2025-08-22 16:08:23] [Rank 0] step:8161/10000 train_time:772064ms step_avg:94.60ms +[2025-08-22 16:08:25] [Rank 0] step:8181/10000 train_time:774062ms step_avg:94.62ms +[2025-08-22 16:08:25] [Rank 0] step:8181/10000 train_time:774062ms step_avg:94.62ms +[2025-08-22 16:08:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:08:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:08:40] [Rank 0] PRINT: step:8200/10000 val_loss:3.9083 svd_entropy: attn_qk:H=0.5251,top10E=0.64,eRank=61.0,q75/q25=74.51 attn_vo:H=0.4547,top10E=0.71,eRank=73.5,q75/q25=98.32 mlp_w1:H=0.6013,top10E=0.50,eRank=120.9,q75/q25=25.60 mlp_w2:H=0.6028,top10E=0.49,eRank=124.2,q75/q25=18.31 vo_prod:H=0.3231,top10E=0.83,eRank=28.0,q75/q25=10371.03 train_time:776095ms step_avg:94.65ms +[2025-08-22 16:08:40] [Rank 0] PRINT: step:8200/10000 val_loss:3.9083 svd_entropy: attn_qk:H=0.5251,top10E=0.64,eRank=61.0,q75/q25=74.51 attn_vo:H=0.4547,top10E=0.71,eRank=73.5,q75/q25=98.32 mlp_w1:H=0.6013,top10E=0.50,eRank=120.9,q75/q25=25.60 mlp_w2:H=0.6028,top10E=0.49,eRank=124.2,q75/q25=18.31 vo_prod:H=0.3231,top10E=0.83,eRank=28.0,q75/q25=10371.03 train_time:776095ms step_avg:94.65ms +[2025-08-22 16:08:41] [Rank 0] step:8201/10000 train_time:776113ms step_avg:94.64ms +[2025-08-22 16:08:41] [Rank 0] step:8201/10000 train_time:776113ms step_avg:94.64ms +[2025-08-22 16:08:43] [Rank 0] step:8221/10000 train_time:778110ms step_avg:94.65ms +[2025-08-22 16:08:43] [Rank 0] step:8221/10000 train_time:778110ms step_avg:94.65ms +[2025-08-22 16:08:45] [Rank 0] step:8241/10000 train_time:780109ms 
step_avg:94.66ms +[2025-08-22 16:08:45] [Rank 0] step:8241/10000 train_time:780109ms step_avg:94.66ms +[2025-08-22 16:08:47] [Rank 0] step:8261/10000 train_time:782112ms step_avg:94.68ms +[2025-08-22 16:08:47] [Rank 0] step:8261/10000 train_time:782112ms step_avg:94.68ms +[2025-08-22 16:08:49] [Rank 0] step:8281/10000 train_time:784103ms step_avg:94.69ms +[2025-08-22 16:08:49] [Rank 0] step:8281/10000 train_time:784103ms step_avg:94.69ms +[2025-08-22 16:08:51] [Rank 0] step:8301/10000 train_time:786101ms step_avg:94.70ms +[2025-08-22 16:08:51] [Rank 0] step:8301/10000 train_time:786101ms step_avg:94.70ms +[2025-08-22 16:08:53] [Rank 0] step:8321/10000 train_time:788092ms step_avg:94.71ms +[2025-08-22 16:08:53] [Rank 0] step:8321/10000 train_time:788092ms step_avg:94.71ms +[2025-08-22 16:08:55] [Rank 0] step:8341/10000 train_time:790097ms step_avg:94.72ms +[2025-08-22 16:08:55] [Rank 0] step:8341/10000 train_time:790097ms step_avg:94.72ms +[2025-08-22 16:08:57] [Rank 0] step:8361/10000 train_time:792096ms step_avg:94.74ms +[2025-08-22 16:08:57] [Rank 0] step:8361/10000 train_time:792096ms step_avg:94.74ms +[2025-08-22 16:08:59] [Rank 0] step:8381/10000 train_time:794093ms step_avg:94.75ms +[2025-08-22 16:08:59] [Rank 0] step:8381/10000 train_time:794093ms step_avg:94.75ms +[2025-08-22 16:09:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:09:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:09:14] [Rank 0] PRINT: step:8400/10000 val_loss:3.8968 svd_entropy: attn_qk:H=0.5253,top10E=0.64,eRank=61.2,q75/q25=73.66 attn_vo:H=0.4548,top10E=0.71,eRank=73.7,q75/q25=95.80 mlp_w1:H=0.6008,top10E=0.50,eRank=121.0,q75/q25=25.54 mlp_w2:H=0.6027,top10E=0.49,eRank=124.2,q75/q25=18.18 vo_prod:H=0.3237,top10E=0.83,eRank=28.1,q75/q25=9413.59 train_time:796104ms step_avg:94.77ms +[2025-08-22 16:09:14] [Rank 0] PRINT: step:8400/10000 val_loss:3.8968 svd_entropy: attn_qk:H=0.5253,top10E=0.64,eRank=61.2,q75/q25=73.66 attn_vo:H=0.4548,top10E=0.71,eRank=73.7,q75/q25=95.80 mlp_w1:H=0.6008,top10E=0.50,eRank=121.0,q75/q25=25.54 mlp_w2:H=0.6027,top10E=0.49,eRank=124.2,q75/q25=18.18 vo_prod:H=0.3237,top10E=0.83,eRank=28.1,q75/q25=9413.59 train_time:796104ms step_avg:94.77ms +[2025-08-22 16:09:14] [Rank 0] step:8401/10000 train_time:796123ms step_avg:94.77ms +[2025-08-22 16:09:14] [Rank 0] step:8401/10000 train_time:796123ms step_avg:94.77ms +[2025-08-22 16:09:16] [Rank 0] step:8421/10000 train_time:798108ms step_avg:94.78ms +[2025-08-22 16:09:16] [Rank 0] step:8421/10000 train_time:798108ms step_avg:94.78ms +[2025-08-22 16:09:18] [Rank 0] step:8441/10000 train_time:800104ms step_avg:94.79ms +[2025-08-22 16:09:18] [Rank 0] step:8441/10000 train_time:800104ms step_avg:94.79ms +[2025-08-22 16:09:20] [Rank 0] step:8461/10000 train_time:802099ms step_avg:94.80ms +[2025-08-22 16:09:20] [Rank 0] step:8461/10000 train_time:802099ms step_avg:94.80ms +[2025-08-22 16:09:22] [Rank 0] step:8481/10000 train_time:804101ms step_avg:94.81ms +[2025-08-22 16:09:22] [Rank 0] step:8481/10000 train_time:804101ms step_avg:94.81ms +[2025-08-22 16:09:24] [Rank 0] step:8501/10000 train_time:806127ms step_avg:94.83ms +[2025-08-22 16:09:24] [Rank 0] step:8501/10000 train_time:806127ms step_avg:94.83ms +[2025-08-22 16:09:26] [Rank 0] step:8521/10000 train_time:808172ms step_avg:94.84ms +[2025-08-22 16:09:26] [Rank 0] step:8521/10000 train_time:808172ms step_avg:94.84ms +[2025-08-22 16:09:29] 
[Rank 0] step:8541/10000 train_time:810193ms step_avg:94.86ms +[2025-08-22 16:09:29] [Rank 0] step:8541/10000 train_time:810193ms step_avg:94.86ms +[2025-08-22 16:09:31] [Rank 0] step:8561/10000 train_time:812199ms step_avg:94.87ms +[2025-08-22 16:09:31] [Rank 0] step:8561/10000 train_time:812199ms step_avg:94.87ms +[2025-08-22 16:09:33] [Rank 0] step:8581/10000 train_time:814201ms step_avg:94.88ms +[2025-08-22 16:09:33] [Rank 0] step:8581/10000 train_time:814201ms step_avg:94.88ms +[2025-08-22 16:09:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:09:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:09:48] [Rank 0] PRINT: step:8600/10000 val_loss:3.8887 svd_entropy: attn_qk:H=0.5256,top10E=0.64,eRank=61.4,q75/q25=73.23 attn_vo:H=0.4550,top10E=0.71,eRank=73.9,q75/q25=95.38 mlp_w1:H=0.6010,top10E=0.50,eRank=121.1,q75/q25=25.31 mlp_w2:H=0.6027,top10E=0.49,eRank=124.3,q75/q25=18.10 vo_prod:H=0.3240,top10E=0.83,eRank=28.1,q75/q25=9295.24 train_time:816216ms step_avg:94.91ms +[2025-08-22 16:09:48] [Rank 0] PRINT: step:8600/10000 val_loss:3.8887 svd_entropy: attn_qk:H=0.5256,top10E=0.64,eRank=61.4,q75/q25=73.23 attn_vo:H=0.4550,top10E=0.71,eRank=73.9,q75/q25=95.38 mlp_w1:H=0.6010,top10E=0.50,eRank=121.1,q75/q25=25.31 mlp_w2:H=0.6027,top10E=0.49,eRank=124.3,q75/q25=18.10 vo_prod:H=0.3240,top10E=0.83,eRank=28.1,q75/q25=9295.24 train_time:816216ms step_avg:94.91ms +[2025-08-22 16:09:48] [Rank 0] step:8601/10000 train_time:816234ms step_avg:94.90ms +[2025-08-22 16:09:48] [Rank 0] step:8601/10000 train_time:816234ms step_avg:94.90ms +[2025-08-22 16:09:50] [Rank 0] step:8621/10000 train_time:818213ms step_avg:94.91ms +[2025-08-22 16:09:50] [Rank 0] step:8621/10000 train_time:818213ms step_avg:94.91ms +[2025-08-22 16:09:52] [Rank 0] step:8641/10000 train_time:820203ms step_avg:94.92ms 
+[2025-08-22 16:09:52] [Rank 0] step:8641/10000 train_time:820203ms step_avg:94.92ms +[2025-08-22 16:09:54] [Rank 0] step:8661/10000 train_time:822197ms step_avg:94.93ms +[2025-08-22 16:09:54] [Rank 0] step:8661/10000 train_time:822197ms step_avg:94.93ms +[2025-08-22 16:09:56] [Rank 0] step:8681/10000 train_time:824196ms step_avg:94.94ms +[2025-08-22 16:09:56] [Rank 0] step:8681/10000 train_time:824196ms step_avg:94.94ms +[2025-08-22 16:09:58] [Rank 0] step:8701/10000 train_time:826185ms step_avg:94.95ms +[2025-08-22 16:09:58] [Rank 0] step:8701/10000 train_time:826185ms step_avg:94.95ms +[2025-08-22 16:10:00] [Rank 0] step:8721/10000 train_time:828186ms step_avg:94.96ms +[2025-08-22 16:10:00] [Rank 0] step:8721/10000 train_time:828186ms step_avg:94.96ms +[2025-08-22 16:10:02] [Rank 0] step:8741/10000 train_time:830177ms step_avg:94.98ms +[2025-08-22 16:10:02] [Rank 0] step:8741/10000 train_time:830177ms step_avg:94.98ms +[2025-08-22 16:10:04] [Rank 0] step:8761/10000 train_time:832173ms step_avg:94.99ms +[2025-08-22 16:10:04] [Rank 0] step:8761/10000 train_time:832173ms step_avg:94.99ms +[2025-08-22 16:10:06] [Rank 0] step:8781/10000 train_time:834172ms step_avg:95.00ms +[2025-08-22 16:10:06] [Rank 0] step:8781/10000 train_time:834172ms step_avg:95.00ms +[2025-08-22 16:10:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:10:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:10:22] [Rank 0] PRINT: step:8800/10000 val_loss:3.8789 svd_entropy: attn_qk:H=0.5259,top10E=0.64,eRank=61.5,q75/q25=72.93 attn_vo:H=0.4551,top10E=0.71,eRank=74.1,q75/q25=95.31 mlp_w1:H=0.6008,top10E=0.51,eRank=121.2,q75/q25=25.39 mlp_w2:H=0.6027,top10E=0.49,eRank=124.4,q75/q25=18.00 vo_prod:H=0.3230,top10E=0.83,eRank=28.1,q75/q25=9214.51 train_time:836184ms step_avg:95.02ms +[2025-08-22 16:10:22] [Rank 0] PRINT: step:8800/10000 val_loss:3.8789 svd_entropy: attn_qk:H=0.5259,top10E=0.64,eRank=61.5,q75/q25=72.93 attn_vo:H=0.4551,top10E=0.71,eRank=74.1,q75/q25=95.31 mlp_w1:H=0.6008,top10E=0.51,eRank=121.2,q75/q25=25.39 mlp_w2:H=0.6027,top10E=0.49,eRank=124.4,q75/q25=18.00 vo_prod:H=0.3230,top10E=0.83,eRank=28.1,q75/q25=9214.51 train_time:836184ms step_avg:95.02ms +[2025-08-22 16:10:22] [Rank 0] step:8801/10000 train_time:836202ms step_avg:95.01ms +[2025-08-22 16:10:22] [Rank 0] step:8801/10000 train_time:836202ms step_avg:95.01ms +[2025-08-22 16:10:24] [Rank 0] step:8821/10000 train_time:838174ms step_avg:95.02ms +[2025-08-22 16:10:24] [Rank 0] step:8821/10000 train_time:838174ms step_avg:95.02ms +[2025-08-22 16:10:26] [Rank 0] step:8841/10000 train_time:840183ms step_avg:95.03ms +[2025-08-22 16:10:26] [Rank 0] step:8841/10000 train_time:840183ms step_avg:95.03ms +[2025-08-22 16:10:28] [Rank 0] step:8861/10000 train_time:842170ms step_avg:95.04ms +[2025-08-22 16:10:28] [Rank 0] step:8861/10000 train_time:842170ms step_avg:95.04ms +[2025-08-22 16:10:30] [Rank 0] step:8881/10000 train_time:844163ms step_avg:95.05ms +[2025-08-22 16:10:30] [Rank 0] step:8881/10000 train_time:844163ms step_avg:95.05ms +[2025-08-22 16:10:32] [Rank 0] step:8901/10000 train_time:846161ms step_avg:95.06ms +[2025-08-22 16:10:32] [Rank 0] step:8901/10000 train_time:846161ms step_avg:95.06ms +[2025-08-22 16:10:34] [Rank 0] step:8921/10000 train_time:848172ms step_avg:95.08ms +[2025-08-22 16:10:34] [Rank 0] step:8921/10000 train_time:848172ms step_avg:95.08ms +[2025-08-22 16:10:36] 
[Rank 0] step:8941/10000 train_time:850171ms step_avg:95.09ms +[2025-08-22 16:10:36] [Rank 0] step:8941/10000 train_time:850171ms step_avg:95.09ms +[2025-08-22 16:10:38] [Rank 0] step:8961/10000 train_time:852165ms step_avg:95.10ms +[2025-08-22 16:10:38] [Rank 0] step:8961/10000 train_time:852165ms step_avg:95.10ms +[2025-08-22 16:10:40] [Rank 0] step:8981/10000 train_time:854284ms step_avg:95.12ms +[2025-08-22 16:10:40] [Rank 0] step:8981/10000 train_time:854284ms step_avg:95.12ms +[2025-08-22 16:10:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:10:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:10:56] [Rank 0] PRINT: step:9000/10000 val_loss:3.8695 svd_entropy: attn_qk:H=0.5260,top10E=0.64,eRank=61.7,q75/q25=72.87 attn_vo:H=0.4555,top10E=0.71,eRank=74.3,q75/q25=95.36 mlp_w1:H=0.6009,top10E=0.50,eRank=121.3,q75/q25=25.44 mlp_w2:H=0.6029,top10E=0.49,eRank=124.5,q75/q25=17.93 vo_prod:H=0.3232,top10E=0.83,eRank=28.2,q75/q25=9309.14 train_time:856286ms step_avg:95.14ms +[2025-08-22 16:10:56] [Rank 0] PRINT: step:9000/10000 val_loss:3.8695 svd_entropy: attn_qk:H=0.5260,top10E=0.64,eRank=61.7,q75/q25=72.87 attn_vo:H=0.4555,top10E=0.71,eRank=74.3,q75/q25=95.36 mlp_w1:H=0.6009,top10E=0.50,eRank=121.3,q75/q25=25.44 mlp_w2:H=0.6029,top10E=0.49,eRank=124.5,q75/q25=17.93 vo_prod:H=0.3232,top10E=0.83,eRank=28.2,q75/q25=9309.14 train_time:856286ms step_avg:95.14ms +[2025-08-22 16:10:56] [Rank 0] step:9001/10000 train_time:856305ms step_avg:95.13ms +[2025-08-22 16:10:56] [Rank 0] step:9001/10000 train_time:856305ms step_avg:95.13ms +[2025-08-22 16:10:58] [Rank 0] step:9021/10000 train_time:858289ms step_avg:95.14ms +[2025-08-22 16:10:58] [Rank 0] step:9021/10000 train_time:858289ms step_avg:95.14ms +[2025-08-22 16:11:00] [Rank 0] step:9041/10000 train_time:860281ms step_avg:95.15ms 
+[2025-08-22 16:11:00] [Rank 0] step:9041/10000 train_time:860281ms step_avg:95.15ms +[2025-08-22 16:11:02] [Rank 0] step:9061/10000 train_time:862281ms step_avg:95.16ms +[2025-08-22 16:11:02] [Rank 0] step:9061/10000 train_time:862281ms step_avg:95.16ms +[2025-08-22 16:11:04] [Rank 0] step:9081/10000 train_time:864281ms step_avg:95.17ms +[2025-08-22 16:11:04] [Rank 0] step:9081/10000 train_time:864281ms step_avg:95.17ms +[2025-08-22 16:11:06] [Rank 0] step:9101/10000 train_time:866296ms step_avg:95.19ms +[2025-08-22 16:11:06] [Rank 0] step:9101/10000 train_time:866296ms step_avg:95.19ms +[2025-08-22 16:11:08] [Rank 0] step:9121/10000 train_time:868294ms step_avg:95.20ms +[2025-08-22 16:11:08] [Rank 0] step:9121/10000 train_time:868294ms step_avg:95.20ms +[2025-08-22 16:11:10] [Rank 0] step:9141/10000 train_time:870282ms step_avg:95.21ms +[2025-08-22 16:11:10] [Rank 0] step:9141/10000 train_time:870282ms step_avg:95.21ms +[2025-08-22 16:11:12] [Rank 0] step:9161/10000 train_time:872272ms step_avg:95.22ms +[2025-08-22 16:11:12] [Rank 0] step:9161/10000 train_time:872272ms step_avg:95.22ms +[2025-08-22 16:11:14] [Rank 0] step:9181/10000 train_time:874305ms step_avg:95.23ms +[2025-08-22 16:11:14] [Rank 0] step:9181/10000 train_time:874305ms step_avg:95.23ms +[2025-08-22 16:11:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:11:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:11:30] [Rank 0] PRINT: step:9200/10000 val_loss:3.8616 svd_entropy: attn_qk:H=0.5262,top10E=0.64,eRank=61.8,q75/q25=72.80 attn_vo:H=0.4556,top10E=0.71,eRank=74.5,q75/q25=95.50 mlp_w1:H=0.6006,top10E=0.50,eRank=121.4,q75/q25=25.20 mlp_w2:H=0.6028,top10E=0.49,eRank=124.6,q75/q25=17.90 vo_prod:H=0.3236,top10E=0.83,eRank=28.3,q75/q25=9406.95 train_time:876310ms step_avg:95.25ms +[2025-08-22 16:11:30] [Rank 0] PRINT: step:9200/10000 val_loss:3.8616 svd_entropy: attn_qk:H=0.5262,top10E=0.64,eRank=61.8,q75/q25=72.80 attn_vo:H=0.4556,top10E=0.71,eRank=74.5,q75/q25=95.50 mlp_w1:H=0.6006,top10E=0.50,eRank=121.4,q75/q25=25.20 mlp_w2:H=0.6028,top10E=0.49,eRank=124.6,q75/q25=17.90 vo_prod:H=0.3236,top10E=0.83,eRank=28.3,q75/q25=9406.95 train_time:876310ms step_avg:95.25ms +[2025-08-22 16:11:30] [Rank 0] step:9201/10000 train_time:876328ms step_avg:95.24ms +[2025-08-22 16:11:30] [Rank 0] step:9201/10000 train_time:876328ms step_avg:95.24ms +[2025-08-22 16:11:32] [Rank 0] step:9221/10000 train_time:878333ms step_avg:95.25ms +[2025-08-22 16:11:32] [Rank 0] step:9221/10000 train_time:878333ms step_avg:95.25ms +[2025-08-22 16:11:34] [Rank 0] step:9241/10000 train_time:880336ms step_avg:95.26ms +[2025-08-22 16:11:34] [Rank 0] step:9241/10000 train_time:880336ms step_avg:95.26ms +[2025-08-22 16:11:36] [Rank 0] step:9261/10000 train_time:882339ms step_avg:95.27ms +[2025-08-22 16:11:36] [Rank 0] step:9261/10000 train_time:882339ms step_avg:95.27ms +[2025-08-22 16:11:38] [Rank 0] step:9281/10000 train_time:884326ms step_avg:95.28ms +[2025-08-22 16:11:38] [Rank 0] step:9281/10000 train_time:884326ms step_avg:95.28ms +[2025-08-22 16:11:40] [Rank 0] step:9301/10000 train_time:886320ms step_avg:95.29ms +[2025-08-22 16:11:40] [Rank 0] step:9301/10000 train_time:886320ms step_avg:95.29ms +[2025-08-22 16:11:42] [Rank 0] step:9321/10000 train_time:888321ms step_avg:95.30ms +[2025-08-22 16:11:42] [Rank 0] step:9321/10000 train_time:888321ms step_avg:95.30ms +[2025-08-22 16:11:44] 
[Rank 0] step:9341/10000 train_time:890363ms step_avg:95.32ms +[2025-08-22 16:11:44] [Rank 0] step:9341/10000 train_time:890363ms step_avg:95.32ms +[2025-08-22 16:11:46] [Rank 0] step:9361/10000 train_time:892421ms step_avg:95.33ms +[2025-08-22 16:11:46] [Rank 0] step:9361/10000 train_time:892421ms step_avg:95.33ms +[2025-08-22 16:11:48] [Rank 0] step:9381/10000 train_time:894437ms step_avg:95.35ms +[2025-08-22 16:11:48] [Rank 0] step:9381/10000 train_time:894437ms step_avg:95.35ms +[2025-08-22 16:11:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:11:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:12:04] [Rank 0] PRINT: step:9400/10000 val_loss:3.8536 svd_entropy: attn_qk:H=0.5263,top10E=0.64,eRank=61.8,q75/q25=72.49 attn_vo:H=0.4559,top10E=0.71,eRank=74.6,q75/q25=95.49 mlp_w1:H=0.6009,top10E=0.50,eRank=121.4,q75/q25=25.13 mlp_w2:H=0.6030,top10E=0.49,eRank=124.7,q75/q25=17.77 vo_prod:H=0.3241,top10E=0.83,eRank=28.4,q75/q25=9520.52 train_time:896461ms step_avg:95.37ms +[2025-08-22 16:12:04] [Rank 0] PRINT: step:9400/10000 val_loss:3.8536 svd_entropy: attn_qk:H=0.5263,top10E=0.64,eRank=61.8,q75/q25=72.49 attn_vo:H=0.4559,top10E=0.71,eRank=74.6,q75/q25=95.49 mlp_w1:H=0.6009,top10E=0.50,eRank=121.4,q75/q25=25.13 mlp_w2:H=0.6030,top10E=0.49,eRank=124.7,q75/q25=17.77 vo_prod:H=0.3241,top10E=0.83,eRank=28.4,q75/q25=9520.52 train_time:896461ms step_avg:95.37ms +[2025-08-22 16:12:04] [Rank 0] step:9401/10000 train_time:896480ms step_avg:95.36ms +[2025-08-22 16:12:04] [Rank 0] step:9401/10000 train_time:896480ms step_avg:95.36ms +[2025-08-22 16:12:06] [Rank 0] step:9421/10000 train_time:898479ms step_avg:95.37ms +[2025-08-22 16:12:06] [Rank 0] step:9421/10000 train_time:898479ms step_avg:95.37ms +[2025-08-22 16:12:08] [Rank 0] step:9441/10000 train_time:900476ms step_avg:95.38ms 
+[2025-08-22 16:12:08] [Rank 0] step:9441/10000 train_time:900476ms step_avg:95.38ms +[2025-08-22 16:12:10] [Rank 0] step:9461/10000 train_time:902483ms step_avg:95.39ms +[2025-08-22 16:12:10] [Rank 0] step:9461/10000 train_time:902483ms step_avg:95.39ms +[2025-08-22 16:12:12] [Rank 0] step:9481/10000 train_time:904492ms step_avg:95.40ms +[2025-08-22 16:12:12] [Rank 0] step:9481/10000 train_time:904492ms step_avg:95.40ms +[2025-08-22 16:12:14] [Rank 0] step:9501/10000 train_time:906504ms step_avg:95.41ms +[2025-08-22 16:12:14] [Rank 0] step:9501/10000 train_time:906504ms step_avg:95.41ms +[2025-08-22 16:12:16] [Rank 0] step:9521/10000 train_time:908502ms step_avg:95.42ms +[2025-08-22 16:12:16] [Rank 0] step:9521/10000 train_time:908502ms step_avg:95.42ms +[2025-08-22 16:12:18] [Rank 0] step:9541/10000 train_time:910510ms step_avg:95.43ms +[2025-08-22 16:12:18] [Rank 0] step:9541/10000 train_time:910510ms step_avg:95.43ms +[2025-08-22 16:12:20] [Rank 0] step:9561/10000 train_time:912509ms step_avg:95.44ms +[2025-08-22 16:12:20] [Rank 0] step:9561/10000 train_time:912509ms step_avg:95.44ms +[2025-08-22 16:12:22] [Rank 0] step:9581/10000 train_time:914518ms step_avg:95.45ms +[2025-08-22 16:12:22] [Rank 0] step:9581/10000 train_time:914518ms step_avg:95.45ms +[2025-08-22 16:12:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:12:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:12:38] [Rank 0] PRINT: step:9600/10000 val_loss:3.8464 svd_entropy: attn_qk:H=0.5265,top10E=0.64,eRank=61.9,q75/q25=72.34 attn_vo:H=0.4560,top10E=0.71,eRank=74.8,q75/q25=95.52 mlp_w1:H=0.6010,top10E=0.50,eRank=121.5,q75/q25=25.08 mlp_w2:H=0.6030,top10E=0.49,eRank=124.8,q75/q25=17.76 vo_prod:H=0.3249,top10E=0.83,eRank=28.5,q75/q25=9645.01 train_time:916553ms step_avg:95.47ms +[2025-08-22 16:12:38] [Rank 0] PRINT: step:9600/10000 val_loss:3.8464 svd_entropy: attn_qk:H=0.5265,top10E=0.64,eRank=61.9,q75/q25=72.34 attn_vo:H=0.4560,top10E=0.71,eRank=74.8,q75/q25=95.52 mlp_w1:H=0.6010,top10E=0.50,eRank=121.5,q75/q25=25.08 mlp_w2:H=0.6030,top10E=0.49,eRank=124.8,q75/q25=17.76 vo_prod:H=0.3249,top10E=0.83,eRank=28.5,q75/q25=9645.01 train_time:916553ms step_avg:95.47ms +[2025-08-22 16:12:38] [Rank 0] step:9601/10000 train_time:916571ms step_avg:95.47ms +[2025-08-22 16:12:38] [Rank 0] step:9601/10000 train_time:916571ms step_avg:95.47ms +[2025-08-22 16:12:40] [Rank 0] step:9621/10000 train_time:918550ms step_avg:95.47ms +[2025-08-22 16:12:40] [Rank 0] step:9621/10000 train_time:918550ms step_avg:95.47ms +[2025-08-22 16:12:42] [Rank 0] step:9641/10000 train_time:920552ms step_avg:95.48ms +[2025-08-22 16:12:42] [Rank 0] step:9641/10000 train_time:920552ms step_avg:95.48ms +[2025-08-22 16:12:44] [Rank 0] step:9661/10000 train_time:922584ms step_avg:95.50ms +[2025-08-22 16:12:44] [Rank 0] step:9661/10000 train_time:922584ms step_avg:95.50ms +[2025-08-22 16:12:46] [Rank 0] step:9681/10000 train_time:924605ms step_avg:95.51ms +[2025-08-22 16:12:46] [Rank 0] step:9681/10000 train_time:924605ms step_avg:95.51ms +[2025-08-22 16:12:49] [Rank 0] step:9701/10000 train_time:926690ms step_avg:95.53ms +[2025-08-22 16:12:49] [Rank 0] step:9701/10000 train_time:926690ms step_avg:95.53ms +[2025-08-22 16:12:51] [Rank 0] step:9721/10000 train_time:928774ms step_avg:95.54ms +[2025-08-22 16:12:51] [Rank 0] step:9721/10000 train_time:928774ms step_avg:95.54ms +[2025-08-22 16:12:53] 
[Rank 0] step:9741/10000 train_time:930816ms step_avg:95.56ms +[2025-08-22 16:12:53] [Rank 0] step:9741/10000 train_time:930816ms step_avg:95.56ms +[2025-08-22 16:12:55] [Rank 0] step:9761/10000 train_time:932844ms step_avg:95.57ms +[2025-08-22 16:12:55] [Rank 0] step:9761/10000 train_time:932844ms step_avg:95.57ms +[2025-08-22 16:12:57] [Rank 0] step:9781/10000 train_time:934879ms step_avg:95.58ms +[2025-08-22 16:12:57] [Rank 0] step:9781/10000 train_time:934879ms step_avg:95.58ms +[2025-08-22 16:12:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:12:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:13:13] [Rank 0] PRINT: step:9800/10000 val_loss:3.8401 svd_entropy: attn_qk:H=0.5266,top10E=0.64,eRank=62.0,q75/q25=72.34 attn_vo:H=0.4560,top10E=0.71,eRank=74.8,q75/q25=95.46 mlp_w1:H=0.6010,top10E=0.50,eRank=121.5,q75/q25=25.08 mlp_w2:H=0.6030,top10E=0.49,eRank=124.8,q75/q25=17.70 vo_prod:H=0.3249,top10E=0.83,eRank=28.5,q75/q25=9607.02 train_time:936938ms step_avg:95.61ms +[2025-08-22 16:13:13] [Rank 0] PRINT: step:9800/10000 val_loss:3.8401 svd_entropy: attn_qk:H=0.5266,top10E=0.64,eRank=62.0,q75/q25=72.34 attn_vo:H=0.4560,top10E=0.71,eRank=74.8,q75/q25=95.46 mlp_w1:H=0.6010,top10E=0.50,eRank=121.5,q75/q25=25.08 mlp_w2:H=0.6030,top10E=0.49,eRank=124.8,q75/q25=17.70 vo_prod:H=0.3249,top10E=0.83,eRank=28.5,q75/q25=9607.02 train_time:936938ms step_avg:95.61ms +[2025-08-22 16:13:13] [Rank 0] step:9801/10000 train_time:936957ms step_avg:95.60ms +[2025-08-22 16:13:13] [Rank 0] step:9801/10000 train_time:936957ms step_avg:95.60ms +[2025-08-22 16:13:15] [Rank 0] step:9821/10000 train_time:938966ms step_avg:95.61ms +[2025-08-22 16:13:15] [Rank 0] step:9821/10000 train_time:938966ms step_avg:95.61ms +[2025-08-22 16:13:17] [Rank 0] step:9841/10000 train_time:940996ms step_avg:95.62ms 
+[2025-08-22 16:13:17] [Rank 0] step:9841/10000 train_time:940996ms step_avg:95.62ms +[2025-08-22 16:13:19] [Rank 0] step:9861/10000 train_time:943007ms step_avg:95.63ms +[2025-08-22 16:13:19] [Rank 0] step:9861/10000 train_time:943007ms step_avg:95.63ms +[2025-08-22 16:13:21] [Rank 0] step:9881/10000 train_time:945020ms step_avg:95.64ms +[2025-08-22 16:13:21] [Rank 0] step:9881/10000 train_time:945020ms step_avg:95.64ms +[2025-08-22 16:13:23] [Rank 0] step:9901/10000 train_time:947053ms step_avg:95.65ms +[2025-08-22 16:13:23] [Rank 0] step:9901/10000 train_time:947053ms step_avg:95.65ms +[2025-08-22 16:13:25] [Rank 0] step:9921/10000 train_time:949070ms step_avg:95.66ms +[2025-08-22 16:13:25] [Rank 0] step:9921/10000 train_time:949070ms step_avg:95.66ms +[2025-08-22 16:13:27] [Rank 0] step:9941/10000 train_time:951103ms step_avg:95.67ms +[2025-08-22 16:13:27] [Rank 0] step:9941/10000 train_time:951103ms step_avg:95.67ms +[2025-08-22 16:13:29] [Rank 0] step:9961/10000 train_time:953121ms step_avg:95.69ms +[2025-08-22 16:13:29] [Rank 0] step:9961/10000 train_time:953121ms step_avg:95.69ms +[2025-08-22 16:13:31] [Rank 0] step:9981/10000 train_time:955152ms step_avg:95.70ms +[2025-08-22 16:13:31] [Rank 0] step:9981/10000 train_time:955152ms step_avg:95.70ms +[2025-08-22 16:13:33] [Rank 0] step:10000/10000 train_time:957084ms step_avg:95.71ms +[2025-08-22 16:13:33] [Rank 0] step:10000/10000 train_time:957084ms step_avg:95.71ms +[2025-08-22 16:13:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:13:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:13:47] [Rank 0] PRINT: step:10000/10000 val_loss:3.8342 svd_entropy: attn_qk:H=0.5267,top10E=0.64,eRank=62.0,q75/q25=72.46 attn_vo:H=0.4560,top10E=0.71,eRank=74.9,q75/q25=95.76 mlp_w1:H=0.6010,top10E=0.50,eRank=121.6,q75/q25=25.16 mlp_w2:H=0.6031,top10E=0.49,eRank=124.9,q75/q25=17.66 vo_prod:H=0.3250,top10E=0.83,eRank=28.5,q75/q25=9668.42 train_time:957204ms step_avg:95.72ms +[2025-08-22 16:13:47] [Rank 0] PRINT: step:10000/10000 val_loss:3.8342 svd_entropy: attn_qk:H=0.5267,top10E=0.64,eRank=62.0,q75/q25=72.46 attn_vo:H=0.4560,top10E=0.71,eRank=74.9,q75/q25=95.76 mlp_w1:H=0.6010,top10E=0.50,eRank=121.6,q75/q25=25.16 mlp_w2:H=0.6031,top10E=0.49,eRank=124.9,q75/q25=17.66 vo_prod:H=0.3250,top10E=0.83,eRank=28.5,q75/q25=9668.42 train_time:957204ms step_avg:95.72ms +[2025-08-22 16:13:47] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 16:13:47 2025 --- +[2025-08-22 16:13:47] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 16:13:47 2025 --- +[2025-08-22 16:13:47] [Rank 0] PRINT: Peak memory allocated: 11208 MiB reserved: 16936 MiB +[2025-08-22 16:13:47] [Rank 0] PRINT: Peak memory allocated: 11208 MiB reserved: 16936 MiB diff --git a/logs_svd_gated/mode_4_param_gated_seed_43/config.json b/logs_svd_gated/mode_4_param_gated_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..43a7b730aaaa2304939e714cf1670195daa6465f --- /dev/null +++ b/logs_svd_gated/mode_4_param_gated_seed_43/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 4, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "bf3dbf35-2e93-4084-a677-9cce89725140", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_4_param_gated_seed_43/training_log_bf3dbf35-2e93-4084-a677-9cce89725140.txt b/logs_svd_gated/mode_4_param_gated_seed_43/training_log_bf3dbf35-2e93-4084-a677-9cce89725140.txt new file mode 100644 index 0000000000000000000000000000000000000000..8dd99ad5b5c61f1620cd1932394e3c4ac5573776 --- /dev/null +++ b/logs_svd_gated/mode_4_param_gated_seed_43/training_log_bf3dbf35-2e93-4084-a677-9cce89725140.txt @@ -0,0 +1,2926 @@ +[2025-08-22 20:58:57] [Rank 0] PRINT: --- Script Start: Fri Aug 22 20:58:57 2025 --- +[2025-08-22 20:58:57] [Rank 0] PRINT: --- Script Start: Fri Aug 22 20:58:57 2025 --- +[2025-08-22 20:58:57] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=4, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 20:58:57] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=4, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 20:58:57] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 20:58:57] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 20:58:57] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 20:58:57] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 20:58:57] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_4_param_gated_seed_43 +[2025-08-22 20:58:57] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_4_param_gated_seed_43 +[2025-08-22 20:58:57] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 20:58:57] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 20:58:57] [Rank 0] PRINT: Constructing model... +[2025-08-22 20:58:57] [Rank 0] PRINT: Constructing model... +[2025-08-22 20:58:59] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 20:58:59] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 20:58:59] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 20:58:59] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 20:58:59] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 20:58:59] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 20:58:59] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 4 +[2025-08-22 20:58:59] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 4 +[2025-08-22 20:58:59] [Rank 0] PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: 0.05). +[2025-08-22 20:58:59] [Rank 0] PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: 0.05). +[2025-08-22 20:58:59] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 20:58:59] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 20:58:59] [Rank 0] PRINT: Muon optimizer is active with 36 parameters. +[2025-08-22 20:58:59] [Rank 0] PRINT: Muon optimizer is active with 36 parameters. +[2025-08-22 20:58:59] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 20:58:59] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 20:58:59] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 20:58:59] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 20:58:59] [Rank 0] PRINT: Starting warmup... +[2025-08-22 20:58:59] [Rank 0] PRINT: Starting warmup... +[2025-08-22 20:59:42] [Rank 0] PRINT: Warmup complete. +[2025-08-22 20:59:42] [Rank 0] PRINT: Warmup complete. +[2025-08-22 20:59:42] [Rank 0] PRINT: Starting training... +[2025-08-22 20:59:42] [Rank 0] PRINT: Starting training... 
+[2025-08-22 20:59:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 20:59:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:00:01] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 21:00:01] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 21:00:03] [Rank 0] step:21/10000 train_time:1813ms step_avg:86.33ms +[2025-08-22 21:00:03] [Rank 0] step:21/10000 train_time:1813ms step_avg:86.33ms +[2025-08-22 21:00:05] [Rank 0] step:41/10000 train_time:3583ms step_avg:87.39ms +[2025-08-22 21:00:05] [Rank 0] step:41/10000 train_time:3583ms step_avg:87.39ms +[2025-08-22 21:00:06] [Rank 0] step:61/10000 train_time:5352ms step_avg:87.74ms +[2025-08-22 21:00:06] [Rank 0] step:61/10000 train_time:5352ms step_avg:87.74ms +[2025-08-22 21:00:08] [Rank 0] step:81/10000 train_time:7124ms step_avg:87.95ms +[2025-08-22 21:00:08] [Rank 0] step:81/10000 train_time:7124ms step_avg:87.95ms +[2025-08-22 21:00:10] [Rank 0] step:101/10000 train_time:8894ms step_avg:88.06ms +[2025-08-22 21:00:10] [Rank 0] step:101/10000 train_time:8894ms step_avg:88.06ms +[2025-08-22 21:00:12] [Rank 0] step:121/10000 train_time:10665ms step_avg:88.14ms +[2025-08-22 21:00:12] [Rank 0] step:121/10000 
train_time:10665ms step_avg:88.14ms +[2025-08-22 21:00:13] [Rank 0] step:141/10000 train_time:12438ms step_avg:88.21ms +[2025-08-22 21:00:13] [Rank 0] step:141/10000 train_time:12438ms step_avg:88.21ms +[2025-08-22 21:00:15] [Rank 0] step:161/10000 train_time:14211ms step_avg:88.27ms +[2025-08-22 21:00:15] [Rank 0] step:161/10000 train_time:14211ms step_avg:88.27ms +[2025-08-22 21:00:17] [Rank 0] step:181/10000 train_time:15986ms step_avg:88.32ms +[2025-08-22 21:00:17] [Rank 0] step:181/10000 train_time:15986ms step_avg:88.32ms +[2025-08-22 21:00:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:00:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:00:33] [Rank 0] PRINT: step:200/10000 val_loss:7.7410 svd_entropy: attn_qk:H=0.2523,top10E=0.91,eRank=8.9,q75/q25=16.18 attn_vo:H=0.1425,top10E=0.98,eRank=3.0,q75/q25=199.25 mlp_w1:H=0.8371,top10E=0.31,eRank=265.0,q75/q25=2.47 mlp_w2:H=0.7467,top10E=0.35,eRank=198.3,q75/q25=9.13 vo_prod:H=0.0304,top10E=1.00,eRank=1.2,q75/q25=1540.65 train_time:17776ms step_avg:88.88ms +[2025-08-22 21:00:33] [Rank 0] PRINT: step:200/10000 val_loss:7.7410 svd_entropy: attn_qk:H=0.2523,top10E=0.91,eRank=8.9,q75/q25=16.18 attn_vo:H=0.1425,top10E=0.98,eRank=3.0,q75/q25=199.25 mlp_w1:H=0.8371,top10E=0.31,eRank=265.0,q75/q25=2.47 mlp_w2:H=0.7467,top10E=0.35,eRank=198.3,q75/q25=9.13 vo_prod:H=0.0304,top10E=1.00,eRank=1.2,q75/q25=1540.65 train_time:17776ms step_avg:88.88ms +[2025-08-22 21:00:33] [Rank 0] step:201/10000 train_time:17793ms step_avg:88.52ms +[2025-08-22 21:00:33] [Rank 0] step:201/10000 train_time:17793ms step_avg:88.52ms +[2025-08-22 21:00:34] [Rank 0] step:221/10000 train_time:19636ms step_avg:88.85ms +[2025-08-22 21:00:34] [Rank 0] step:221/10000 train_time:19636ms step_avg:88.85ms +[2025-08-22 21:00:36] [Rank 0] step:241/10000 
train_time:21410ms step_avg:88.84ms +[2025-08-22 21:00:36] [Rank 0] step:241/10000 train_time:21410ms step_avg:88.84ms +[2025-08-22 21:00:38] [Rank 0] step:261/10000 train_time:23186ms step_avg:88.84ms +[2025-08-22 21:00:38] [Rank 0] step:261/10000 train_time:23186ms step_avg:88.84ms +[2025-08-22 21:00:40] [Rank 0] step:281/10000 train_time:24963ms step_avg:88.84ms +[2025-08-22 21:00:40] [Rank 0] step:281/10000 train_time:24963ms step_avg:88.84ms +[2025-08-22 21:00:42] [Rank 0] step:301/10000 train_time:26744ms step_avg:88.85ms +[2025-08-22 21:00:42] [Rank 0] step:301/10000 train_time:26744ms step_avg:88.85ms +[2025-08-22 21:00:43] [Rank 0] step:321/10000 train_time:28529ms step_avg:88.88ms +[2025-08-22 21:00:43] [Rank 0] step:321/10000 train_time:28529ms step_avg:88.88ms +[2025-08-22 21:00:45] [Rank 0] step:341/10000 train_time:30314ms step_avg:88.90ms +[2025-08-22 21:00:45] [Rank 0] step:341/10000 train_time:30314ms step_avg:88.90ms +[2025-08-22 21:00:47] [Rank 0] step:361/10000 train_time:32101ms step_avg:88.92ms +[2025-08-22 21:00:47] [Rank 0] step:361/10000 train_time:32101ms step_avg:88.92ms +[2025-08-22 21:00:49] [Rank 0] step:381/10000 train_time:33890ms step_avg:88.95ms +[2025-08-22 21:00:49] [Rank 0] step:381/10000 train_time:33890ms step_avg:88.95ms +[2025-08-22 21:00:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:00:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:01:04] [Rank 0] PRINT: step:400/10000 val_loss:6.4221 svd_entropy: attn_qk:H=0.3285,top10E=0.89,eRank=12.0,q75/q25=25.67 attn_vo:H=0.2166,top10E=0.96,eRank=5.2,q75/q25=160.83 mlp_w1:H=0.7007,top10E=0.53,eRank=106.5,q75/q25=2.49 mlp_w2:H=0.6511,top10E=0.53,eRank=95.7,q75/q25=9.34 vo_prod:H=0.0692,top10E=1.00,eRank=1.7,q75/q25=1449.03 train_time:35690ms step_avg:89.23ms +[2025-08-22 21:01:04] [Rank 0] PRINT: step:400/10000 val_loss:6.4221 svd_entropy: attn_qk:H=0.3285,top10E=0.89,eRank=12.0,q75/q25=25.67 attn_vo:H=0.2166,top10E=0.96,eRank=5.2,q75/q25=160.83 mlp_w1:H=0.7007,top10E=0.53,eRank=106.5,q75/q25=2.49 mlp_w2:H=0.6511,top10E=0.53,eRank=95.7,q75/q25=9.34 vo_prod:H=0.0692,top10E=1.00,eRank=1.7,q75/q25=1449.03 train_time:35690ms step_avg:89.23ms +[2025-08-22 21:01:04] [Rank 0] step:401/10000 train_time:35708ms step_avg:89.05ms +[2025-08-22 21:01:04] [Rank 0] step:401/10000 train_time:35708ms step_avg:89.05ms +[2025-08-22 21:01:06] [Rank 0] step:421/10000 train_time:37488ms step_avg:89.05ms +[2025-08-22 21:01:06] [Rank 0] step:421/10000 train_time:37488ms step_avg:89.05ms +[2025-08-22 21:01:08] [Rank 0] step:441/10000 train_time:39268ms step_avg:89.04ms +[2025-08-22 21:01:08] [Rank 0] step:441/10000 train_time:39268ms step_avg:89.04ms +[2025-08-22 21:01:10] [Rank 0] step:461/10000 train_time:41050ms step_avg:89.04ms +[2025-08-22 21:01:10] [Rank 0] step:461/10000 train_time:41050ms step_avg:89.04ms +[2025-08-22 21:01:12] [Rank 0] step:481/10000 train_time:42832ms step_avg:89.05ms +[2025-08-22 21:01:12] [Rank 0] step:481/10000 train_time:42832ms step_avg:89.05ms +[2025-08-22 21:01:13] [Rank 0] step:501/10000 train_time:44616ms step_avg:89.05ms +[2025-08-22 21:01:13] [Rank 0] step:501/10000 train_time:44616ms step_avg:89.05ms +[2025-08-22 21:01:15] [Rank 0] step:521/10000 train_time:46400ms step_avg:89.06ms +[2025-08-22 21:01:15] [Rank 0] step:521/10000 train_time:46400ms step_avg:89.06ms +[2025-08-22 21:01:17] [Rank 0] step:541/10000 
train_time:48185ms step_avg:89.07ms +[2025-08-22 21:01:17] [Rank 0] step:541/10000 train_time:48185ms step_avg:89.07ms +[2025-08-22 21:01:19] [Rank 0] step:561/10000 train_time:49972ms step_avg:89.08ms +[2025-08-22 21:01:19] [Rank 0] step:561/10000 train_time:49972ms step_avg:89.08ms +[2025-08-22 21:01:20] [Rank 0] step:581/10000 train_time:51758ms step_avg:89.08ms +[2025-08-22 21:01:20] [Rank 0] step:581/10000 train_time:51758ms step_avg:89.08ms +[2025-08-22 21:01:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:01:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:01:36] [Rank 0] PRINT: step:600/10000 val_loss:5.7920 svd_entropy: attn_qk:H=0.3758,top10E=0.86,eRank=15.3,q75/q25=36.83 attn_vo:H=0.2765,top10E=0.91,eRank=9.6,q75/q25=117.77 mlp_w1:H=0.6423,top10E=0.59,eRank=72.4,q75/q25=2.51 mlp_w2:H=0.6093,top10E=0.58,eRank=71.1,q75/q25=9.54 vo_prod:H=0.1189,top10E=0.99,eRank=2.4,q75/q25=1137.27 train_time:53558ms step_avg:89.26ms +[2025-08-22 21:01:36] [Rank 0] PRINT: step:600/10000 val_loss:5.7920 svd_entropy: attn_qk:H=0.3758,top10E=0.86,eRank=15.3,q75/q25=36.83 attn_vo:H=0.2765,top10E=0.91,eRank=9.6,q75/q25=117.77 mlp_w1:H=0.6423,top10E=0.59,eRank=72.4,q75/q25=2.51 mlp_w2:H=0.6093,top10E=0.58,eRank=71.1,q75/q25=9.54 vo_prod:H=0.1189,top10E=0.99,eRank=2.4,q75/q25=1137.27 train_time:53558ms step_avg:89.26ms +[2025-08-22 21:01:36] [Rank 0] step:601/10000 train_time:53576ms step_avg:89.14ms +[2025-08-22 21:01:36] [Rank 0] step:601/10000 train_time:53576ms step_avg:89.14ms +[2025-08-22 21:01:38] [Rank 0] step:621/10000 train_time:55426ms step_avg:89.25ms +[2025-08-22 21:01:38] [Rank 0] step:621/10000 train_time:55426ms step_avg:89.25ms +[2025-08-22 21:01:40] [Rank 0] step:641/10000 train_time:57205ms step_avg:89.24ms +[2025-08-22 21:01:40] [Rank 0] step:641/10000 
train_time:57205ms step_avg:89.24ms +[2025-08-22 21:01:41] [Rank 0] step:661/10000 train_time:58987ms step_avg:89.24ms +[2025-08-22 21:01:41] [Rank 0] step:661/10000 train_time:58987ms step_avg:89.24ms +[2025-08-22 21:01:43] [Rank 0] step:681/10000 train_time:60769ms step_avg:89.23ms +[2025-08-22 21:01:43] [Rank 0] step:681/10000 train_time:60769ms step_avg:89.23ms +[2025-08-22 21:01:45] [Rank 0] step:701/10000 train_time:62550ms step_avg:89.23ms +[2025-08-22 21:01:45] [Rank 0] step:701/10000 train_time:62550ms step_avg:89.23ms +[2025-08-22 21:01:47] [Rank 0] step:721/10000 train_time:64334ms step_avg:89.23ms +[2025-08-22 21:01:47] [Rank 0] step:721/10000 train_time:64334ms step_avg:89.23ms +[2025-08-22 21:01:49] [Rank 0] step:741/10000 train_time:66117ms step_avg:89.23ms +[2025-08-22 21:01:49] [Rank 0] step:741/10000 train_time:66117ms step_avg:89.23ms +[2025-08-22 21:01:50] [Rank 0] step:761/10000 train_time:67915ms step_avg:89.24ms +[2025-08-22 21:01:50] [Rank 0] step:761/10000 train_time:67915ms step_avg:89.24ms +[2025-08-22 21:01:52] [Rank 0] step:781/10000 train_time:69713ms step_avg:89.26ms +[2025-08-22 21:01:52] [Rank 0] step:781/10000 train_time:69713ms step_avg:89.26ms +[2025-08-22 21:01:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:01:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:02:08] [Rank 0] PRINT: step:800/10000 val_loss:5.4657 svd_entropy: attn_qk:H=0.3943,top10E=0.83,eRank=17.9,q75/q25=51.53 attn_vo:H=0.3256,top10E=0.87,eRank=15.1,q75/q25=85.28 mlp_w1:H=0.6321,top10E=0.57,eRank=68.4,q75/q25=2.54 mlp_w2:H=0.6003,top10E=0.57,eRank=65.5,q75/q25=9.70 vo_prod:H=0.1642,top10E=0.98,eRank=3.6,q75/q25=961.84 train_time:71525ms step_avg:89.41ms +[2025-08-22 21:02:08] [Rank 0] PRINT: step:800/10000 val_loss:5.4657 svd_entropy: attn_qk:H=0.3943,top10E=0.83,eRank=17.9,q75/q25=51.53 attn_vo:H=0.3256,top10E=0.87,eRank=15.1,q75/q25=85.28 mlp_w1:H=0.6321,top10E=0.57,eRank=68.4,q75/q25=2.54 mlp_w2:H=0.6003,top10E=0.57,eRank=65.5,q75/q25=9.70 vo_prod:H=0.1642,top10E=0.98,eRank=3.6,q75/q25=961.84 train_time:71525ms step_avg:89.41ms +[2025-08-22 21:02:08] [Rank 0] step:801/10000 train_time:71542ms step_avg:89.32ms +[2025-08-22 21:02:08] [Rank 0] step:801/10000 train_time:71542ms step_avg:89.32ms +[2025-08-22 21:02:10] [Rank 0] step:821/10000 train_time:73315ms step_avg:89.30ms +[2025-08-22 21:02:10] [Rank 0] step:821/10000 train_time:73315ms step_avg:89.30ms +[2025-08-22 21:02:11] [Rank 0] step:841/10000 train_time:75106ms step_avg:89.31ms +[2025-08-22 21:02:11] [Rank 0] step:841/10000 train_time:75106ms step_avg:89.31ms +[2025-08-22 21:02:13] [Rank 0] step:861/10000 train_time:76899ms step_avg:89.31ms +[2025-08-22 21:02:13] [Rank 0] step:861/10000 train_time:76899ms step_avg:89.31ms +[2025-08-22 21:02:15] [Rank 0] step:881/10000 train_time:78694ms step_avg:89.32ms +[2025-08-22 21:02:15] [Rank 0] step:881/10000 train_time:78694ms step_avg:89.32ms +[2025-08-22 21:02:17] [Rank 0] step:901/10000 train_time:80491ms step_avg:89.33ms +[2025-08-22 21:02:17] [Rank 0] step:901/10000 train_time:80491ms step_avg:89.33ms +[2025-08-22 21:02:19] [Rank 0] step:921/10000 train_time:82289ms step_avg:89.35ms +[2025-08-22 21:02:19] [Rank 0] step:921/10000 train_time:82289ms step_avg:89.35ms +[2025-08-22 21:02:20] [Rank 0] step:941/10000 train_time:84088ms 
step_avg:89.36ms +[2025-08-22 21:02:20] [Rank 0] step:941/10000 train_time:84088ms step_avg:89.36ms +[2025-08-22 21:02:22] [Rank 0] step:961/10000 train_time:85885ms step_avg:89.37ms +[2025-08-22 21:02:22] [Rank 0] step:961/10000 train_time:85885ms step_avg:89.37ms +[2025-08-22 21:02:24] [Rank 0] step:981/10000 train_time:87686ms step_avg:89.38ms +[2025-08-22 21:02:24] [Rank 0] step:981/10000 train_time:87686ms step_avg:89.38ms +[2025-08-22 21:02:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:02:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:02:39] [Rank 0] PRINT: step:1000/10000 val_loss:5.3047 svd_entropy: attn_qk:H=0.4123,top10E=0.79,eRank=21.8,q75/q25=68.83 attn_vo:H=0.3563,top10E=0.83,eRank=19.8,q75/q25=84.02 mlp_w1:H=0.6247,top10E=0.56,eRank=66.5,q75/q25=2.62 mlp_w2:H=0.5948,top10E=0.56,eRank=63.7,q75/q25=9.88 vo_prod:H=0.1922,top10E=0.96,eRank=4.8,q75/q25=1494.76 train_time:89500ms step_avg:89.50ms +[2025-08-22 21:02:39] [Rank 0] PRINT: step:1000/10000 val_loss:5.3047 svd_entropy: attn_qk:H=0.4123,top10E=0.79,eRank=21.8,q75/q25=68.83 attn_vo:H=0.3563,top10E=0.83,eRank=19.8,q75/q25=84.02 mlp_w1:H=0.6247,top10E=0.56,eRank=66.5,q75/q25=2.62 mlp_w2:H=0.5948,top10E=0.56,eRank=63.7,q75/q25=9.88 vo_prod:H=0.1922,top10E=0.96,eRank=4.8,q75/q25=1494.76 train_time:89500ms step_avg:89.50ms +[2025-08-22 21:02:39] [Rank 0] step:1001/10000 train_time:89518ms step_avg:89.43ms +[2025-08-22 21:02:39] [Rank 0] step:1001/10000 train_time:89518ms step_avg:89.43ms +[2025-08-22 21:02:41] [Rank 0] step:1021/10000 train_time:91363ms step_avg:89.48ms +[2025-08-22 21:02:41] [Rank 0] step:1021/10000 train_time:91363ms step_avg:89.48ms +[2025-08-22 21:02:43] [Rank 0] step:1041/10000 train_time:93159ms step_avg:89.49ms +[2025-08-22 21:02:43] [Rank 0] step:1041/10000 train_time:93159ms 
step_avg:89.49ms +[2025-08-22 21:02:45] [Rank 0] step:1061/10000 train_time:94958ms step_avg:89.50ms +[2025-08-22 21:02:45] [Rank 0] step:1061/10000 train_time:94958ms step_avg:89.50ms +[2025-08-22 21:02:47] [Rank 0] step:1081/10000 train_time:96757ms step_avg:89.51ms +[2025-08-22 21:02:47] [Rank 0] step:1081/10000 train_time:96757ms step_avg:89.51ms +[2025-08-22 21:02:49] [Rank 0] step:1101/10000 train_time:98558ms step_avg:89.52ms +[2025-08-22 21:02:49] [Rank 0] step:1101/10000 train_time:98558ms step_avg:89.52ms +[2025-08-22 21:02:50] [Rank 0] step:1121/10000 train_time:100358ms step_avg:89.53ms +[2025-08-22 21:02:50] [Rank 0] step:1121/10000 train_time:100358ms step_avg:89.53ms +[2025-08-22 21:02:52] [Rank 0] step:1141/10000 train_time:102160ms step_avg:89.54ms +[2025-08-22 21:02:52] [Rank 0] step:1141/10000 train_time:102160ms step_avg:89.54ms +[2025-08-22 21:02:54] [Rank 0] step:1161/10000 train_time:103961ms step_avg:89.54ms +[2025-08-22 21:02:54] [Rank 0] step:1161/10000 train_time:103961ms step_avg:89.54ms +[2025-08-22 21:02:56] [Rank 0] step:1181/10000 train_time:105766ms step_avg:89.56ms +[2025-08-22 21:02:56] [Rank 0] step:1181/10000 train_time:105766ms step_avg:89.56ms +[2025-08-22 21:02:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:02:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:03:11] [Rank 0] PRINT: step:1200/10000 val_loss:5.0911 svd_entropy: attn_qk:H=0.4269,top10E=0.77,eRank=24.1,q75/q25=78.60 attn_vo:H=0.3646,top10E=0.82,eRank=22.8,q75/q25=86.94 mlp_w1:H=0.5999,top10E=0.57,eRank=59.2,q75/q25=3.29 mlp_w2:H=0.5752,top10E=0.58,eRank=58.2,q75/q25=11.27 vo_prod:H=0.2060,top10E=0.94,eRank=6.0,q75/q25=1762.42 train_time:107584ms step_avg:89.65ms +[2025-08-22 21:03:11] [Rank 0] PRINT: step:1200/10000 val_loss:5.0911 svd_entropy: attn_qk:H=0.4269,top10E=0.77,eRank=24.1,q75/q25=78.60 attn_vo:H=0.3646,top10E=0.82,eRank=22.8,q75/q25=86.94 mlp_w1:H=0.5999,top10E=0.57,eRank=59.2,q75/q25=3.29 mlp_w2:H=0.5752,top10E=0.58,eRank=58.2,q75/q25=11.27 vo_prod:H=0.2060,top10E=0.94,eRank=6.0,q75/q25=1762.42 train_time:107584ms step_avg:89.65ms +[2025-08-22 21:03:11] [Rank 0] step:1201/10000 train_time:107602ms step_avg:89.59ms +[2025-08-22 21:03:11] [Rank 0] step:1201/10000 train_time:107602ms step_avg:89.59ms +[2025-08-22 21:03:13] [Rank 0] step:1221/10000 train_time:109382ms step_avg:89.58ms +[2025-08-22 21:03:13] [Rank 0] step:1221/10000 train_time:109382ms step_avg:89.58ms +[2025-08-22 21:03:15] [Rank 0] step:1241/10000 train_time:111176ms step_avg:89.59ms +[2025-08-22 21:03:15] [Rank 0] step:1241/10000 train_time:111176ms step_avg:89.59ms +[2025-08-22 21:03:17] [Rank 0] step:1261/10000 train_time:112971ms step_avg:89.59ms +[2025-08-22 21:03:17] [Rank 0] step:1261/10000 train_time:112971ms step_avg:89.59ms +[2025-08-22 21:03:19] [Rank 0] step:1281/10000 train_time:114767ms step_avg:89.59ms +[2025-08-22 21:03:19] [Rank 0] step:1281/10000 train_time:114767ms step_avg:89.59ms +[2025-08-22 21:03:20] [Rank 0] step:1301/10000 train_time:116565ms step_avg:89.60ms +[2025-08-22 21:03:20] [Rank 0] step:1301/10000 train_time:116565ms step_avg:89.60ms +[2025-08-22 21:03:22] [Rank 0] step:1321/10000 train_time:118361ms step_avg:89.60ms +[2025-08-22 21:03:22] [Rank 0] step:1321/10000 train_time:118361ms step_avg:89.60ms +[2025-08-22 21:03:24] [Rank 0] 
step:1341/10000 train_time:120160ms step_avg:89.61ms +[2025-08-22 21:03:24] [Rank 0] step:1341/10000 train_time:120160ms step_avg:89.61ms +[2025-08-22 21:03:26] [Rank 0] step:1361/10000 train_time:121961ms step_avg:89.61ms +[2025-08-22 21:03:26] [Rank 0] step:1361/10000 train_time:121961ms step_avg:89.61ms +[2025-08-22 21:03:28] [Rank 0] step:1381/10000 train_time:123802ms step_avg:89.65ms +[2025-08-22 21:03:28] [Rank 0] step:1381/10000 train_time:123802ms step_avg:89.65ms +[2025-08-22 21:03:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:03:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:03:43] [Rank 0] PRINT: step:1400/10000 val_loss:4.9590 svd_entropy: attn_qk:H=0.4403,top10E=0.76,eRank=26.2,q75/q25=86.97 attn_vo:H=0.3743,top10E=0.80,eRank=26.2,q75/q25=87.22 mlp_w1:H=0.5992,top10E=0.57,eRank=60.9,q75/q25=4.63 mlp_w2:H=0.5747,top10E=0.57,eRank=60.0,q75/q25=12.79 vo_prod:H=0.2272,top10E=0.92,eRank=7.9,q75/q25=2012.47 train_time:125616ms step_avg:89.73ms +[2025-08-22 21:03:43] [Rank 0] PRINT: step:1400/10000 val_loss:4.9590 svd_entropy: attn_qk:H=0.4403,top10E=0.76,eRank=26.2,q75/q25=86.97 attn_vo:H=0.3743,top10E=0.80,eRank=26.2,q75/q25=87.22 mlp_w1:H=0.5992,top10E=0.57,eRank=60.9,q75/q25=4.63 mlp_w2:H=0.5747,top10E=0.57,eRank=60.0,q75/q25=12.79 vo_prod:H=0.2272,top10E=0.92,eRank=7.9,q75/q25=2012.47 train_time:125616ms step_avg:89.73ms +[2025-08-22 21:03:43] [Rank 0] step:1401/10000 train_time:125634ms step_avg:89.67ms +[2025-08-22 21:03:43] [Rank 0] step:1401/10000 train_time:125634ms step_avg:89.67ms +[2025-08-22 21:03:45] [Rank 0] step:1421/10000 train_time:127484ms step_avg:89.71ms +[2025-08-22 21:03:45] [Rank 0] step:1421/10000 train_time:127484ms step_avg:89.71ms +[2025-08-22 21:03:47] [Rank 0] step:1441/10000 train_time:129279ms step_avg:89.71ms +[2025-08-22 
21:03:47] [Rank 0] step:1441/10000 train_time:129279ms step_avg:89.71ms +[2025-08-22 21:03:49] [Rank 0] step:1461/10000 train_time:131075ms step_avg:89.72ms +[2025-08-22 21:03:49] [Rank 0] step:1461/10000 train_time:131075ms step_avg:89.72ms +[2025-08-22 21:03:50] [Rank 0] step:1481/10000 train_time:132873ms step_avg:89.72ms +[2025-08-22 21:03:50] [Rank 0] step:1481/10000 train_time:132873ms step_avg:89.72ms +[2025-08-22 21:03:52] [Rank 0] step:1501/10000 train_time:134679ms step_avg:89.73ms +[2025-08-22 21:03:52] [Rank 0] step:1501/10000 train_time:134679ms step_avg:89.73ms +[2025-08-22 21:03:54] [Rank 0] step:1521/10000 train_time:136489ms step_avg:89.74ms +[2025-08-22 21:03:54] [Rank 0] step:1521/10000 train_time:136489ms step_avg:89.74ms +[2025-08-22 21:03:56] [Rank 0] step:1541/10000 train_time:138299ms step_avg:89.75ms +[2025-08-22 21:03:56] [Rank 0] step:1541/10000 train_time:138299ms step_avg:89.75ms +[2025-08-22 21:03:58] [Rank 0] step:1561/10000 train_time:140111ms step_avg:89.76ms +[2025-08-22 21:03:58] [Rank 0] step:1561/10000 train_time:140111ms step_avg:89.76ms +[2025-08-22 21:03:59] [Rank 0] step:1581/10000 train_time:141924ms step_avg:89.77ms +[2025-08-22 21:03:59] [Rank 0] step:1581/10000 train_time:141924ms step_avg:89.77ms +[2025-08-22 21:04:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:04:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:04:15] [Rank 0] PRINT: step:1600/10000 val_loss:4.8328 svd_entropy: attn_qk:H=0.4466,top10E=0.75,eRank=27.5,q75/q25=86.40 attn_vo:H=0.3855,top10E=0.79,eRank=28.7,q75/q25=91.44 mlp_w1:H=0.5922,top10E=0.57,eRank=61.6,q75/q25=6.23 mlp_w2:H=0.5670,top10E=0.58,eRank=60.5,q75/q25=13.89 vo_prod:H=0.2321,top10E=0.91,eRank=9.0,q75/q25=2466.73 train_time:143751ms step_avg:89.84ms +[2025-08-22 21:04:15] [Rank 0] PRINT: step:1600/10000 val_loss:4.8328 svd_entropy: attn_qk:H=0.4466,top10E=0.75,eRank=27.5,q75/q25=86.40 attn_vo:H=0.3855,top10E=0.79,eRank=28.7,q75/q25=91.44 mlp_w1:H=0.5922,top10E=0.57,eRank=61.6,q75/q25=6.23 mlp_w2:H=0.5670,top10E=0.58,eRank=60.5,q75/q25=13.89 vo_prod:H=0.2321,top10E=0.91,eRank=9.0,q75/q25=2466.73 train_time:143751ms step_avg:89.84ms +[2025-08-22 21:04:15] [Rank 0] step:1601/10000 train_time:143769ms step_avg:89.80ms +[2025-08-22 21:04:15] [Rank 0] step:1601/10000 train_time:143769ms step_avg:89.80ms +[2025-08-22 21:04:17] [Rank 0] step:1621/10000 train_time:145565ms step_avg:89.80ms +[2025-08-22 21:04:17] [Rank 0] step:1621/10000 train_time:145565ms step_avg:89.80ms +[2025-08-22 21:04:19] [Rank 0] step:1641/10000 train_time:147374ms step_avg:89.81ms +[2025-08-22 21:04:19] [Rank 0] step:1641/10000 train_time:147374ms step_avg:89.81ms +[2025-08-22 21:04:20] [Rank 0] step:1661/10000 train_time:149184ms step_avg:89.82ms +[2025-08-22 21:04:20] [Rank 0] step:1661/10000 train_time:149184ms step_avg:89.82ms +[2025-08-22 21:04:22] [Rank 0] step:1681/10000 train_time:150995ms step_avg:89.82ms +[2025-08-22 21:04:22] [Rank 0] step:1681/10000 train_time:150995ms step_avg:89.82ms +[2025-08-22 21:04:24] [Rank 0] step:1701/10000 train_time:152808ms step_avg:89.83ms +[2025-08-22 21:04:24] [Rank 0] step:1701/10000 train_time:152808ms step_avg:89.83ms +[2025-08-22 21:04:26] [Rank 0] step:1721/10000 train_time:154620ms step_avg:89.84ms +[2025-08-22 21:04:26] [Rank 0] step:1721/10000 train_time:154620ms step_avg:89.84ms +[2025-08-22 21:04:28] [Rank 0] 
step:1741/10000 train_time:156433ms step_avg:89.85ms +[2025-08-22 21:04:28] [Rank 0] step:1741/10000 train_time:156433ms step_avg:89.85ms +[2025-08-22 21:04:30] [Rank 0] step:1761/10000 train_time:158246ms step_avg:89.86ms +[2025-08-22 21:04:30] [Rank 0] step:1761/10000 train_time:158246ms step_avg:89.86ms +[2025-08-22 21:04:31] [Rank 0] step:1781/10000 train_time:160059ms step_avg:89.87ms +[2025-08-22 21:04:31] [Rank 0] step:1781/10000 train_time:160059ms step_avg:89.87ms +[2025-08-22 21:04:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:04:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:04:47] [Rank 0] PRINT: step:1800/10000 val_loss:4.7386 svd_entropy: attn_qk:H=0.4537,top10E=0.74,eRank=29.3,q75/q25=82.67 attn_vo:H=0.3987,top10E=0.78,eRank=31.4,q75/q25=88.93 mlp_w1:H=0.5948,top10E=0.56,eRank=64.2,q75/q25=7.93 mlp_w2:H=0.5660,top10E=0.58,eRank=62.1,q75/q25=14.79 vo_prod:H=0.2435,top10E=0.90,eRank=10.3,q75/q25=2708.93 train_time:161887ms step_avg:89.94ms +[2025-08-22 21:04:47] [Rank 0] PRINT: step:1800/10000 val_loss:4.7386 svd_entropy: attn_qk:H=0.4537,top10E=0.74,eRank=29.3,q75/q25=82.67 attn_vo:H=0.3987,top10E=0.78,eRank=31.4,q75/q25=88.93 mlp_w1:H=0.5948,top10E=0.56,eRank=64.2,q75/q25=7.93 mlp_w2:H=0.5660,top10E=0.58,eRank=62.1,q75/q25=14.79 vo_prod:H=0.2435,top10E=0.90,eRank=10.3,q75/q25=2708.93 train_time:161887ms step_avg:89.94ms +[2025-08-22 21:04:47] [Rank 0] step:1801/10000 train_time:161904ms step_avg:89.90ms +[2025-08-22 21:04:47] [Rank 0] step:1801/10000 train_time:161904ms step_avg:89.90ms +[2025-08-22 21:04:49] [Rank 0] step:1821/10000 train_time:163701ms step_avg:89.90ms +[2025-08-22 21:04:49] [Rank 0] step:1821/10000 train_time:163701ms step_avg:89.90ms +[2025-08-22 21:04:51] [Rank 0] step:1841/10000 train_time:165513ms step_avg:89.90ms +[2025-08-22 
21:04:51] [Rank 0] step:1841/10000 train_time:165513ms step_avg:89.90ms +[2025-08-22 21:04:52] [Rank 0] step:1861/10000 train_time:167324ms step_avg:89.91ms +[2025-08-22 21:04:52] [Rank 0] step:1861/10000 train_time:167324ms step_avg:89.91ms +[2025-08-22 21:04:54] [Rank 0] step:1881/10000 train_time:169137ms step_avg:89.92ms +[2025-08-22 21:04:54] [Rank 0] step:1881/10000 train_time:169137ms step_avg:89.92ms +[2025-08-22 21:04:56] [Rank 0] step:1901/10000 train_time:170949ms step_avg:89.93ms +[2025-08-22 21:04:56] [Rank 0] step:1901/10000 train_time:170949ms step_avg:89.93ms +[2025-08-22 21:04:58] [Rank 0] step:1921/10000 train_time:172762ms step_avg:89.93ms +[2025-08-22 21:04:58] [Rank 0] step:1921/10000 train_time:172762ms step_avg:89.93ms +[2025-08-22 21:05:00] [Rank 0] step:1941/10000 train_time:174578ms step_avg:89.94ms +[2025-08-22 21:05:00] [Rank 0] step:1941/10000 train_time:174578ms step_avg:89.94ms +[2025-08-22 21:05:01] [Rank 0] step:1961/10000 train_time:176391ms step_avg:89.95ms +[2025-08-22 21:05:01] [Rank 0] step:1961/10000 train_time:176391ms step_avg:89.95ms +[2025-08-22 21:05:03] [Rank 0] step:1981/10000 train_time:178207ms step_avg:89.96ms +[2025-08-22 21:05:03] [Rank 0] step:1981/10000 train_time:178207ms step_avg:89.96ms +[2025-08-22 21:05:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:05:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:05:19] [Rank 0] PRINT: step:2000/10000 val_loss:4.6802 svd_entropy: attn_qk:H=0.4591,top10E=0.73,eRank=30.8,q75/q25=83.68 attn_vo:H=0.4014,top10E=0.77,eRank=33.5,q75/q25=90.00 mlp_w1:H=0.5925,top10E=0.56,eRank=65.2,q75/q25=9.65 mlp_w2:H=0.5627,top10E=0.58,eRank=63.5,q75/q25=15.93 vo_prod:H=0.2394,top10E=0.89,eRank=11.2,q75/q25=3030.51 train_time:180037ms step_avg:90.02ms +[2025-08-22 21:05:19] [Rank 0] PRINT: step:2000/10000 val_loss:4.6802 svd_entropy: attn_qk:H=0.4591,top10E=0.73,eRank=30.8,q75/q25=83.68 attn_vo:H=0.4014,top10E=0.77,eRank=33.5,q75/q25=90.00 mlp_w1:H=0.5925,top10E=0.56,eRank=65.2,q75/q25=9.65 mlp_w2:H=0.5627,top10E=0.58,eRank=63.5,q75/q25=15.93 vo_prod:H=0.2394,top10E=0.89,eRank=11.2,q75/q25=3030.51 train_time:180037ms step_avg:90.02ms +[2025-08-22 21:05:19] [Rank 0] step:2001/10000 train_time:180056ms step_avg:89.98ms +[2025-08-22 21:05:19] [Rank 0] step:2001/10000 train_time:180056ms step_avg:89.98ms +[2025-08-22 21:05:21] [Rank 0] step:2021/10000 train_time:181843ms step_avg:89.98ms +[2025-08-22 21:05:21] [Rank 0] step:2021/10000 train_time:181843ms step_avg:89.98ms +[2025-08-22 21:05:23] [Rank 0] step:2041/10000 train_time:184310ms step_avg:90.30ms +[2025-08-22 21:05:23] [Rank 0] step:2041/10000 train_time:184310ms step_avg:90.30ms +[2025-08-22 21:05:25] [Rank 0] step:2061/10000 train_time:186116ms step_avg:90.30ms +[2025-08-22 21:05:25] [Rank 0] step:2061/10000 train_time:186116ms step_avg:90.30ms +[2025-08-22 21:05:27] [Rank 0] step:2081/10000 train_time:187925ms step_avg:90.30ms +[2025-08-22 21:05:27] [Rank 0] step:2081/10000 train_time:187925ms step_avg:90.30ms +[2025-08-22 21:05:28] [Rank 0] step:2101/10000 train_time:189734ms step_avg:90.31ms +[2025-08-22 21:05:28] [Rank 0] step:2101/10000 train_time:189734ms step_avg:90.31ms +[2025-08-22 21:05:30] [Rank 0] step:2121/10000 train_time:191543ms step_avg:90.31ms +[2025-08-22 21:05:30] [Rank 0] step:2121/10000 train_time:191543ms step_avg:90.31ms +[2025-08-22 21:05:32] [Rank 
0] step:2141/10000 train_time:193355ms step_avg:90.31ms +[2025-08-22 21:05:32] [Rank 0] step:2141/10000 train_time:193355ms step_avg:90.31ms +[2025-08-22 21:05:34] [Rank 0] step:2161/10000 train_time:195165ms step_avg:90.31ms +[2025-08-22 21:05:34] [Rank 0] step:2161/10000 train_time:195165ms step_avg:90.31ms +[2025-08-22 21:05:36] [Rank 0] step:2181/10000 train_time:196976ms step_avg:90.31ms +[2025-08-22 21:05:36] [Rank 0] step:2181/10000 train_time:196976ms step_avg:90.31ms +[2025-08-22 21:05:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:05:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:05:51] [Rank 0] PRINT: step:2200/10000 val_loss:4.6110 svd_entropy: attn_qk:H=0.4664,top10E=0.72,eRank=32.2,q75/q25=73.88 attn_vo:H=0.4095,top10E=0.76,eRank=35.5,q75/q25=94.37 mlp_w1:H=0.5910,top10E=0.56,eRank=66.8,q75/q25=11.63 mlp_w2:H=0.5616,top10E=0.58,eRank=65.1,q75/q25=17.91 vo_prod:H=0.2460,top10E=0.89,eRank=11.9,q75/q25=3554.47 train_time:198802ms step_avg:90.36ms +[2025-08-22 21:05:51] [Rank 0] PRINT: step:2200/10000 val_loss:4.6110 svd_entropy: attn_qk:H=0.4664,top10E=0.72,eRank=32.2,q75/q25=73.88 attn_vo:H=0.4095,top10E=0.76,eRank=35.5,q75/q25=94.37 mlp_w1:H=0.5910,top10E=0.56,eRank=66.8,q75/q25=11.63 mlp_w2:H=0.5616,top10E=0.58,eRank=65.1,q75/q25=17.91 vo_prod:H=0.2460,top10E=0.89,eRank=11.9,q75/q25=3554.47 train_time:198802ms step_avg:90.36ms +[2025-08-22 21:05:51] [Rank 0] step:2201/10000 train_time:198820ms step_avg:90.33ms +[2025-08-22 21:05:51] [Rank 0] step:2201/10000 train_time:198820ms step_avg:90.33ms +[2025-08-22 21:05:53] [Rank 0] step:2221/10000 train_time:200619ms step_avg:90.33ms +[2025-08-22 21:05:53] [Rank 0] step:2221/10000 train_time:200619ms step_avg:90.33ms +[2025-08-22 21:05:55] [Rank 0] step:2241/10000 train_time:202460ms step_avg:90.34ms 
+[2025-08-22 21:05:55] [Rank 0] step:2241/10000 train_time:202460ms step_avg:90.34ms +[2025-08-22 21:05:57] [Rank 0] step:2261/10000 train_time:204307ms step_avg:90.36ms +[2025-08-22 21:05:57] [Rank 0] step:2261/10000 train_time:204307ms step_avg:90.36ms +[2025-08-22 21:05:58] [Rank 0] step:2281/10000 train_time:206159ms step_avg:90.38ms +[2025-08-22 21:05:58] [Rank 0] step:2281/10000 train_time:206159ms step_avg:90.38ms +[2025-08-22 21:06:00] [Rank 0] step:2301/10000 train_time:208010ms step_avg:90.40ms +[2025-08-22 21:06:00] [Rank 0] step:2301/10000 train_time:208010ms step_avg:90.40ms +[2025-08-22 21:06:02] [Rank 0] step:2321/10000 train_time:209863ms step_avg:90.42ms +[2025-08-22 21:06:02] [Rank 0] step:2321/10000 train_time:209863ms step_avg:90.42ms +[2025-08-22 21:06:04] [Rank 0] step:2341/10000 train_time:211717ms step_avg:90.44ms +[2025-08-22 21:06:04] [Rank 0] step:2341/10000 train_time:211717ms step_avg:90.44ms +[2025-08-22 21:06:06] [Rank 0] step:2361/10000 train_time:213571ms step_avg:90.46ms +[2025-08-22 21:06:06] [Rank 0] step:2361/10000 train_time:213571ms step_avg:90.46ms +[2025-08-22 21:06:08] [Rank 0] step:2381/10000 train_time:215428ms step_avg:90.48ms +[2025-08-22 21:06:08] [Rank 0] step:2381/10000 train_time:215428ms step_avg:90.48ms +[2025-08-22 21:06:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:06:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:06:23] [Rank 0] PRINT: step:2400/10000 val_loss:4.5413 svd_entropy: attn_qk:H=0.4730,top10E=0.71,eRank=33.8,q75/q25=72.47 attn_vo:H=0.4142,top10E=0.76,eRank=37.2,q75/q25=95.42 mlp_w1:H=0.5921,top10E=0.56,eRank=68.8,q75/q25=13.79 mlp_w2:H=0.5630,top10E=0.57,eRank=67.1,q75/q25=19.87 vo_prod:H=0.2488,top10E=0.88,eRank=12.7,q75/q25=3758.59 train_time:217298ms step_avg:90.54ms +[2025-08-22 21:06:23] [Rank 0] PRINT: step:2400/10000 val_loss:4.5413 svd_entropy: attn_qk:H=0.4730,top10E=0.71,eRank=33.8,q75/q25=72.47 attn_vo:H=0.4142,top10E=0.76,eRank=37.2,q75/q25=95.42 mlp_w1:H=0.5921,top10E=0.56,eRank=68.8,q75/q25=13.79 mlp_w2:H=0.5630,top10E=0.57,eRank=67.1,q75/q25=19.87 vo_prod:H=0.2488,top10E=0.88,eRank=12.7,q75/q25=3758.59 train_time:217298ms step_avg:90.54ms +[2025-08-22 21:06:23] [Rank 0] step:2401/10000 train_time:217316ms step_avg:90.51ms +[2025-08-22 21:06:23] [Rank 0] step:2401/10000 train_time:217316ms step_avg:90.51ms +[2025-08-22 21:06:25] [Rank 0] step:2421/10000 train_time:219168ms step_avg:90.53ms +[2025-08-22 21:06:25] [Rank 0] step:2421/10000 train_time:219168ms step_avg:90.53ms +[2025-08-22 21:06:27] [Rank 0] step:2441/10000 train_time:221019ms step_avg:90.54ms +[2025-08-22 21:06:27] [Rank 0] step:2441/10000 train_time:221019ms step_avg:90.54ms +[2025-08-22 21:06:29] [Rank 0] step:2461/10000 train_time:222875ms step_avg:90.56ms +[2025-08-22 21:06:29] [Rank 0] step:2461/10000 train_time:222875ms step_avg:90.56ms +[2025-08-22 21:06:31] [Rank 0] step:2481/10000 train_time:224732ms step_avg:90.58ms +[2025-08-22 21:06:31] [Rank 0] step:2481/10000 train_time:224732ms step_avg:90.58ms +[2025-08-22 21:06:32] [Rank 0] step:2501/10000 train_time:226588ms step_avg:90.60ms +[2025-08-22 21:06:32] [Rank 0] step:2501/10000 train_time:226588ms step_avg:90.60ms +[2025-08-22 21:06:34] [Rank 0] step:2521/10000 train_time:228445ms step_avg:90.62ms +[2025-08-22 21:06:34] [Rank 0] step:2521/10000 train_time:228445ms step_avg:90.62ms +[2025-08-22 21:06:36] 
[Rank 0] step:2541/10000 train_time:230303ms step_avg:90.63ms +[2025-08-22 21:06:36] [Rank 0] step:2541/10000 train_time:230303ms step_avg:90.63ms +[2025-08-22 21:06:38] [Rank 0] step:2561/10000 train_time:232161ms step_avg:90.65ms +[2025-08-22 21:06:38] [Rank 0] step:2561/10000 train_time:232161ms step_avg:90.65ms +[2025-08-22 21:06:40] [Rank 0] step:2581/10000 train_time:234019ms step_avg:90.67ms +[2025-08-22 21:06:40] [Rank 0] step:2581/10000 train_time:234019ms step_avg:90.67ms +[2025-08-22 21:06:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:06:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:06:55] [Rank 0] PRINT: step:2600/10000 val_loss:4.4950 svd_entropy: attn_qk:H=0.4795,top10E=0.70,eRank=35.4,q75/q25=69.39 attn_vo:H=0.4202,top10E=0.75,eRank=38.9,q75/q25=96.15 mlp_w1:H=0.5937,top10E=0.55,eRank=71.0,q75/q25=15.73 mlp_w2:H=0.5645,top10E=0.57,eRank=69.0,q75/q25=21.35 vo_prod:H=0.2570,top10E=0.87,eRank=13.4,q75/q25=4129.48 train_time:235892ms step_avg:90.73ms +[2025-08-22 21:06:55] [Rank 0] PRINT: step:2600/10000 val_loss:4.4950 svd_entropy: attn_qk:H=0.4795,top10E=0.70,eRank=35.4,q75/q25=69.39 attn_vo:H=0.4202,top10E=0.75,eRank=38.9,q75/q25=96.15 mlp_w1:H=0.5937,top10E=0.55,eRank=71.0,q75/q25=15.73 mlp_w2:H=0.5645,top10E=0.57,eRank=69.0,q75/q25=21.35 vo_prod:H=0.2570,top10E=0.87,eRank=13.4,q75/q25=4129.48 train_time:235892ms step_avg:90.73ms +[2025-08-22 21:06:55] [Rank 0] step:2601/10000 train_time:235909ms step_avg:90.70ms +[2025-08-22 21:06:55] [Rank 0] step:2601/10000 train_time:235909ms step_avg:90.70ms +[2025-08-22 21:06:57] [Rank 0] step:2621/10000 train_time:237750ms step_avg:90.71ms +[2025-08-22 21:06:57] [Rank 0] step:2621/10000 train_time:237750ms step_avg:90.71ms +[2025-08-22 21:06:59] [Rank 0] step:2641/10000 train_time:239598ms step_avg:90.72ms 
+[2025-08-22 21:06:59] [Rank 0] step:2641/10000 train_time:239598ms step_avg:90.72ms +[2025-08-22 21:07:01] [Rank 0] step:2661/10000 train_time:241447ms step_avg:90.74ms +[2025-08-22 21:07:01] [Rank 0] step:2661/10000 train_time:241447ms step_avg:90.74ms +[2025-08-22 21:07:03] [Rank 0] step:2681/10000 train_time:243297ms step_avg:90.75ms +[2025-08-22 21:07:03] [Rank 0] step:2681/10000 train_time:243297ms step_avg:90.75ms +[2025-08-22 21:07:05] [Rank 0] step:2701/10000 train_time:245146ms step_avg:90.76ms +[2025-08-22 21:07:05] [Rank 0] step:2701/10000 train_time:245146ms step_avg:90.76ms +[2025-08-22 21:07:06] [Rank 0] step:2721/10000 train_time:246997ms step_avg:90.77ms +[2025-08-22 21:07:06] [Rank 0] step:2721/10000 train_time:246997ms step_avg:90.77ms +[2025-08-22 21:07:08] [Rank 0] step:2741/10000 train_time:248850ms step_avg:90.79ms +[2025-08-22 21:07:08] [Rank 0] step:2741/10000 train_time:248850ms step_avg:90.79ms +[2025-08-22 21:07:10] [Rank 0] step:2761/10000 train_time:250703ms step_avg:90.80ms +[2025-08-22 21:07:10] [Rank 0] step:2761/10000 train_time:250703ms step_avg:90.80ms +[2025-08-22 21:07:12] [Rank 0] step:2781/10000 train_time:252557ms step_avg:90.82ms +[2025-08-22 21:07:12] [Rank 0] step:2781/10000 train_time:252557ms step_avg:90.82ms +[2025-08-22 21:07:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:07:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:07:27] [Rank 0] PRINT: step:2800/10000 val_loss:4.4675 svd_entropy: attn_qk:H=0.4872,top10E=0.69,eRank=36.8,q75/q25=69.70 attn_vo:H=0.4164,top10E=0.75,eRank=39.9,q75/q25=97.95 mlp_w1:H=0.5920,top10E=0.55,eRank=71.4,q75/q25=17.38 mlp_w2:H=0.5637,top10E=0.57,eRank=70.3,q75/q25=23.15 vo_prod:H=0.2476,top10E=0.87,eRank=14.0,q75/q25=4646.11 train_time:254424ms step_avg:90.87ms +[2025-08-22 21:07:27] [Rank 0] PRINT: step:2800/10000 val_loss:4.4675 svd_entropy: attn_qk:H=0.4872,top10E=0.69,eRank=36.8,q75/q25=69.70 attn_vo:H=0.4164,top10E=0.75,eRank=39.9,q75/q25=97.95 mlp_w1:H=0.5920,top10E=0.55,eRank=71.4,q75/q25=17.38 mlp_w2:H=0.5637,top10E=0.57,eRank=70.3,q75/q25=23.15 vo_prod:H=0.2476,top10E=0.87,eRank=14.0,q75/q25=4646.11 train_time:254424ms step_avg:90.87ms +[2025-08-22 21:07:28] [Rank 0] step:2801/10000 train_time:254442ms step_avg:90.84ms +[2025-08-22 21:07:28] [Rank 0] step:2801/10000 train_time:254442ms step_avg:90.84ms +[2025-08-22 21:07:29] [Rank 0] step:2821/10000 train_time:256277ms step_avg:90.85ms +[2025-08-22 21:07:29] [Rank 0] step:2821/10000 train_time:256277ms step_avg:90.85ms +[2025-08-22 21:07:31] [Rank 0] step:2841/10000 train_time:258123ms step_avg:90.86ms +[2025-08-22 21:07:31] [Rank 0] step:2841/10000 train_time:258123ms step_avg:90.86ms +[2025-08-22 21:07:33] [Rank 0] step:2861/10000 train_time:259971ms step_avg:90.87ms +[2025-08-22 21:07:33] [Rank 0] step:2861/10000 train_time:259971ms step_avg:90.87ms +[2025-08-22 21:07:35] [Rank 0] step:2881/10000 train_time:261819ms step_avg:90.88ms +[2025-08-22 21:07:35] [Rank 0] step:2881/10000 train_time:261819ms step_avg:90.88ms +[2025-08-22 21:07:37] [Rank 0] step:2901/10000 train_time:263668ms step_avg:90.89ms +[2025-08-22 21:07:37] [Rank 0] step:2901/10000 train_time:263668ms step_avg:90.89ms +[2025-08-22 21:07:39] [Rank 0] step:2921/10000 train_time:265518ms step_avg:90.90ms +[2025-08-22 21:07:39] [Rank 0] step:2921/10000 train_time:265518ms step_avg:90.90ms +[2025-08-22 21:07:40] 
[Rank 0] step:2941/10000 train_time:267368ms step_avg:90.91ms +[2025-08-22 21:07:40] [Rank 0] step:2941/10000 train_time:267368ms step_avg:90.91ms +[2025-08-22 21:07:42] [Rank 0] step:2961/10000 train_time:269220ms step_avg:90.92ms +[2025-08-22 21:07:42] [Rank 0] step:2961/10000 train_time:269220ms step_avg:90.92ms +[2025-08-22 21:07:44] [Rank 0] step:2981/10000 train_time:271081ms step_avg:90.94ms +[2025-08-22 21:07:44] [Rank 0] step:2981/10000 train_time:271081ms step_avg:90.94ms +[2025-08-22 21:07:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:07:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:08:00] [Rank 0] PRINT: step:3000/10000 val_loss:4.4255 svd_entropy: attn_qk:H=0.4919,top10E=0.69,eRank=38.1,q75/q25=68.97 attn_vo:H=0.4159,top10E=0.75,eRank=40.8,q75/q25=99.24 mlp_w1:H=0.5950,top10E=0.54,eRank=73.2,q75/q25=18.93 mlp_w2:H=0.5637,top10E=0.56,eRank=71.8,q75/q25=25.27 vo_prod:H=0.2474,top10E=0.87,eRank=14.5,q75/q25=5247.37 train_time:272954ms step_avg:90.98ms +[2025-08-22 21:08:00] [Rank 0] PRINT: step:3000/10000 val_loss:4.4255 svd_entropy: attn_qk:H=0.4919,top10E=0.69,eRank=38.1,q75/q25=68.97 attn_vo:H=0.4159,top10E=0.75,eRank=40.8,q75/q25=99.24 mlp_w1:H=0.5950,top10E=0.54,eRank=73.2,q75/q25=18.93 mlp_w2:H=0.5637,top10E=0.56,eRank=71.8,q75/q25=25.27 vo_prod:H=0.2474,top10E=0.87,eRank=14.5,q75/q25=5247.37 train_time:272954ms step_avg:90.98ms +[2025-08-22 21:08:00] [Rank 0] step:3001/10000 train_time:272971ms step_avg:90.96ms +[2025-08-22 21:08:00] [Rank 0] step:3001/10000 train_time:272971ms step_avg:90.96ms +[2025-08-22 21:08:01] [Rank 0] step:3021/10000 train_time:274818ms step_avg:90.97ms +[2025-08-22 21:08:01] [Rank 0] step:3021/10000 train_time:274818ms step_avg:90.97ms +[2025-08-22 21:08:03] [Rank 0] step:3041/10000 train_time:276675ms step_avg:90.98ms 
+[2025-08-22 21:08:03] [Rank 0] step:3041/10000 train_time:276675ms step_avg:90.98ms +[2025-08-22 21:08:05] [Rank 0] step:3061/10000 train_time:278531ms step_avg:90.99ms +[2025-08-22 21:08:05] [Rank 0] step:3061/10000 train_time:278531ms step_avg:90.99ms +[2025-08-22 21:08:07] [Rank 0] step:3081/10000 train_time:280386ms step_avg:91.00ms +[2025-08-22 21:08:07] [Rank 0] step:3081/10000 train_time:280386ms step_avg:91.00ms +[2025-08-22 21:08:09] [Rank 0] step:3101/10000 train_time:282244ms step_avg:91.02ms +[2025-08-22 21:08:09] [Rank 0] step:3101/10000 train_time:282244ms step_avg:91.02ms +[2025-08-22 21:08:11] [Rank 0] step:3121/10000 train_time:284101ms step_avg:91.03ms +[2025-08-22 21:08:11] [Rank 0] step:3121/10000 train_time:284101ms step_avg:91.03ms +[2025-08-22 21:08:13] [Rank 0] step:3141/10000 train_time:285961ms step_avg:91.04ms +[2025-08-22 21:08:13] [Rank 0] step:3141/10000 train_time:285961ms step_avg:91.04ms +[2025-08-22 21:08:14] [Rank 0] step:3161/10000 train_time:287822ms step_avg:91.05ms +[2025-08-22 21:08:14] [Rank 0] step:3161/10000 train_time:287822ms step_avg:91.05ms +[2025-08-22 21:08:16] [Rank 0] step:3181/10000 train_time:289684ms step_avg:91.07ms +[2025-08-22 21:08:16] [Rank 0] step:3181/10000 train_time:289684ms step_avg:91.07ms +[2025-08-22 21:08:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:08:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:08:32] [Rank 0] PRINT: step:3200/10000 val_loss:4.3939 svd_entropy: attn_qk:H=0.4960,top10E=0.68,eRank=39.1,q75/q25=67.23 attn_vo:H=0.4195,top10E=0.75,eRank=42.0,q75/q25=99.21 mlp_w1:H=0.5967,top10E=0.54,eRank=74.9,q75/q25=19.89 mlp_w2:H=0.5657,top10E=0.56,eRank=73.4,q75/q25=27.34 vo_prod:H=0.2488,top10E=0.86,eRank=15.0,q75/q25=6152.49 train_time:291560ms step_avg:91.11ms +[2025-08-22 21:08:32] [Rank 0] PRINT: step:3200/10000 val_loss:4.3939 svd_entropy: attn_qk:H=0.4960,top10E=0.68,eRank=39.1,q75/q25=67.23 attn_vo:H=0.4195,top10E=0.75,eRank=42.0,q75/q25=99.21 mlp_w1:H=0.5967,top10E=0.54,eRank=74.9,q75/q25=19.89 mlp_w2:H=0.5657,top10E=0.56,eRank=73.4,q75/q25=27.34 vo_prod:H=0.2488,top10E=0.86,eRank=15.0,q75/q25=6152.49 train_time:291560ms step_avg:91.11ms +[2025-08-22 21:08:32] [Rank 0] step:3201/10000 train_time:291578ms step_avg:91.09ms +[2025-08-22 21:08:32] [Rank 0] step:3201/10000 train_time:291578ms step_avg:91.09ms +[2025-08-22 21:08:34] [Rank 0] step:3221/10000 train_time:293435ms step_avg:91.10ms +[2025-08-22 21:08:34] [Rank 0] step:3221/10000 train_time:293435ms step_avg:91.10ms +[2025-08-22 21:08:36] [Rank 0] step:3241/10000 train_time:295296ms step_avg:91.11ms +[2025-08-22 21:08:36] [Rank 0] step:3241/10000 train_time:295296ms step_avg:91.11ms +[2025-08-22 21:08:37] [Rank 0] step:3261/10000 train_time:297156ms step_avg:91.12ms +[2025-08-22 21:08:37] [Rank 0] step:3261/10000 train_time:297156ms step_avg:91.12ms +[2025-08-22 21:08:39] [Rank 0] step:3281/10000 train_time:299017ms step_avg:91.14ms +[2025-08-22 21:08:39] [Rank 0] step:3281/10000 train_time:299017ms step_avg:91.14ms +[2025-08-22 21:08:41] [Rank 0] step:3301/10000 train_time:300880ms step_avg:91.15ms +[2025-08-22 21:08:41] [Rank 0] step:3301/10000 train_time:300880ms step_avg:91.15ms +[2025-08-22 21:08:43] [Rank 0] step:3321/10000 train_time:302745ms step_avg:91.16ms +[2025-08-22 21:08:43] [Rank 0] step:3321/10000 train_time:302745ms step_avg:91.16ms +[2025-08-22 21:08:45] 
[Rank 0] step:3341/10000 train_time:304612ms step_avg:91.17ms +[2025-08-22 21:08:45] [Rank 0] step:3341/10000 train_time:304612ms step_avg:91.17ms +[2025-08-22 21:08:47] [Rank 0] step:3361/10000 train_time:306480ms step_avg:91.19ms +[2025-08-22 21:08:47] [Rank 0] step:3361/10000 train_time:306480ms step_avg:91.19ms +[2025-08-22 21:08:49] [Rank 0] step:3381/10000 train_time:308347ms step_avg:91.20ms +[2025-08-22 21:08:49] [Rank 0] step:3381/10000 train_time:308347ms step_avg:91.20ms +[2025-08-22 21:08:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:08:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:09:04] [Rank 0] PRINT: step:3400/10000 val_loss:4.3662 svd_entropy: attn_qk:H=0.4935,top10E=0.68,eRank=39.3,q75/q25=67.90 attn_vo:H=0.4227,top10E=0.74,eRank=43.5,q75/q25=99.86 mlp_w1:H=0.5944,top10E=0.54,eRank=75.5,q75/q25=20.97 mlp_w2:H=0.5657,top10E=0.56,eRank=74.6,q75/q25=30.45 vo_prod:H=0.2551,top10E=0.86,eRank=15.7,q75/q25=6559.59 train_time:310230ms step_avg:91.24ms +[2025-08-22 21:09:04] [Rank 0] PRINT: step:3400/10000 val_loss:4.3662 svd_entropy: attn_qk:H=0.4935,top10E=0.68,eRank=39.3,q75/q25=67.90 attn_vo:H=0.4227,top10E=0.74,eRank=43.5,q75/q25=99.86 mlp_w1:H=0.5944,top10E=0.54,eRank=75.5,q75/q25=20.97 mlp_w2:H=0.5657,top10E=0.56,eRank=74.6,q75/q25=30.45 vo_prod:H=0.2551,top10E=0.86,eRank=15.7,q75/q25=6559.59 train_time:310230ms step_avg:91.24ms +[2025-08-22 21:09:04] [Rank 0] step:3401/10000 train_time:310248ms step_avg:91.22ms +[2025-08-22 21:09:04] [Rank 0] step:3401/10000 train_time:310248ms step_avg:91.22ms +[2025-08-22 21:09:06] [Rank 0] step:3421/10000 train_time:312106ms step_avg:91.23ms +[2025-08-22 21:09:06] [Rank 0] step:3421/10000 train_time:312106ms step_avg:91.23ms +[2025-08-22 21:09:08] [Rank 0] step:3441/10000 train_time:313964ms step_avg:91.24ms 
+[2025-08-22 21:09:08] [Rank 0] step:3441/10000 train_time:313964ms step_avg:91.24ms +[2025-08-22 21:09:10] [Rank 0] step:3461/10000 train_time:315822ms step_avg:91.25ms +[2025-08-22 21:09:10] [Rank 0] step:3461/10000 train_time:315822ms step_avg:91.25ms +[2025-08-22 21:09:12] [Rank 0] step:3481/10000 train_time:317681ms step_avg:91.26ms +[2025-08-22 21:09:12] [Rank 0] step:3481/10000 train_time:317681ms step_avg:91.26ms +[2025-08-22 21:09:13] [Rank 0] step:3501/10000 train_time:319541ms step_avg:91.27ms +[2025-08-22 21:09:13] [Rank 0] step:3501/10000 train_time:319541ms step_avg:91.27ms +[2025-08-22 21:09:15] [Rank 0] step:3521/10000 train_time:321404ms step_avg:91.28ms +[2025-08-22 21:09:15] [Rank 0] step:3521/10000 train_time:321404ms step_avg:91.28ms +[2025-08-22 21:09:17] [Rank 0] step:3541/10000 train_time:323265ms step_avg:91.29ms +[2025-08-22 21:09:17] [Rank 0] step:3541/10000 train_time:323265ms step_avg:91.29ms +[2025-08-22 21:09:19] [Rank 0] step:3561/10000 train_time:325126ms step_avg:91.30ms +[2025-08-22 21:09:19] [Rank 0] step:3561/10000 train_time:325126ms step_avg:91.30ms +[2025-08-22 21:09:21] [Rank 0] step:3581/10000 train_time:326989ms step_avg:91.31ms +[2025-08-22 21:09:21] [Rank 0] step:3581/10000 train_time:326989ms step_avg:91.31ms +[2025-08-22 21:09:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:09:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:09:36] [Rank 0] PRINT: step:3600/10000 val_loss:4.3620 svd_entropy: attn_qk:H=0.4953,top10E=0.68,eRank=40.0,q75/q25=67.78 attn_vo:H=0.4272,top10E=0.74,eRank=44.7,q75/q25=93.88 mlp_w1:H=0.5936,top10E=0.54,eRank=76.1,q75/q25=21.84 mlp_w2:H=0.5643,top10E=0.56,eRank=75.4,q75/q25=32.35 vo_prod:H=0.2546,top10E=0.86,eRank=16.0,q75/q25=6715.45 train_time:328868ms step_avg:91.35ms +[2025-08-22 21:09:36] [Rank 0] PRINT: step:3600/10000 val_loss:4.3620 svd_entropy: attn_qk:H=0.4953,top10E=0.68,eRank=40.0,q75/q25=67.78 attn_vo:H=0.4272,top10E=0.74,eRank=44.7,q75/q25=93.88 mlp_w1:H=0.5936,top10E=0.54,eRank=76.1,q75/q25=21.84 mlp_w2:H=0.5643,top10E=0.56,eRank=75.4,q75/q25=32.35 vo_prod:H=0.2546,top10E=0.86,eRank=16.0,q75/q25=6715.45 train_time:328868ms step_avg:91.35ms +[2025-08-22 21:09:37] [Rank 0] step:3601/10000 train_time:328886ms step_avg:91.33ms +[2025-08-22 21:09:37] [Rank 0] step:3601/10000 train_time:328886ms step_avg:91.33ms +[2025-08-22 21:09:38] [Rank 0] step:3621/10000 train_time:330732ms step_avg:91.34ms +[2025-08-22 21:09:38] [Rank 0] step:3621/10000 train_time:330732ms step_avg:91.34ms +[2025-08-22 21:09:40] [Rank 0] step:3641/10000 train_time:332586ms step_avg:91.34ms +[2025-08-22 21:09:40] [Rank 0] step:3641/10000 train_time:332586ms step_avg:91.34ms +[2025-08-22 21:09:42] [Rank 0] step:3661/10000 train_time:334440ms step_avg:91.35ms +[2025-08-22 21:09:42] [Rank 0] step:3661/10000 train_time:334440ms step_avg:91.35ms +[2025-08-22 21:09:44] [Rank 0] step:3681/10000 train_time:336297ms step_avg:91.36ms +[2025-08-22 21:09:44] [Rank 0] step:3681/10000 train_time:336297ms step_avg:91.36ms +[2025-08-22 21:09:46] [Rank 0] step:3701/10000 train_time:338157ms step_avg:91.37ms +[2025-08-22 21:09:46] [Rank 0] step:3701/10000 train_time:338157ms step_avg:91.37ms +[2025-08-22 21:09:48] [Rank 0] step:3721/10000 train_time:340043ms step_avg:91.38ms +[2025-08-22 21:09:48] [Rank 0] step:3721/10000 train_time:340043ms step_avg:91.38ms +[2025-08-22 21:09:50] 
[Rank 0] step:3741/10000 train_time:341939ms step_avg:91.40ms +[2025-08-22 21:09:50] [Rank 0] step:3741/10000 train_time:341939ms step_avg:91.40ms +[2025-08-22 21:09:52] [Rank 0] step:3761/10000 train_time:343835ms step_avg:91.42ms +[2025-08-22 21:09:52] [Rank 0] step:3761/10000 train_time:343835ms step_avg:91.42ms +[2025-08-22 21:09:53] [Rank 0] step:3781/10000 train_time:345735ms step_avg:91.44ms +[2025-08-22 21:09:53] [Rank 0] step:3781/10000 train_time:345735ms step_avg:91.44ms +[2025-08-22 21:09:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:09:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:10:09] [Rank 0] PRINT: step:3800/10000 val_loss:4.3152 svd_entropy: attn_qk:H=0.4969,top10E=0.67,eRank=40.7,q75/q25=67.72 attn_vo:H=0.4314,top10E=0.73,eRank=45.9,q75/q25=94.21 mlp_w1:H=0.5930,top10E=0.54,eRank=77.0,q75/q25=22.76 mlp_w2:H=0.5648,top10E=0.56,eRank=76.6,q75/q25=33.90 vo_prod:H=0.2524,top10E=0.86,eRank=16.4,q75/q25=7344.05 train_time:347646ms step_avg:91.49ms +[2025-08-22 21:10:09] [Rank 0] PRINT: step:3800/10000 val_loss:4.3152 svd_entropy: attn_qk:H=0.4969,top10E=0.67,eRank=40.7,q75/q25=67.72 attn_vo:H=0.4314,top10E=0.73,eRank=45.9,q75/q25=94.21 mlp_w1:H=0.5930,top10E=0.54,eRank=77.0,q75/q25=22.76 mlp_w2:H=0.5648,top10E=0.56,eRank=76.6,q75/q25=33.90 vo_prod:H=0.2524,top10E=0.86,eRank=16.4,q75/q25=7344.05 train_time:347646ms step_avg:91.49ms +[2025-08-22 21:10:09] [Rank 0] step:3801/10000 train_time:347663ms step_avg:91.47ms +[2025-08-22 21:10:09] [Rank 0] step:3801/10000 train_time:347663ms step_avg:91.47ms +[2025-08-22 21:10:11] [Rank 0] step:3821/10000 train_time:349544ms step_avg:91.48ms +[2025-08-22 21:10:11] [Rank 0] step:3821/10000 train_time:349544ms step_avg:91.48ms +[2025-08-22 21:10:13] [Rank 0] step:3841/10000 train_time:351438ms step_avg:91.50ms 
+[2025-08-22 21:10:13] [Rank 0] step:3841/10000 train_time:351438ms step_avg:91.50ms +[2025-08-22 21:10:15] [Rank 0] step:3861/10000 train_time:353330ms step_avg:91.51ms +[2025-08-22 21:10:15] [Rank 0] step:3861/10000 train_time:353330ms step_avg:91.51ms +[2025-08-22 21:10:17] [Rank 0] step:3881/10000 train_time:355220ms step_avg:91.53ms +[2025-08-22 21:10:17] [Rank 0] step:3881/10000 train_time:355220ms step_avg:91.53ms +[2025-08-22 21:10:19] [Rank 0] step:3901/10000 train_time:357111ms step_avg:91.54ms +[2025-08-22 21:10:19] [Rank 0] step:3901/10000 train_time:357111ms step_avg:91.54ms +[2025-08-22 21:10:21] [Rank 0] step:3921/10000 train_time:359003ms step_avg:91.56ms +[2025-08-22 21:10:21] [Rank 0] step:3921/10000 train_time:359003ms step_avg:91.56ms +[2025-08-22 21:10:23] [Rank 0] step:3941/10000 train_time:360897ms step_avg:91.57ms +[2025-08-22 21:10:23] [Rank 0] step:3941/10000 train_time:360897ms step_avg:91.57ms +[2025-08-22 21:10:24] [Rank 0] step:3961/10000 train_time:362789ms step_avg:91.59ms +[2025-08-22 21:10:24] [Rank 0] step:3961/10000 train_time:362789ms step_avg:91.59ms +[2025-08-22 21:10:26] [Rank 0] step:3981/10000 train_time:364683ms step_avg:91.61ms +[2025-08-22 21:10:26] [Rank 0] step:3981/10000 train_time:364683ms step_avg:91.61ms +[2025-08-22 21:10:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:10:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:10:42] [Rank 0] PRINT: step:4000/10000 val_loss:4.2919 svd_entropy: attn_qk:H=0.5002,top10E=0.67,eRank=41.6,q75/q25=65.81 attn_vo:H=0.4390,top10E=0.73,eRank=47.1,q75/q25=95.94 mlp_w1:H=0.5929,top10E=0.54,eRank=77.9,q75/q25=23.78 mlp_w2:H=0.5651,top10E=0.56,eRank=77.6,q75/q25=35.81 vo_prod:H=0.2596,top10E=0.86,eRank=16.9,q75/q25=7548.13 train_time:366592ms step_avg:91.65ms +[2025-08-22 21:10:42] [Rank 0] PRINT: step:4000/10000 val_loss:4.2919 svd_entropy: attn_qk:H=0.5002,top10E=0.67,eRank=41.6,q75/q25=65.81 attn_vo:H=0.4390,top10E=0.73,eRank=47.1,q75/q25=95.94 mlp_w1:H=0.5929,top10E=0.54,eRank=77.9,q75/q25=23.78 mlp_w2:H=0.5651,top10E=0.56,eRank=77.6,q75/q25=35.81 vo_prod:H=0.2596,top10E=0.86,eRank=16.9,q75/q25=7548.13 train_time:366592ms step_avg:91.65ms +[2025-08-22 21:10:42] [Rank 0] step:4001/10000 train_time:366610ms step_avg:91.63ms +[2025-08-22 21:10:42] [Rank 0] step:4001/10000 train_time:366610ms step_avg:91.63ms +[2025-08-22 21:10:44] [Rank 0] step:4021/10000 train_time:368487ms step_avg:91.64ms +[2025-08-22 21:10:44] [Rank 0] step:4021/10000 train_time:368487ms step_avg:91.64ms +[2025-08-22 21:10:46] [Rank 0] step:4041/10000 train_time:370380ms step_avg:91.66ms +[2025-08-22 21:10:46] [Rank 0] step:4041/10000 train_time:370380ms step_avg:91.66ms +[2025-08-22 21:10:48] [Rank 0] step:4061/10000 train_time:372274ms step_avg:91.67ms +[2025-08-22 21:10:48] [Rank 0] step:4061/10000 train_time:372274ms step_avg:91.67ms +[2025-08-22 21:10:50] [Rank 0] step:4081/10000 train_time:374814ms step_avg:91.84ms +[2025-08-22 21:10:50] [Rank 0] step:4081/10000 train_time:374814ms step_avg:91.84ms +[2025-08-22 21:10:52] [Rank 0] step:4101/10000 train_time:376709ms step_avg:91.86ms +[2025-08-22 21:10:52] [Rank 0] step:4101/10000 train_time:376709ms step_avg:91.86ms +[2025-08-22 21:10:54] [Rank 0] step:4121/10000 train_time:378606ms step_avg:91.87ms +[2025-08-22 21:10:54] [Rank 0] step:4121/10000 train_time:378606ms step_avg:91.87ms +[2025-08-22 21:10:56] 
[Rank 0] step:4141/10000 train_time:380509ms step_avg:91.89ms +[2025-08-22 21:10:56] [Rank 0] step:4141/10000 train_time:380509ms step_avg:91.89ms +[2025-08-22 21:10:58] [Rank 0] step:4161/10000 train_time:382408ms step_avg:91.90ms +[2025-08-22 21:10:58] [Rank 0] step:4161/10000 train_time:382408ms step_avg:91.90ms +[2025-08-22 21:11:00] [Rank 0] step:4181/10000 train_time:384311ms step_avg:91.92ms +[2025-08-22 21:11:00] [Rank 0] step:4181/10000 train_time:384311ms step_avg:91.92ms +[2025-08-22 21:11:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:11:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:11:15] [Rank 0] PRINT: step:4200/10000 val_loss:4.2804 svd_entropy: attn_qk:H=0.5036,top10E=0.66,eRank=42.5,q75/q25=64.75 attn_vo:H=0.4423,top10E=0.73,eRank=48.2,q75/q25=95.80 mlp_w1:H=0.5938,top10E=0.53,eRank=79.1,q75/q25=24.62 mlp_w2:H=0.5670,top10E=0.55,eRank=78.8,q75/q25=37.45 vo_prod:H=0.2643,top10E=0.85,eRank=17.4,q75/q25=8004.37 train_time:386225ms step_avg:91.96ms +[2025-08-22 21:11:15] [Rank 0] PRINT: step:4200/10000 val_loss:4.2804 svd_entropy: attn_qk:H=0.5036,top10E=0.66,eRank=42.5,q75/q25=64.75 attn_vo:H=0.4423,top10E=0.73,eRank=48.2,q75/q25=95.80 mlp_w1:H=0.5938,top10E=0.53,eRank=79.1,q75/q25=24.62 mlp_w2:H=0.5670,top10E=0.55,eRank=78.8,q75/q25=37.45 vo_prod:H=0.2643,top10E=0.85,eRank=17.4,q75/q25=8004.37 train_time:386225ms step_avg:91.96ms +[2025-08-22 21:11:15] [Rank 0] step:4201/10000 train_time:386243ms step_avg:91.94ms +[2025-08-22 21:11:15] [Rank 0] step:4201/10000 train_time:386243ms step_avg:91.94ms +[2025-08-22 21:11:17] [Rank 0] step:4221/10000 train_time:388119ms step_avg:91.95ms +[2025-08-22 21:11:17] [Rank 0] step:4221/10000 train_time:388119ms step_avg:91.95ms +[2025-08-22 21:11:19] [Rank 0] step:4241/10000 train_time:390012ms step_avg:91.96ms 
+[2025-08-22 21:11:19] [Rank 0] step:4241/10000 train_time:390012ms step_avg:91.96ms +[2025-08-22 21:11:21] [Rank 0] step:4261/10000 train_time:391906ms step_avg:91.98ms +[2025-08-22 21:11:21] [Rank 0] step:4261/10000 train_time:391906ms step_avg:91.98ms +[2025-08-22 21:11:23] [Rank 0] step:4281/10000 train_time:393801ms step_avg:91.99ms +[2025-08-22 21:11:23] [Rank 0] step:4281/10000 train_time:393801ms step_avg:91.99ms +[2025-08-22 21:11:25] [Rank 0] step:4301/10000 train_time:395696ms step_avg:92.00ms +[2025-08-22 21:11:25] [Rank 0] step:4301/10000 train_time:395696ms step_avg:92.00ms +[2025-08-22 21:11:27] [Rank 0] step:4321/10000 train_time:397593ms step_avg:92.01ms +[2025-08-22 21:11:27] [Rank 0] step:4321/10000 train_time:397593ms step_avg:92.01ms +[2025-08-22 21:11:29] [Rank 0] step:4341/10000 train_time:399488ms step_avg:92.03ms +[2025-08-22 21:11:29] [Rank 0] step:4341/10000 train_time:399488ms step_avg:92.03ms +[2025-08-22 21:11:30] [Rank 0] step:4361/10000 train_time:401385ms step_avg:92.04ms +[2025-08-22 21:11:30] [Rank 0] step:4361/10000 train_time:401385ms step_avg:92.04ms +[2025-08-22 21:11:32] [Rank 0] step:4381/10000 train_time:403280ms step_avg:92.05ms +[2025-08-22 21:11:32] [Rank 0] step:4381/10000 train_time:403280ms step_avg:92.05ms +[2025-08-22 21:11:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:11:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:11:48] [Rank 0] PRINT: step:4400/10000 val_loss:4.2589 svd_entropy: attn_qk:H=0.5041,top10E=0.66,eRank=43.0,q75/q25=65.50 attn_vo:H=0.4463,top10E=0.72,eRank=49.3,q75/q25=96.37 mlp_w1:H=0.5934,top10E=0.53,eRank=79.9,q75/q25=25.76 mlp_w2:H=0.5672,top10E=0.55,eRank=79.7,q75/q25=38.63 vo_prod:H=0.2718,top10E=0.85,eRank=17.8,q75/q25=8463.45 train_time:405192ms step_avg:92.09ms +[2025-08-22 21:11:48] [Rank 0] PRINT: step:4400/10000 val_loss:4.2589 svd_entropy: attn_qk:H=0.5041,top10E=0.66,eRank=43.0,q75/q25=65.50 attn_vo:H=0.4463,top10E=0.72,eRank=49.3,q75/q25=96.37 mlp_w1:H=0.5934,top10E=0.53,eRank=79.9,q75/q25=25.76 mlp_w2:H=0.5672,top10E=0.55,eRank=79.7,q75/q25=38.63 vo_prod:H=0.2718,top10E=0.85,eRank=17.8,q75/q25=8463.45 train_time:405192ms step_avg:92.09ms +[2025-08-22 21:11:48] [Rank 0] step:4401/10000 train_time:405209ms step_avg:92.07ms +[2025-08-22 21:11:48] [Rank 0] step:4401/10000 train_time:405209ms step_avg:92.07ms +[2025-08-22 21:11:50] [Rank 0] step:4421/10000 train_time:407103ms step_avg:92.08ms +[2025-08-22 21:11:50] [Rank 0] step:4421/10000 train_time:407103ms step_avg:92.08ms +[2025-08-22 21:11:52] [Rank 0] step:4441/10000 train_time:408992ms step_avg:92.09ms +[2025-08-22 21:11:52] [Rank 0] step:4441/10000 train_time:408992ms step_avg:92.09ms +[2025-08-22 21:11:54] [Rank 0] step:4461/10000 train_time:410888ms step_avg:92.11ms +[2025-08-22 21:11:54] [Rank 0] step:4461/10000 train_time:410888ms step_avg:92.11ms +[2025-08-22 21:11:55] [Rank 0] step:4481/10000 train_time:412787ms step_avg:92.12ms +[2025-08-22 21:11:55] [Rank 0] step:4481/10000 train_time:412787ms step_avg:92.12ms +[2025-08-22 21:11:57] [Rank 0] step:4501/10000 train_time:414683ms step_avg:92.13ms +[2025-08-22 21:11:57] [Rank 0] step:4501/10000 train_time:414683ms step_avg:92.13ms +[2025-08-22 21:11:59] [Rank 0] step:4521/10000 train_time:416583ms step_avg:92.14ms +[2025-08-22 21:11:59] [Rank 0] step:4521/10000 train_time:416583ms step_avg:92.14ms +[2025-08-22 21:12:01] 
[Rank 0] step:4541/10000 train_time:418483ms step_avg:92.16ms +[2025-08-22 21:12:01] [Rank 0] step:4541/10000 train_time:418483ms step_avg:92.16ms +[2025-08-22 21:12:03] [Rank 0] step:4561/10000 train_time:420383ms step_avg:92.17ms +[2025-08-22 21:12:03] [Rank 0] step:4561/10000 train_time:420383ms step_avg:92.17ms +[2025-08-22 21:12:05] [Rank 0] step:4581/10000 train_time:422287ms step_avg:92.18ms +[2025-08-22 21:12:05] [Rank 0] step:4581/10000 train_time:422287ms step_avg:92.18ms +[2025-08-22 21:12:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:12:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:12:20] [Rank 0] PRINT: step:4600/10000 val_loss:4.2385 svd_entropy: attn_qk:H=0.5020,top10E=0.67,eRank=43.1,q75/q25=65.97 attn_vo:H=0.4509,top10E=0.72,eRank=50.4,q75/q25=95.17 mlp_w1:H=0.5936,top10E=0.53,eRank=80.8,q75/q25=27.38 mlp_w2:H=0.5679,top10E=0.55,eRank=80.6,q75/q25=40.68 vo_prod:H=0.2812,top10E=0.85,eRank=18.3,q75/q25=8785.24 train_time:424203ms step_avg:92.22ms +[2025-08-22 21:12:20] [Rank 0] PRINT: step:4600/10000 val_loss:4.2385 svd_entropy: attn_qk:H=0.5020,top10E=0.67,eRank=43.1,q75/q25=65.97 attn_vo:H=0.4509,top10E=0.72,eRank=50.4,q75/q25=95.17 mlp_w1:H=0.5936,top10E=0.53,eRank=80.8,q75/q25=27.38 mlp_w2:H=0.5679,top10E=0.55,eRank=80.6,q75/q25=40.68 vo_prod:H=0.2812,top10E=0.85,eRank=18.3,q75/q25=8785.24 train_time:424203ms step_avg:92.22ms +[2025-08-22 21:12:21] [Rank 0] step:4601/10000 train_time:424220ms step_avg:92.20ms +[2025-08-22 21:12:21] [Rank 0] step:4601/10000 train_time:424220ms step_avg:92.20ms +[2025-08-22 21:12:22] [Rank 0] step:4621/10000 train_time:426102ms step_avg:92.21ms +[2025-08-22 21:12:22] [Rank 0] step:4621/10000 train_time:426102ms step_avg:92.21ms +[2025-08-22 21:12:24] [Rank 0] step:4641/10000 train_time:427999ms step_avg:92.22ms 
+[2025-08-22 21:12:24] [Rank 0] step:4641/10000 train_time:427999ms step_avg:92.22ms +[2025-08-22 21:12:26] [Rank 0] step:4661/10000 train_time:429894ms step_avg:92.23ms +[2025-08-22 21:12:26] [Rank 0] step:4661/10000 train_time:429894ms step_avg:92.23ms +[2025-08-22 21:12:28] [Rank 0] step:4681/10000 train_time:431792ms step_avg:92.24ms +[2025-08-22 21:12:28] [Rank 0] step:4681/10000 train_time:431792ms step_avg:92.24ms +[2025-08-22 21:12:30] [Rank 0] step:4701/10000 train_time:433691ms step_avg:92.25ms +[2025-08-22 21:12:30] [Rank 0] step:4701/10000 train_time:433691ms step_avg:92.25ms +[2025-08-22 21:12:32] [Rank 0] step:4721/10000 train_time:435590ms step_avg:92.27ms +[2025-08-22 21:12:32] [Rank 0] step:4721/10000 train_time:435590ms step_avg:92.27ms +[2025-08-22 21:12:34] [Rank 0] step:4741/10000 train_time:437490ms step_avg:92.28ms +[2025-08-22 21:12:34] [Rank 0] step:4741/10000 train_time:437490ms step_avg:92.28ms +[2025-08-22 21:12:36] [Rank 0] step:4761/10000 train_time:439391ms step_avg:92.29ms +[2025-08-22 21:12:36] [Rank 0] step:4761/10000 train_time:439391ms step_avg:92.29ms +[2025-08-22 21:12:38] [Rank 0] step:4781/10000 train_time:441290ms step_avg:92.30ms +[2025-08-22 21:12:38] [Rank 0] step:4781/10000 train_time:441290ms step_avg:92.30ms +[2025-08-22 21:12:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:12:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:12:53] [Rank 0] PRINT: step:4800/10000 val_loss:4.2300 svd_entropy: attn_qk:H=0.4994,top10E=0.67,eRank=43.1,q75/q25=66.24 attn_vo:H=0.4549,top10E=0.71,eRank=51.4,q75/q25=94.39 mlp_w1:H=0.5936,top10E=0.53,eRank=81.6,q75/q25=29.20 mlp_w2:H=0.5687,top10E=0.55,eRank=81.5,q75/q25=42.73 vo_prod:H=0.2897,top10E=0.85,eRank=18.7,q75/q25=8985.01 train_time:443206ms step_avg:92.33ms +[2025-08-22 21:12:53] [Rank 0] PRINT: step:4800/10000 val_loss:4.2300 svd_entropy: attn_qk:H=0.4994,top10E=0.67,eRank=43.1,q75/q25=66.24 attn_vo:H=0.4549,top10E=0.71,eRank=51.4,q75/q25=94.39 mlp_w1:H=0.5936,top10E=0.53,eRank=81.6,q75/q25=29.20 mlp_w2:H=0.5687,top10E=0.55,eRank=81.5,q75/q25=42.73 vo_prod:H=0.2897,top10E=0.85,eRank=18.7,q75/q25=8985.01 train_time:443206ms step_avg:92.33ms +[2025-08-22 21:12:53] [Rank 0] step:4801/10000 train_time:443223ms step_avg:92.32ms +[2025-08-22 21:12:53] [Rank 0] step:4801/10000 train_time:443223ms step_avg:92.32ms +[2025-08-22 21:12:55] [Rank 0] step:4821/10000 train_time:445124ms step_avg:92.33ms +[2025-08-22 21:12:55] [Rank 0] step:4821/10000 train_time:445124ms step_avg:92.33ms +[2025-08-22 21:12:57] [Rank 0] step:4841/10000 train_time:447022ms step_avg:92.34ms +[2025-08-22 21:12:57] [Rank 0] step:4841/10000 train_time:447022ms step_avg:92.34ms +[2025-08-22 21:12:59] [Rank 0] step:4861/10000 train_time:448926ms step_avg:92.35ms +[2025-08-22 21:12:59] [Rank 0] step:4861/10000 train_time:448926ms step_avg:92.35ms +[2025-08-22 21:13:01] [Rank 0] step:4881/10000 train_time:450827ms step_avg:92.36ms +[2025-08-22 21:13:01] [Rank 0] step:4881/10000 train_time:450827ms step_avg:92.36ms +[2025-08-22 21:13:03] [Rank 0] step:4901/10000 train_time:452730ms step_avg:92.37ms +[2025-08-22 21:13:03] [Rank 0] step:4901/10000 train_time:452730ms step_avg:92.37ms +[2025-08-22 21:13:05] [Rank 0] step:4921/10000 train_time:454636ms step_avg:92.39ms +[2025-08-22 21:13:05] [Rank 0] step:4921/10000 train_time:454636ms step_avg:92.39ms +[2025-08-22 21:13:06] 
[Rank 0] step:4941/10000 train_time:456543ms step_avg:92.40ms +[2025-08-22 21:13:06] [Rank 0] step:4941/10000 train_time:456543ms step_avg:92.40ms +[2025-08-22 21:13:08] [Rank 0] step:4961/10000 train_time:458448ms step_avg:92.41ms +[2025-08-22 21:13:08] [Rank 0] step:4961/10000 train_time:458448ms step_avg:92.41ms +[2025-08-22 21:13:10] [Rank 0] step:4981/10000 train_time:460355ms step_avg:92.42ms +[2025-08-22 21:13:10] [Rank 0] step:4981/10000 train_time:460355ms step_avg:92.42ms +[2025-08-22 21:13:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:13:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:13:26] [Rank 0] PRINT: step:5000/10000 val_loss:4.2145 svd_entropy: attn_qk:H=0.4992,top10E=0.67,eRank=43.4,q75/q25=65.75 attn_vo:H=0.4550,top10E=0.71,eRank=52.1,q75/q25=95.74 mlp_w1:H=0.5919,top10E=0.54,eRank=82.0,q75/q25=30.89 mlp_w2:H=0.5670,top10E=0.55,eRank=82.0,q75/q25=44.83 vo_prod:H=0.2831,top10E=0.85,eRank=19.1,q75/q25=8972.46 train_time:462332ms step_avg:92.47ms +[2025-08-22 21:13:26] [Rank 0] PRINT: step:5000/10000 val_loss:4.2145 svd_entropy: attn_qk:H=0.4992,top10E=0.67,eRank=43.4,q75/q25=65.75 attn_vo:H=0.4550,top10E=0.71,eRank=52.1,q75/q25=95.74 mlp_w1:H=0.5919,top10E=0.54,eRank=82.0,q75/q25=30.89 mlp_w2:H=0.5670,top10E=0.55,eRank=82.0,q75/q25=44.83 vo_prod:H=0.2831,top10E=0.85,eRank=19.1,q75/q25=8972.46 train_time:462332ms step_avg:92.47ms +[2025-08-22 21:13:26] [Rank 0] step:5001/10000 train_time:462350ms step_avg:92.45ms +[2025-08-22 21:13:26] [Rank 0] step:5001/10000 train_time:462350ms step_avg:92.45ms +[2025-08-22 21:13:28] [Rank 0] step:5021/10000 train_time:464239ms step_avg:92.46ms +[2025-08-22 21:13:28] [Rank 0] step:5021/10000 train_time:464239ms step_avg:92.46ms +[2025-08-22 21:13:29] [Rank 0] step:5041/10000 train_time:466140ms step_avg:92.47ms 
+[2025-08-22 21:13:29] [Rank 0] step:5041/10000 train_time:466140ms step_avg:92.47ms +[2025-08-22 21:13:31] [Rank 0] step:5061/10000 train_time:468039ms step_avg:92.48ms +[2025-08-22 21:13:31] [Rank 0] step:5061/10000 train_time:468039ms step_avg:92.48ms +[2025-08-22 21:13:33] [Rank 0] step:5081/10000 train_time:469940ms step_avg:92.49ms +[2025-08-22 21:13:33] [Rank 0] step:5081/10000 train_time:469940ms step_avg:92.49ms +[2025-08-22 21:13:35] [Rank 0] step:5101/10000 train_time:471839ms step_avg:92.50ms +[2025-08-22 21:13:35] [Rank 0] step:5101/10000 train_time:471839ms step_avg:92.50ms +[2025-08-22 21:13:37] [Rank 0] step:5121/10000 train_time:473742ms step_avg:92.51ms +[2025-08-22 21:13:37] [Rank 0] step:5121/10000 train_time:473742ms step_avg:92.51ms +[2025-08-22 21:13:39] [Rank 0] step:5141/10000 train_time:475648ms step_avg:92.52ms +[2025-08-22 21:13:39] [Rank 0] step:5141/10000 train_time:475648ms step_avg:92.52ms +[2025-08-22 21:13:41] [Rank 0] step:5161/10000 train_time:477551ms step_avg:92.53ms +[2025-08-22 21:13:41] [Rank 0] step:5161/10000 train_time:477551ms step_avg:92.53ms +[2025-08-22 21:13:43] [Rank 0] step:5181/10000 train_time:479457ms step_avg:92.54ms +[2025-08-22 21:13:43] [Rank 0] step:5181/10000 train_time:479457ms step_avg:92.54ms +[2025-08-22 21:13:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:13:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:13:58] [Rank 0] PRINT: step:5200/10000 val_loss:4.1999 svd_entropy: attn_qk:H=0.4993,top10E=0.67,eRank=43.8,q75/q25=65.90 attn_vo:H=0.4597,top10E=0.71,eRank=53.3,q75/q25=97.51 mlp_w1:H=0.5917,top10E=0.53,eRank=82.7,q75/q25=32.85 mlp_w2:H=0.5674,top10E=0.55,eRank=82.8,q75/q25=46.87 vo_prod:H=0.2911,top10E=0.85,eRank=19.5,q75/q25=9302.89 train_time:481400ms step_avg:92.58ms +[2025-08-22 21:13:58] [Rank 0] PRINT: step:5200/10000 val_loss:4.1999 svd_entropy: attn_qk:H=0.4993,top10E=0.67,eRank=43.8,q75/q25=65.90 attn_vo:H=0.4597,top10E=0.71,eRank=53.3,q75/q25=97.51 mlp_w1:H=0.5917,top10E=0.53,eRank=82.7,q75/q25=32.85 mlp_w2:H=0.5674,top10E=0.55,eRank=82.8,q75/q25=46.87 vo_prod:H=0.2911,top10E=0.85,eRank=19.5,q75/q25=9302.89 train_time:481400ms step_avg:92.58ms +[2025-08-22 21:13:58] [Rank 0] step:5201/10000 train_time:481418ms step_avg:92.56ms +[2025-08-22 21:13:58] [Rank 0] step:5201/10000 train_time:481418ms step_avg:92.56ms +[2025-08-22 21:14:00] [Rank 0] step:5221/10000 train_time:483346ms step_avg:92.58ms +[2025-08-22 21:14:00] [Rank 0] step:5221/10000 train_time:483346ms step_avg:92.58ms +[2025-08-22 21:14:02] [Rank 0] step:5241/10000 train_time:485276ms step_avg:92.59ms +[2025-08-22 21:14:02] [Rank 0] step:5241/10000 train_time:485276ms step_avg:92.59ms +[2025-08-22 21:14:04] [Rank 0] step:5261/10000 train_time:487206ms step_avg:92.61ms +[2025-08-22 21:14:04] [Rank 0] step:5261/10000 train_time:487206ms step_avg:92.61ms +[2025-08-22 21:14:06] [Rank 0] step:5281/10000 train_time:489137ms step_avg:92.62ms +[2025-08-22 21:14:06] [Rank 0] step:5281/10000 train_time:489137ms step_avg:92.62ms +[2025-08-22 21:14:08] [Rank 0] step:5301/10000 train_time:491079ms step_avg:92.64ms +[2025-08-22 21:14:08] [Rank 0] step:5301/10000 train_time:491079ms step_avg:92.64ms +[2025-08-22 21:14:10] [Rank 0] step:5321/10000 train_time:493014ms step_avg:92.65ms +[2025-08-22 21:14:10] [Rank 0] step:5321/10000 train_time:493014ms step_avg:92.65ms +[2025-08-22 21:14:12] 
[Rank 0] step:5341/10000 train_time:494947ms step_avg:92.67ms +[2025-08-22 21:14:12] [Rank 0] step:5341/10000 train_time:494947ms step_avg:92.67ms +[2025-08-22 21:14:14] [Rank 0] step:5361/10000 train_time:496881ms step_avg:92.68ms +[2025-08-22 21:14:14] [Rank 0] step:5361/10000 train_time:496881ms step_avg:92.68ms +[2025-08-22 21:14:16] [Rank 0] step:5381/10000 train_time:498872ms step_avg:92.71ms +[2025-08-22 21:14:16] [Rank 0] step:5381/10000 train_time:498872ms step_avg:92.71ms +[2025-08-22 21:14:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:14:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:14:31] [Rank 0] PRINT: step:5400/10000 val_loss:4.1841 svd_entropy: attn_qk:H=0.4991,top10E=0.67,eRank=44.1,q75/q25=66.42 attn_vo:H=0.4619,top10E=0.70,eRank=54.2,q75/q25=96.66 mlp_w1:H=0.5916,top10E=0.53,eRank=83.4,q75/q25=34.99 mlp_w2:H=0.5682,top10E=0.55,eRank=83.6,q75/q25=48.90 vo_prod:H=0.2924,top10E=0.85,eRank=19.9,q75/q25=9501.23 train_time:500863ms step_avg:92.75ms +[2025-08-22 21:14:31] [Rank 0] PRINT: step:5400/10000 val_loss:4.1841 svd_entropy: attn_qk:H=0.4991,top10E=0.67,eRank=44.1,q75/q25=66.42 attn_vo:H=0.4619,top10E=0.70,eRank=54.2,q75/q25=96.66 mlp_w1:H=0.5916,top10E=0.53,eRank=83.4,q75/q25=34.99 mlp_w2:H=0.5682,top10E=0.55,eRank=83.6,q75/q25=48.90 vo_prod:H=0.2924,top10E=0.85,eRank=19.9,q75/q25=9501.23 train_time:500863ms step_avg:92.75ms +[2025-08-22 21:14:31] [Rank 0] step:5401/10000 train_time:500881ms step_avg:92.74ms +[2025-08-22 21:14:31] [Rank 0] step:5401/10000 train_time:500881ms step_avg:92.74ms +[2025-08-22 21:14:33] [Rank 0] step:5421/10000 train_time:502809ms step_avg:92.75ms +[2025-08-22 21:14:33] [Rank 0] step:5421/10000 train_time:502809ms step_avg:92.75ms +[2025-08-22 21:14:35] [Rank 0] step:5441/10000 train_time:504734ms step_avg:92.76ms 
+[2025-08-22 21:14:35] [Rank 0] step:5441/10000 train_time:504734ms step_avg:92.76ms +[2025-08-22 21:14:37] [Rank 0] step:5461/10000 train_time:506666ms step_avg:92.78ms +[2025-08-22 21:14:37] [Rank 0] step:5461/10000 train_time:506666ms step_avg:92.78ms +[2025-08-22 21:14:39] [Rank 0] step:5481/10000 train_time:508597ms step_avg:92.79ms +[2025-08-22 21:14:39] [Rank 0] step:5481/10000 train_time:508597ms step_avg:92.79ms +[2025-08-22 21:14:41] [Rank 0] step:5501/10000 train_time:510535ms step_avg:92.81ms +[2025-08-22 21:14:41] [Rank 0] step:5501/10000 train_time:510535ms step_avg:92.81ms +[2025-08-22 21:14:43] [Rank 0] step:5521/10000 train_time:512472ms step_avg:92.82ms +[2025-08-22 21:14:43] [Rank 0] step:5521/10000 train_time:512472ms step_avg:92.82ms +[2025-08-22 21:14:45] [Rank 0] step:5541/10000 train_time:514405ms step_avg:92.84ms +[2025-08-22 21:14:45] [Rank 0] step:5541/10000 train_time:514405ms step_avg:92.84ms +[2025-08-22 21:14:47] [Rank 0] step:5561/10000 train_time:516339ms step_avg:92.85ms +[2025-08-22 21:14:47] [Rank 0] step:5561/10000 train_time:516339ms step_avg:92.85ms +[2025-08-22 21:14:49] [Rank 0] step:5581/10000 train_time:518274ms step_avg:92.86ms +[2025-08-22 21:14:49] [Rank 0] step:5581/10000 train_time:518274ms step_avg:92.86ms +[2025-08-22 21:14:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:14:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:15:04] [Rank 0] PRINT: step:5600/10000 val_loss:4.1788 svd_entropy: attn_qk:H=0.4983,top10E=0.67,eRank=44.4,q75/q25=67.15 attn_vo:H=0.4680,top10E=0.70,eRank=55.5,q75/q25=95.25 mlp_w1:H=0.5915,top10E=0.53,eRank=84.1,q75/q25=37.32 mlp_w2:H=0.5689,top10E=0.55,eRank=84.3,q75/q25=51.22 vo_prod:H=0.2991,top10E=0.85,eRank=20.3,q75/q25=9600.52 train_time:520227ms step_avg:92.90ms +[2025-08-22 21:15:04] [Rank 0] PRINT: step:5600/10000 val_loss:4.1788 svd_entropy: attn_qk:H=0.4983,top10E=0.67,eRank=44.4,q75/q25=67.15 attn_vo:H=0.4680,top10E=0.70,eRank=55.5,q75/q25=95.25 mlp_w1:H=0.5915,top10E=0.53,eRank=84.1,q75/q25=37.32 mlp_w2:H=0.5689,top10E=0.55,eRank=84.3,q75/q25=51.22 vo_prod:H=0.2991,top10E=0.85,eRank=20.3,q75/q25=9600.52 train_time:520227ms step_avg:92.90ms +[2025-08-22 21:15:04] [Rank 0] step:5601/10000 train_time:520245ms step_avg:92.88ms +[2025-08-22 21:15:04] [Rank 0] step:5601/10000 train_time:520245ms step_avg:92.88ms +[2025-08-22 21:15:06] [Rank 0] step:5621/10000 train_time:522164ms step_avg:92.90ms +[2025-08-22 21:15:06] [Rank 0] step:5621/10000 train_time:522164ms step_avg:92.90ms +[2025-08-22 21:15:08] [Rank 0] step:5641/10000 train_time:524097ms step_avg:92.91ms +[2025-08-22 21:15:08] [Rank 0] step:5641/10000 train_time:524097ms step_avg:92.91ms +[2025-08-22 21:15:10] [Rank 0] step:5661/10000 train_time:526029ms step_avg:92.92ms +[2025-08-22 21:15:10] [Rank 0] step:5661/10000 train_time:526029ms step_avg:92.92ms +[2025-08-22 21:15:12] [Rank 0] step:5681/10000 train_time:527968ms step_avg:92.94ms +[2025-08-22 21:15:12] [Rank 0] step:5681/10000 train_time:527968ms step_avg:92.94ms +[2025-08-22 21:15:14] [Rank 0] step:5701/10000 train_time:529905ms step_avg:92.95ms +[2025-08-22 21:15:14] [Rank 0] step:5701/10000 train_time:529905ms step_avg:92.95ms +[2025-08-22 21:15:16] [Rank 0] step:5721/10000 train_time:531847ms step_avg:92.96ms +[2025-08-22 21:15:16] [Rank 0] step:5721/10000 train_time:531847ms step_avg:92.96ms +[2025-08-22 21:15:18] 
[Rank 0] step:5741/10000 train_time:533786ms step_avg:92.98ms +[2025-08-22 21:15:18] [Rank 0] step:5741/10000 train_time:533786ms step_avg:92.98ms +[2025-08-22 21:15:20] [Rank 0] step:5761/10000 train_time:535770ms step_avg:93.00ms +[2025-08-22 21:15:20] [Rank 0] step:5761/10000 train_time:535770ms step_avg:93.00ms +[2025-08-22 21:15:22] [Rank 0] step:5781/10000 train_time:537804ms step_avg:93.03ms +[2025-08-22 21:15:22] [Rank 0] step:5781/10000 train_time:537804ms step_avg:93.03ms +[2025-08-22 21:15:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:15:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:15:38] [Rank 0] PRINT: step:5800/10000 val_loss:4.1713 svd_entropy: attn_qk:H=0.4982,top10E=0.67,eRank=44.7,q75/q25=67.46 attn_vo:H=0.4728,top10E=0.69,eRank=56.6,q75/q25=94.60 mlp_w1:H=0.5919,top10E=0.53,eRank=84.9,q75/q25=39.56 mlp_w2:H=0.5696,top10E=0.55,eRank=84.9,q75/q25=52.81 vo_prod:H=0.3049,top10E=0.84,eRank=20.8,q75/q25=9439.49 train_time:539760ms step_avg:93.06ms +[2025-08-22 21:15:38] [Rank 0] PRINT: step:5800/10000 val_loss:4.1713 svd_entropy: attn_qk:H=0.4982,top10E=0.67,eRank=44.7,q75/q25=67.46 attn_vo:H=0.4728,top10E=0.69,eRank=56.6,q75/q25=94.60 mlp_w1:H=0.5919,top10E=0.53,eRank=84.9,q75/q25=39.56 mlp_w2:H=0.5696,top10E=0.55,eRank=84.9,q75/q25=52.81 vo_prod:H=0.3049,top10E=0.84,eRank=20.8,q75/q25=9439.49 train_time:539760ms step_avg:93.06ms +[2025-08-22 21:15:38] [Rank 0] step:5801/10000 train_time:539778ms step_avg:93.05ms +[2025-08-22 21:15:38] [Rank 0] step:5801/10000 train_time:539778ms step_avg:93.05ms +[2025-08-22 21:15:40] [Rank 0] step:5821/10000 train_time:541688ms step_avg:93.06ms +[2025-08-22 21:15:40] [Rank 0] step:5821/10000 train_time:541688ms step_avg:93.06ms +[2025-08-22 21:15:42] [Rank 0] step:5841/10000 train_time:543618ms step_avg:93.07ms 
+[2025-08-22 21:15:42] [Rank 0] step:5841/10000 train_time:543618ms step_avg:93.07ms +[2025-08-22 21:15:44] [Rank 0] step:5861/10000 train_time:545554ms step_avg:93.08ms +[2025-08-22 21:15:44] [Rank 0] step:5861/10000 train_time:545554ms step_avg:93.08ms +[2025-08-22 21:15:46] [Rank 0] step:5881/10000 train_time:547489ms step_avg:93.09ms +[2025-08-22 21:15:46] [Rank 0] step:5881/10000 train_time:547489ms step_avg:93.09ms +[2025-08-22 21:15:47] [Rank 0] step:5901/10000 train_time:549422ms step_avg:93.11ms +[2025-08-22 21:15:47] [Rank 0] step:5901/10000 train_time:549422ms step_avg:93.11ms +[2025-08-22 21:15:49] [Rank 0] step:5921/10000 train_time:551356ms step_avg:93.12ms +[2025-08-22 21:15:49] [Rank 0] step:5921/10000 train_time:551356ms step_avg:93.12ms +[2025-08-22 21:15:51] [Rank 0] step:5941/10000 train_time:553298ms step_avg:93.13ms +[2025-08-22 21:15:51] [Rank 0] step:5941/10000 train_time:553298ms step_avg:93.13ms +[2025-08-22 21:15:53] [Rank 0] step:5961/10000 train_time:555235ms step_avg:93.14ms +[2025-08-22 21:15:53] [Rank 0] step:5961/10000 train_time:555235ms step_avg:93.14ms +[2025-08-22 21:15:55] [Rank 0] step:5981/10000 train_time:557171ms step_avg:93.16ms +[2025-08-22 21:15:55] [Rank 0] step:5981/10000 train_time:557171ms step_avg:93.16ms +[2025-08-22 21:15:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:15:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:16:11] [Rank 0] PRINT: step:6000/10000 val_loss:4.1488 svd_entropy: attn_qk:H=0.4984,top10E=0.67,eRank=45.0,q75/q25=67.81 attn_vo:H=0.4771,top10E=0.69,eRank=57.7,q75/q25=93.53 mlp_w1:H=0.5918,top10E=0.53,eRank=85.5,q75/q25=41.69 mlp_w2:H=0.5705,top10E=0.55,eRank=85.6,q75/q25=55.31 vo_prod:H=0.3102,top10E=0.84,eRank=21.3,q75/q25=9061.00 train_time:559121ms step_avg:93.19ms +[2025-08-22 21:16:11] [Rank 0] PRINT: step:6000/10000 val_loss:4.1488 svd_entropy: attn_qk:H=0.4984,top10E=0.67,eRank=45.0,q75/q25=67.81 attn_vo:H=0.4771,top10E=0.69,eRank=57.7,q75/q25=93.53 mlp_w1:H=0.5918,top10E=0.53,eRank=85.5,q75/q25=41.69 mlp_w2:H=0.5705,top10E=0.55,eRank=85.6,q75/q25=55.31 vo_prod:H=0.3102,top10E=0.84,eRank=21.3,q75/q25=9061.00 train_time:559121ms step_avg:93.19ms +[2025-08-22 21:16:11] [Rank 0] step:6001/10000 train_time:559139ms step_avg:93.17ms +[2025-08-22 21:16:11] [Rank 0] step:6001/10000 train_time:559139ms step_avg:93.17ms +[2025-08-22 21:16:13] [Rank 0] step:6021/10000 train_time:561072ms step_avg:93.19ms +[2025-08-22 21:16:13] [Rank 0] step:6021/10000 train_time:561072ms step_avg:93.19ms +[2025-08-22 21:16:15] [Rank 0] step:6041/10000 train_time:563010ms step_avg:93.20ms +[2025-08-22 21:16:15] [Rank 0] step:6041/10000 train_time:563010ms step_avg:93.20ms +[2025-08-22 21:16:17] [Rank 0] step:6061/10000 train_time:564947ms step_avg:93.21ms +[2025-08-22 21:16:17] [Rank 0] step:6061/10000 train_time:564947ms step_avg:93.21ms +[2025-08-22 21:16:19] [Rank 0] step:6081/10000 train_time:566880ms step_avg:93.22ms +[2025-08-22 21:16:19] [Rank 0] step:6081/10000 train_time:566880ms step_avg:93.22ms +[2025-08-22 21:16:20] [Rank 0] step:6101/10000 train_time:568824ms step_avg:93.23ms +[2025-08-22 21:16:20] [Rank 0] step:6101/10000 train_time:568824ms step_avg:93.23ms +[2025-08-22 21:16:23] [Rank 0] step:6121/10000 train_time:571081ms step_avg:93.30ms +[2025-08-22 21:16:23] [Rank 0] step:6121/10000 train_time:571081ms step_avg:93.30ms +[2025-08-22 21:16:25] 
[Rank 0] step:6141/10000 train_time:573087ms step_avg:93.32ms +[2025-08-22 21:16:25] [Rank 0] step:6141/10000 train_time:573087ms step_avg:93.32ms +[2025-08-22 21:16:27] [Rank 0] step:6161/10000 train_time:575026ms step_avg:93.33ms +[2025-08-22 21:16:27] [Rank 0] step:6161/10000 train_time:575026ms step_avg:93.33ms +[2025-08-22 21:16:29] [Rank 0] step:6181/10000 train_time:576964ms step_avg:93.34ms +[2025-08-22 21:16:29] [Rank 0] step:6181/10000 train_time:576964ms step_avg:93.34ms +[2025-08-22 21:16:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:16:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:16:44] [Rank 0] PRINT: step:6200/10000 val_loss:4.1360 svd_entropy: attn_qk:H=0.4988,top10E=0.67,eRank=45.4,q75/q25=68.32 attn_vo:H=0.4813,top10E=0.69,eRank=58.7,q75/q25=90.14 mlp_w1:H=0.5917,top10E=0.53,eRank=86.1,q75/q25=44.13 mlp_w2:H=0.5708,top10E=0.54,eRank=86.1,q75/q25=57.36 vo_prod:H=0.3156,top10E=0.84,eRank=21.7,q75/q25=8222.56 train_time:578917ms step_avg:93.37ms +[2025-08-22 21:16:44] [Rank 0] PRINT: step:6200/10000 val_loss:4.1360 svd_entropy: attn_qk:H=0.4988,top10E=0.67,eRank=45.4,q75/q25=68.32 attn_vo:H=0.4813,top10E=0.69,eRank=58.7,q75/q25=90.14 mlp_w1:H=0.5917,top10E=0.53,eRank=86.1,q75/q25=44.13 mlp_w2:H=0.5708,top10E=0.54,eRank=86.1,q75/q25=57.36 vo_prod:H=0.3156,top10E=0.84,eRank=21.7,q75/q25=8222.56 train_time:578917ms step_avg:93.37ms +[2025-08-22 21:16:44] [Rank 0] step:6201/10000 train_time:578934ms step_avg:93.36ms +[2025-08-22 21:16:44] [Rank 0] step:6201/10000 train_time:578934ms step_avg:93.36ms +[2025-08-22 21:16:46] [Rank 0] step:6221/10000 train_time:580863ms step_avg:93.37ms +[2025-08-22 21:16:46] [Rank 0] step:6221/10000 train_time:580863ms step_avg:93.37ms +[2025-08-22 21:16:48] [Rank 0] step:6241/10000 train_time:582795ms step_avg:93.38ms 
+[2025-08-22 21:16:48] [Rank 0] step:6241/10000 train_time:582795ms step_avg:93.38ms +[2025-08-22 21:16:50] [Rank 0] step:6261/10000 train_time:584734ms step_avg:93.39ms +[2025-08-22 21:16:50] [Rank 0] step:6261/10000 train_time:584734ms step_avg:93.39ms +[2025-08-22 21:16:52] [Rank 0] step:6281/10000 train_time:586675ms step_avg:93.40ms +[2025-08-22 21:16:52] [Rank 0] step:6281/10000 train_time:586675ms step_avg:93.40ms +[2025-08-22 21:16:54] [Rank 0] step:6301/10000 train_time:588614ms step_avg:93.42ms +[2025-08-22 21:16:54] [Rank 0] step:6301/10000 train_time:588614ms step_avg:93.42ms +[2025-08-22 21:16:56] [Rank 0] step:6321/10000 train_time:590554ms step_avg:93.43ms +[2025-08-22 21:16:56] [Rank 0] step:6321/10000 train_time:590554ms step_avg:93.43ms +[2025-08-22 21:16:58] [Rank 0] step:6341/10000 train_time:592495ms step_avg:93.44ms +[2025-08-22 21:16:58] [Rank 0] step:6341/10000 train_time:592495ms step_avg:93.44ms +[2025-08-22 21:17:00] [Rank 0] step:6361/10000 train_time:594446ms step_avg:93.45ms +[2025-08-22 21:17:00] [Rank 0] step:6361/10000 train_time:594446ms step_avg:93.45ms +[2025-08-22 21:17:02] [Rank 0] step:6381/10000 train_time:596389ms step_avg:93.46ms +[2025-08-22 21:17:02] [Rank 0] step:6381/10000 train_time:596389ms step_avg:93.46ms +[2025-08-22 21:17:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:17:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:17:17] [Rank 0] PRINT: step:6400/10000 val_loss:4.1225 svd_entropy: attn_qk:H=0.4986,top10E=0.67,eRank=45.7,q75/q25=68.51 attn_vo:H=0.4863,top10E=0.68,eRank=59.8,q75/q25=88.69 mlp_w1:H=0.5920,top10E=0.52,eRank=86.7,q75/q25=46.18 mlp_w2:H=0.5717,top10E=0.54,eRank=86.7,q75/q25=58.71 vo_prod:H=0.3231,top10E=0.84,eRank=22.2,q75/q25=7442.62 train_time:598346ms step_avg:93.49ms +[2025-08-22 21:17:17] [Rank 0] PRINT: step:6400/10000 val_loss:4.1225 svd_entropy: attn_qk:H=0.4986,top10E=0.67,eRank=45.7,q75/q25=68.51 attn_vo:H=0.4863,top10E=0.68,eRank=59.8,q75/q25=88.69 mlp_w1:H=0.5920,top10E=0.52,eRank=86.7,q75/q25=46.18 mlp_w2:H=0.5717,top10E=0.54,eRank=86.7,q75/q25=58.71 vo_prod:H=0.3231,top10E=0.84,eRank=22.2,q75/q25=7442.62 train_time:598346ms step_avg:93.49ms +[2025-08-22 21:17:17] [Rank 0] step:6401/10000 train_time:598363ms step_avg:93.48ms +[2025-08-22 21:17:17] [Rank 0] step:6401/10000 train_time:598363ms step_avg:93.48ms +[2025-08-22 21:17:19] [Rank 0] step:6421/10000 train_time:600290ms step_avg:93.49ms +[2025-08-22 21:17:19] [Rank 0] step:6421/10000 train_time:600290ms step_avg:93.49ms +[2025-08-22 21:17:21] [Rank 0] step:6441/10000 train_time:602230ms step_avg:93.50ms +[2025-08-22 21:17:21] [Rank 0] step:6441/10000 train_time:602230ms step_avg:93.50ms +[2025-08-22 21:17:23] [Rank 0] step:6461/10000 train_time:604176ms step_avg:93.51ms +[2025-08-22 21:17:23] [Rank 0] step:6461/10000 train_time:604176ms step_avg:93.51ms +[2025-08-22 21:17:25] [Rank 0] step:6481/10000 train_time:606192ms step_avg:93.53ms +[2025-08-22 21:17:25] [Rank 0] step:6481/10000 train_time:606192ms step_avg:93.53ms +[2025-08-22 21:17:27] [Rank 0] step:6501/10000 train_time:608131ms step_avg:93.54ms +[2025-08-22 21:17:27] [Rank 0] step:6501/10000 train_time:608131ms step_avg:93.54ms +[2025-08-22 21:17:29] [Rank 0] step:6521/10000 train_time:610074ms step_avg:93.56ms +[2025-08-22 21:17:29] [Rank 0] step:6521/10000 train_time:610074ms step_avg:93.56ms +[2025-08-22 21:17:31] 
[Rank 0] step:6541/10000 train_time:612022ms step_avg:93.57ms +[2025-08-22 21:17:31] [Rank 0] step:6541/10000 train_time:612022ms step_avg:93.57ms +[2025-08-22 21:17:33] [Rank 0] step:6561/10000 train_time:613971ms step_avg:93.58ms +[2025-08-22 21:17:33] [Rank 0] step:6561/10000 train_time:613971ms step_avg:93.58ms +[2025-08-22 21:17:35] [Rank 0] step:6581/10000 train_time:615917ms step_avg:93.59ms +[2025-08-22 21:17:35] [Rank 0] step:6581/10000 train_time:615917ms step_avg:93.59ms +[2025-08-22 21:17:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:17:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:17:50] [Rank 0] PRINT: step:6600/10000 val_loss:4.1092 svd_entropy: attn_qk:H=0.4993,top10E=0.67,eRank=46.1,q75/q25=68.97 attn_vo:H=0.4896,top10E=0.68,eRank=60.8,q75/q25=87.47 mlp_w1:H=0.5918,top10E=0.52,eRank=87.2,q75/q25=48.33 mlp_w2:H=0.5723,top10E=0.54,eRank=87.1,q75/q25=60.35 vo_prod:H=0.3293,top10E=0.83,eRank=22.7,q75/q25=7134.13 train_time:617877ms step_avg:93.62ms +[2025-08-22 21:17:50] [Rank 0] PRINT: step:6600/10000 val_loss:4.1092 svd_entropy: attn_qk:H=0.4993,top10E=0.67,eRank=46.1,q75/q25=68.97 attn_vo:H=0.4896,top10E=0.68,eRank=60.8,q75/q25=87.47 mlp_w1:H=0.5918,top10E=0.52,eRank=87.2,q75/q25=48.33 mlp_w2:H=0.5723,top10E=0.54,eRank=87.1,q75/q25=60.35 vo_prod:H=0.3293,top10E=0.83,eRank=22.7,q75/q25=7134.13 train_time:617877ms step_avg:93.62ms +[2025-08-22 21:17:50] [Rank 0] step:6601/10000 train_time:617895ms step_avg:93.61ms +[2025-08-22 21:17:50] [Rank 0] step:6601/10000 train_time:617895ms step_avg:93.61ms +[2025-08-22 21:17:52] [Rank 0] step:6621/10000 train_time:619814ms step_avg:93.61ms +[2025-08-22 21:17:52] [Rank 0] step:6621/10000 train_time:619814ms step_avg:93.61ms +[2025-08-22 21:17:54] [Rank 0] step:6641/10000 train_time:621761ms step_avg:93.62ms 
+[2025-08-22 21:17:54] [Rank 0] step:6641/10000 train_time:621761ms step_avg:93.62ms +[2025-08-22 21:17:56] [Rank 0] step:6661/10000 train_time:623701ms step_avg:93.63ms +[2025-08-22 21:17:56] [Rank 0] step:6661/10000 train_time:623701ms step_avg:93.63ms +[2025-08-22 21:17:58] [Rank 0] step:6681/10000 train_time:625657ms step_avg:93.65ms +[2025-08-22 21:17:58] [Rank 0] step:6681/10000 train_time:625657ms step_avg:93.65ms +[2025-08-22 21:18:00] [Rank 0] step:6701/10000 train_time:627636ms step_avg:93.66ms +[2025-08-22 21:18:00] [Rank 0] step:6701/10000 train_time:627636ms step_avg:93.66ms +[2025-08-22 21:18:02] [Rank 0] step:6721/10000 train_time:629606ms step_avg:93.68ms +[2025-08-22 21:18:02] [Rank 0] step:6721/10000 train_time:629606ms step_avg:93.68ms +[2025-08-22 21:18:04] [Rank 0] step:6741/10000 train_time:631574ms step_avg:93.69ms +[2025-08-22 21:18:04] [Rank 0] step:6741/10000 train_time:631574ms step_avg:93.69ms +[2025-08-22 21:18:06] [Rank 0] step:6761/10000 train_time:633542ms step_avg:93.71ms +[2025-08-22 21:18:06] [Rank 0] step:6761/10000 train_time:633542ms step_avg:93.71ms +[2025-08-22 21:18:08] [Rank 0] step:6781/10000 train_time:635518ms step_avg:93.72ms +[2025-08-22 21:18:08] [Rank 0] step:6781/10000 train_time:635518ms step_avg:93.72ms +[2025-08-22 21:18:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:18:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:18:23] [Rank 0] PRINT: step:6800/10000 val_loss:4.0951 svd_entropy: attn_qk:H=0.5004,top10E=0.67,eRank=46.5,q75/q25=69.05 attn_vo:H=0.4920,top10E=0.67,eRank=61.6,q75/q25=86.12 mlp_w1:H=0.5922,top10E=0.52,eRank=87.7,q75/q25=50.13 mlp_w2:H=0.5731,top10E=0.53,eRank=87.6,q75/q25=61.55 vo_prod:H=0.3291,top10E=0.83,eRank=22.9,q75/q25=6938.92 train_time:637505ms step_avg:93.75ms +[2025-08-22 21:18:23] [Rank 0] PRINT: step:6800/10000 val_loss:4.0951 svd_entropy: attn_qk:H=0.5004,top10E=0.67,eRank=46.5,q75/q25=69.05 attn_vo:H=0.4920,top10E=0.67,eRank=61.6,q75/q25=86.12 mlp_w1:H=0.5922,top10E=0.52,eRank=87.7,q75/q25=50.13 mlp_w2:H=0.5731,top10E=0.53,eRank=87.6,q75/q25=61.55 vo_prod:H=0.3291,top10E=0.83,eRank=22.9,q75/q25=6938.92 train_time:637505ms step_avg:93.75ms +[2025-08-22 21:18:23] [Rank 0] step:6801/10000 train_time:637522ms step_avg:93.74ms +[2025-08-22 21:18:23] [Rank 0] step:6801/10000 train_time:637522ms step_avg:93.74ms +[2025-08-22 21:18:25] [Rank 0] step:6821/10000 train_time:639485ms step_avg:93.75ms +[2025-08-22 21:18:25] [Rank 0] step:6821/10000 train_time:639485ms step_avg:93.75ms +[2025-08-22 21:18:27] [Rank 0] step:6841/10000 train_time:641452ms step_avg:93.77ms +[2025-08-22 21:18:27] [Rank 0] step:6841/10000 train_time:641452ms step_avg:93.77ms +[2025-08-22 21:18:29] [Rank 0] step:6861/10000 train_time:643410ms step_avg:93.78ms +[2025-08-22 21:18:29] [Rank 0] step:6861/10000 train_time:643410ms step_avg:93.78ms +[2025-08-22 21:18:31] [Rank 0] step:6881/10000 train_time:645441ms step_avg:93.80ms +[2025-08-22 21:18:31] [Rank 0] step:6881/10000 train_time:645441ms step_avg:93.80ms +[2025-08-22 21:18:33] [Rank 0] step:6901/10000 train_time:647406ms step_avg:93.81ms +[2025-08-22 21:18:33] [Rank 0] step:6901/10000 train_time:647406ms step_avg:93.81ms +[2025-08-22 21:18:35] [Rank 0] step:6921/10000 train_time:649368ms step_avg:93.83ms +[2025-08-22 21:18:35] [Rank 0] step:6921/10000 train_time:649368ms step_avg:93.83ms +[2025-08-22 21:18:37] 
[Rank 0] step:6941/10000 train_time:651343ms step_avg:93.84ms +[2025-08-22 21:18:37] [Rank 0] step:6941/10000 train_time:651343ms step_avg:93.84ms +[2025-08-22 21:18:39] [Rank 0] step:6961/10000 train_time:653326ms step_avg:93.86ms +[2025-08-22 21:18:39] [Rank 0] step:6961/10000 train_time:653326ms step_avg:93.86ms +[2025-08-22 21:18:41] [Rank 0] step:6981/10000 train_time:655298ms step_avg:93.87ms +[2025-08-22 21:18:41] [Rank 0] step:6981/10000 train_time:655298ms step_avg:93.87ms +[2025-08-22 21:18:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:18:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:18:56] [Rank 0] PRINT: step:7000/10000 val_loss:4.0809 svd_entropy: attn_qk:H=0.5011,top10E=0.67,eRank=46.9,q75/q25=68.82 attn_vo:H=0.4949,top10E=0.67,eRank=62.4,q75/q25=84.72 mlp_w1:H=0.5920,top10E=0.52,eRank=88.1,q75/q25=52.27 mlp_w2:H=0.5741,top10E=0.53,eRank=88.1,q75/q25=62.83 vo_prod:H=0.3312,top10E=0.83,eRank=23.2,q75/q25=6481.51 train_time:657284ms step_avg:93.90ms +[2025-08-22 21:18:56] [Rank 0] PRINT: step:7000/10000 val_loss:4.0809 svd_entropy: attn_qk:H=0.5011,top10E=0.67,eRank=46.9,q75/q25=68.82 attn_vo:H=0.4949,top10E=0.67,eRank=62.4,q75/q25=84.72 mlp_w1:H=0.5920,top10E=0.52,eRank=88.1,q75/q25=52.27 mlp_w2:H=0.5741,top10E=0.53,eRank=88.1,q75/q25=62.83 vo_prod:H=0.3312,top10E=0.83,eRank=23.2,q75/q25=6481.51 train_time:657284ms step_avg:93.90ms +[2025-08-22 21:18:57] [Rank 0] step:7001/10000 train_time:657303ms step_avg:93.89ms +[2025-08-22 21:18:57] [Rank 0] step:7001/10000 train_time:657303ms step_avg:93.89ms +[2025-08-22 21:18:59] [Rank 0] step:7021/10000 train_time:659257ms step_avg:93.90ms +[2025-08-22 21:18:59] [Rank 0] step:7021/10000 train_time:659257ms step_avg:93.90ms +[2025-08-22 21:19:01] [Rank 0] step:7041/10000 train_time:661221ms step_avg:93.91ms 
+[2025-08-22 21:19:01] [Rank 0] step:7041/10000 train_time:661221ms step_avg:93.91ms +[2025-08-22 21:19:02] [Rank 0] step:7061/10000 train_time:663186ms step_avg:93.92ms +[2025-08-22 21:19:02] [Rank 0] step:7061/10000 train_time:663186ms step_avg:93.92ms +[2025-08-22 21:19:04] [Rank 0] step:7081/10000 train_time:665151ms step_avg:93.93ms +[2025-08-22 21:19:04] [Rank 0] step:7081/10000 train_time:665151ms step_avg:93.93ms +[2025-08-22 21:19:06] [Rank 0] step:7101/10000 train_time:667125ms step_avg:93.95ms +[2025-08-22 21:19:06] [Rank 0] step:7101/10000 train_time:667125ms step_avg:93.95ms +[2025-08-22 21:19:08] [Rank 0] step:7121/10000 train_time:669091ms step_avg:93.96ms +[2025-08-22 21:19:08] [Rank 0] step:7121/10000 train_time:669091ms step_avg:93.96ms +[2025-08-22 21:19:10] [Rank 0] step:7141/10000 train_time:671058ms step_avg:93.97ms +[2025-08-22 21:19:10] [Rank 0] step:7141/10000 train_time:671058ms step_avg:93.97ms +[2025-08-22 21:19:12] [Rank 0] step:7161/10000 train_time:673028ms step_avg:93.99ms +[2025-08-22 21:19:12] [Rank 0] step:7161/10000 train_time:673028ms step_avg:93.99ms +[2025-08-22 21:19:14] [Rank 0] step:7181/10000 train_time:675001ms step_avg:94.00ms +[2025-08-22 21:19:14] [Rank 0] step:7181/10000 train_time:675001ms step_avg:94.00ms +[2025-08-22 21:19:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:19:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:19:30] [Rank 0] PRINT: step:7200/10000 val_loss:4.0713 svd_entropy: attn_qk:H=0.5008,top10E=0.67,eRank=47.1,q75/q25=68.95 attn_vo:H=0.4979,top10E=0.67,eRank=63.2,q75/q25=83.50 mlp_w1:H=0.5920,top10E=0.52,eRank=88.4,q75/q25=53.31 mlp_w2:H=0.5751,top10E=0.53,eRank=88.5,q75/q25=64.24 vo_prod:H=0.3360,top10E=0.83,eRank=23.6,q75/q25=6279.74 train_time:676990ms step_avg:94.03ms +[2025-08-22 21:19:30] [Rank 0] PRINT: step:7200/10000 val_loss:4.0713 svd_entropy: attn_qk:H=0.5008,top10E=0.67,eRank=47.1,q75/q25=68.95 attn_vo:H=0.4979,top10E=0.67,eRank=63.2,q75/q25=83.50 mlp_w1:H=0.5920,top10E=0.52,eRank=88.4,q75/q25=53.31 mlp_w2:H=0.5751,top10E=0.53,eRank=88.5,q75/q25=64.24 vo_prod:H=0.3360,top10E=0.83,eRank=23.6,q75/q25=6279.74 train_time:676990ms step_avg:94.03ms +[2025-08-22 21:19:30] [Rank 0] step:7201/10000 train_time:677008ms step_avg:94.02ms +[2025-08-22 21:19:30] [Rank 0] step:7201/10000 train_time:677008ms step_avg:94.02ms +[2025-08-22 21:19:32] [Rank 0] step:7221/10000 train_time:679042ms step_avg:94.04ms +[2025-08-22 21:19:32] [Rank 0] step:7221/10000 train_time:679042ms step_avg:94.04ms +[2025-08-22 21:19:34] [Rank 0] step:7241/10000 train_time:681071ms step_avg:94.06ms +[2025-08-22 21:19:34] [Rank 0] step:7241/10000 train_time:681071ms step_avg:94.06ms +[2025-08-22 21:19:36] [Rank 0] step:7261/10000 train_time:683037ms step_avg:94.07ms +[2025-08-22 21:19:36] [Rank 0] step:7261/10000 train_time:683037ms step_avg:94.07ms +[2025-08-22 21:19:38] [Rank 0] step:7281/10000 train_time:685017ms step_avg:94.08ms +[2025-08-22 21:19:38] [Rank 0] step:7281/10000 train_time:685017ms step_avg:94.08ms +[2025-08-22 21:19:40] [Rank 0] step:7301/10000 train_time:686989ms step_avg:94.10ms +[2025-08-22 21:19:40] [Rank 0] step:7301/10000 train_time:686989ms step_avg:94.10ms +[2025-08-22 21:19:42] [Rank 0] step:7321/10000 train_time:688976ms step_avg:94.11ms +[2025-08-22 21:19:42] [Rank 0] step:7321/10000 train_time:688976ms step_avg:94.11ms +[2025-08-22 21:19:44] 
[Rank 0] step:7341/10000 train_time:690950ms step_avg:94.12ms +[2025-08-22 21:19:44] [Rank 0] step:7341/10000 train_time:690950ms step_avg:94.12ms +[2025-08-22 21:19:46] [Rank 0] step:7361/10000 train_time:692935ms step_avg:94.14ms +[2025-08-22 21:19:46] [Rank 0] step:7361/10000 train_time:692935ms step_avg:94.14ms +[2025-08-22 21:19:48] [Rank 0] step:7381/10000 train_time:694916ms step_avg:94.15ms +[2025-08-22 21:19:48] [Rank 0] step:7381/10000 train_time:694916ms step_avg:94.15ms +[2025-08-22 21:19:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:19:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:20:03] [Rank 0] PRINT: step:7400/10000 val_loss:4.0507 svd_entropy: attn_qk:H=0.5017,top10E=0.66,eRank=47.5,q75/q25=69.08 attn_vo:H=0.5006,top10E=0.66,eRank=63.9,q75/q25=82.66 mlp_w1:H=0.5923,top10E=0.52,eRank=88.8,q75/q25=54.72 mlp_w2:H=0.5762,top10E=0.53,eRank=88.9,q75/q25=65.33 vo_prod:H=0.3382,top10E=0.83,eRank=23.9,q75/q25=5942.24 train_time:696890ms step_avg:94.17ms +[2025-08-22 21:20:03] [Rank 0] PRINT: step:7400/10000 val_loss:4.0507 svd_entropy: attn_qk:H=0.5017,top10E=0.66,eRank=47.5,q75/q25=69.08 attn_vo:H=0.5006,top10E=0.66,eRank=63.9,q75/q25=82.66 mlp_w1:H=0.5923,top10E=0.52,eRank=88.8,q75/q25=54.72 mlp_w2:H=0.5762,top10E=0.53,eRank=88.9,q75/q25=65.33 vo_prod:H=0.3382,top10E=0.83,eRank=23.9,q75/q25=5942.24 train_time:696890ms step_avg:94.17ms +[2025-08-22 21:20:03] [Rank 0] step:7401/10000 train_time:696908ms step_avg:94.16ms +[2025-08-22 21:20:03] [Rank 0] step:7401/10000 train_time:696908ms step_avg:94.16ms +[2025-08-22 21:20:05] [Rank 0] step:7421/10000 train_time:698868ms step_avg:94.17ms +[2025-08-22 21:20:05] [Rank 0] step:7421/10000 train_time:698868ms step_avg:94.17ms +[2025-08-22 21:20:07] [Rank 0] step:7441/10000 train_time:700835ms step_avg:94.19ms 
+[2025-08-22 21:20:07] [Rank 0] step:7441/10000 train_time:700835ms step_avg:94.19ms +[2025-08-22 21:20:09] [Rank 0] step:7461/10000 train_time:702805ms step_avg:94.20ms +[2025-08-22 21:20:09] [Rank 0] step:7461/10000 train_time:702805ms step_avg:94.20ms +[2025-08-22 21:20:11] [Rank 0] step:7481/10000 train_time:704781ms step_avg:94.21ms +[2025-08-22 21:20:11] [Rank 0] step:7481/10000 train_time:704781ms step_avg:94.21ms +[2025-08-22 21:20:13] [Rank 0] step:7501/10000 train_time:706754ms step_avg:94.22ms +[2025-08-22 21:20:13] [Rank 0] step:7501/10000 train_time:706754ms step_avg:94.22ms +[2025-08-22 21:20:15] [Rank 0] step:7521/10000 train_time:708728ms step_avg:94.23ms +[2025-08-22 21:20:15] [Rank 0] step:7521/10000 train_time:708728ms step_avg:94.23ms +[2025-08-22 21:20:17] [Rank 0] step:7541/10000 train_time:710711ms step_avg:94.25ms +[2025-08-22 21:20:17] [Rank 0] step:7541/10000 train_time:710711ms step_avg:94.25ms +[2025-08-22 21:20:19] [Rank 0] step:7561/10000 train_time:712674ms step_avg:94.26ms +[2025-08-22 21:20:19] [Rank 0] step:7561/10000 train_time:712674ms step_avg:94.26ms +[2025-08-22 21:20:21] [Rank 0] step:7581/10000 train_time:714657ms step_avg:94.27ms +[2025-08-22 21:20:21] [Rank 0] step:7581/10000 train_time:714657ms step_avg:94.27ms +[2025-08-22 21:20:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:20:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:20:37] [Rank 0] PRINT: step:7600/10000 val_loss:4.0450 svd_entropy: attn_qk:H=0.5028,top10E=0.66,eRank=47.8,q75/q25=69.08 attn_vo:H=0.5033,top10E=0.66,eRank=64.5,q75/q25=81.14 mlp_w1:H=0.5924,top10E=0.51,eRank=89.1,q75/q25=55.94 mlp_w2:H=0.5774,top10E=0.52,eRank=89.4,q75/q25=66.44 vo_prod:H=0.3416,top10E=0.83,eRank=24.1,q75/q25=5633.76 train_time:716657ms step_avg:94.30ms +[2025-08-22 21:20:37] [Rank 0] PRINT: step:7600/10000 val_loss:4.0450 svd_entropy: attn_qk:H=0.5028,top10E=0.66,eRank=47.8,q75/q25=69.08 attn_vo:H=0.5033,top10E=0.66,eRank=64.5,q75/q25=81.14 mlp_w1:H=0.5924,top10E=0.51,eRank=89.1,q75/q25=55.94 mlp_w2:H=0.5774,top10E=0.52,eRank=89.4,q75/q25=66.44 vo_prod:H=0.3416,top10E=0.83,eRank=24.1,q75/q25=5633.76 train_time:716657ms step_avg:94.30ms +[2025-08-22 21:20:37] [Rank 0] step:7601/10000 train_time:716674ms step_avg:94.29ms +[2025-08-22 21:20:37] [Rank 0] step:7601/10000 train_time:716674ms step_avg:94.29ms +[2025-08-22 21:20:39] [Rank 0] step:7621/10000 train_time:718622ms step_avg:94.29ms +[2025-08-22 21:20:39] [Rank 0] step:7621/10000 train_time:718622ms step_avg:94.29ms +[2025-08-22 21:20:41] [Rank 0] step:7641/10000 train_time:720586ms step_avg:94.31ms +[2025-08-22 21:20:41] [Rank 0] step:7641/10000 train_time:720586ms step_avg:94.31ms +[2025-08-22 21:20:43] [Rank 0] step:7661/10000 train_time:722558ms step_avg:94.32ms +[2025-08-22 21:20:43] [Rank 0] step:7661/10000 train_time:722558ms step_avg:94.32ms +[2025-08-22 21:20:45] [Rank 0] step:7681/10000 train_time:724525ms step_avg:94.33ms +[2025-08-22 21:20:45] [Rank 0] step:7681/10000 train_time:724525ms step_avg:94.33ms +[2025-08-22 21:20:47] [Rank 0] step:7701/10000 train_time:726494ms step_avg:94.34ms +[2025-08-22 21:20:47] [Rank 0] step:7701/10000 train_time:726494ms step_avg:94.34ms +[2025-08-22 21:20:49] [Rank 0] step:7721/10000 train_time:728477ms step_avg:94.35ms +[2025-08-22 21:20:49] [Rank 0] step:7721/10000 train_time:728477ms step_avg:94.35ms +[2025-08-22 21:20:51] 
[Rank 0] step:7741/10000 train_time:730448ms step_avg:94.36ms +[2025-08-22 21:20:51] [Rank 0] step:7741/10000 train_time:730448ms step_avg:94.36ms +[2025-08-22 21:20:53] [Rank 0] step:7761/10000 train_time:732429ms step_avg:94.37ms +[2025-08-22 21:20:53] [Rank 0] step:7761/10000 train_time:732429ms step_avg:94.37ms +[2025-08-22 21:20:55] [Rank 0] step:7781/10000 train_time:734404ms step_avg:94.38ms +[2025-08-22 21:20:55] [Rank 0] step:7781/10000 train_time:734404ms step_avg:94.38ms +[2025-08-22 21:20:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:20:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:21:10] [Rank 0] PRINT: step:7800/10000 val_loss:4.0325 svd_entropy: attn_qk:H=0.5039,top10E=0.66,eRank=48.1,q75/q25=69.11 attn_vo:H=0.5055,top10E=0.66,eRank=65.2,q75/q25=79.93 mlp_w1:H=0.5925,top10E=0.51,eRank=89.5,q75/q25=57.22 mlp_w2:H=0.5783,top10E=0.52,eRank=89.8,q75/q25=67.07 vo_prod:H=0.3432,top10E=0.83,eRank=24.4,q75/q25=5561.67 train_time:736405ms step_avg:94.41ms +[2025-08-22 21:21:10] [Rank 0] PRINT: step:7800/10000 val_loss:4.0325 svd_entropy: attn_qk:H=0.5039,top10E=0.66,eRank=48.1,q75/q25=69.11 attn_vo:H=0.5055,top10E=0.66,eRank=65.2,q75/q25=79.93 mlp_w1:H=0.5925,top10E=0.51,eRank=89.5,q75/q25=57.22 mlp_w2:H=0.5783,top10E=0.52,eRank=89.8,q75/q25=67.07 vo_prod:H=0.3432,top10E=0.83,eRank=24.4,q75/q25=5561.67 train_time:736405ms step_avg:94.41ms +[2025-08-22 21:21:10] [Rank 0] step:7801/10000 train_time:736422ms step_avg:94.40ms +[2025-08-22 21:21:10] [Rank 0] step:7801/10000 train_time:736422ms step_avg:94.40ms +[2025-08-22 21:21:12] [Rank 0] step:7821/10000 train_time:738387ms step_avg:94.41ms +[2025-08-22 21:21:12] [Rank 0] step:7821/10000 train_time:738387ms step_avg:94.41ms +[2025-08-22 21:21:14] [Rank 0] step:7841/10000 train_time:740350ms step_avg:94.42ms 
+[2025-08-22 21:21:14] [Rank 0] step:7841/10000 train_time:740350ms step_avg:94.42ms +[2025-08-22 21:21:16] [Rank 0] step:7861/10000 train_time:742324ms step_avg:94.43ms +[2025-08-22 21:21:16] [Rank 0] step:7861/10000 train_time:742324ms step_avg:94.43ms +[2025-08-22 21:21:18] [Rank 0] step:7881/10000 train_time:744302ms step_avg:94.44ms +[2025-08-22 21:21:18] [Rank 0] step:7881/10000 train_time:744302ms step_avg:94.44ms +[2025-08-22 21:21:20] [Rank 0] step:7901/10000 train_time:746269ms step_avg:94.45ms +[2025-08-22 21:21:20] [Rank 0] step:7901/10000 train_time:746269ms step_avg:94.45ms +[2025-08-22 21:21:22] [Rank 0] step:7921/10000 train_time:748246ms step_avg:94.46ms +[2025-08-22 21:21:22] [Rank 0] step:7921/10000 train_time:748246ms step_avg:94.46ms +[2025-08-22 21:21:24] [Rank 0] step:7941/10000 train_time:750229ms step_avg:94.48ms +[2025-08-22 21:21:24] [Rank 0] step:7941/10000 train_time:750229ms step_avg:94.48ms +[2025-08-22 21:21:26] [Rank 0] step:7961/10000 train_time:752204ms step_avg:94.49ms +[2025-08-22 21:21:26] [Rank 0] step:7961/10000 train_time:752204ms step_avg:94.49ms +[2025-08-22 21:21:28] [Rank 0] step:7981/10000 train_time:754172ms step_avg:94.50ms +[2025-08-22 21:21:28] [Rank 0] step:7981/10000 train_time:754172ms step_avg:94.50ms +[2025-08-22 21:21:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:21:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:21:43] [Rank 0] PRINT: step:8000/10000 val_loss:4.0141 svd_entropy: attn_qk:H=0.5045,top10E=0.66,eRank=48.4,q75/q25=69.21 attn_vo:H=0.5075,top10E=0.66,eRank=65.8,q75/q25=79.40 mlp_w1:H=0.5927,top10E=0.51,eRank=89.7,q75/q25=58.84 mlp_w2:H=0.5793,top10E=0.52,eRank=90.2,q75/q25=67.54 vo_prod:H=0.3459,top10E=0.82,eRank=24.6,q75/q25=5398.62 train_time:756165ms step_avg:94.52ms +[2025-08-22 21:21:43] [Rank 0] PRINT: step:8000/10000 val_loss:4.0141 svd_entropy: attn_qk:H=0.5045,top10E=0.66,eRank=48.4,q75/q25=69.21 attn_vo:H=0.5075,top10E=0.66,eRank=65.8,q75/q25=79.40 mlp_w1:H=0.5927,top10E=0.51,eRank=89.7,q75/q25=58.84 mlp_w2:H=0.5793,top10E=0.52,eRank=90.2,q75/q25=67.54 vo_prod:H=0.3459,top10E=0.82,eRank=24.6,q75/q25=5398.62 train_time:756165ms step_avg:94.52ms +[2025-08-22 21:21:44] [Rank 0] step:8001/10000 train_time:756183ms step_avg:94.51ms +[2025-08-22 21:21:44] [Rank 0] step:8001/10000 train_time:756183ms step_avg:94.51ms +[2025-08-22 21:21:46] [Rank 0] step:8021/10000 train_time:758148ms step_avg:94.52ms +[2025-08-22 21:21:46] [Rank 0] step:8021/10000 train_time:758148ms step_avg:94.52ms +[2025-08-22 21:21:48] [Rank 0] step:8041/10000 train_time:760130ms step_avg:94.53ms +[2025-08-22 21:21:48] [Rank 0] step:8041/10000 train_time:760130ms step_avg:94.53ms +[2025-08-22 21:21:50] [Rank 0] step:8061/10000 train_time:762106ms step_avg:94.54ms +[2025-08-22 21:21:50] [Rank 0] step:8061/10000 train_time:762106ms step_avg:94.54ms +[2025-08-22 21:21:51] [Rank 0] step:8081/10000 train_time:764075ms step_avg:94.55ms +[2025-08-22 21:21:51] [Rank 0] step:8081/10000 train_time:764075ms step_avg:94.55ms +[2025-08-22 21:21:53] [Rank 0] step:8101/10000 train_time:766057ms step_avg:94.56ms +[2025-08-22 21:21:53] [Rank 0] step:8101/10000 train_time:766057ms step_avg:94.56ms +[2025-08-22 21:21:55] [Rank 0] step:8121/10000 train_time:768036ms step_avg:94.57ms +[2025-08-22 21:21:55] [Rank 0] step:8121/10000 train_time:768036ms step_avg:94.57ms +[2025-08-22 21:21:58] 
[Rank 0] step:8141/10000 train_time:770672ms step_avg:94.67ms +[2025-08-22 21:21:58] [Rank 0] step:8141/10000 train_time:770672ms step_avg:94.67ms +[2025-08-22 21:22:00] [Rank 0] step:8161/10000 train_time:772666ms step_avg:94.68ms +[2025-08-22 21:22:00] [Rank 0] step:8161/10000 train_time:772666ms step_avg:94.68ms +[2025-08-22 21:22:02] [Rank 0] step:8181/10000 train_time:774673ms step_avg:94.69ms +[2025-08-22 21:22:02] [Rank 0] step:8181/10000 train_time:774673ms step_avg:94.69ms +[2025-08-22 21:22:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:22:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:22:18] [Rank 0] PRINT: step:8200/10000 val_loss:4.0052 svd_entropy: attn_qk:H=0.5056,top10E=0.66,eRank=48.7,q75/q25=69.44 attn_vo:H=0.5091,top10E=0.65,eRank=66.3,q75/q25=78.08 mlp_w1:H=0.5931,top10E=0.51,eRank=90.0,q75/q25=59.28 mlp_w2:H=0.5804,top10E=0.51,eRank=90.6,q75/q25=68.65 vo_prod:H=0.3477,top10E=0.82,eRank=24.9,q75/q25=5321.31 train_time:776716ms step_avg:94.72ms +[2025-08-22 21:22:18] [Rank 0] PRINT: step:8200/10000 val_loss:4.0052 svd_entropy: attn_qk:H=0.5056,top10E=0.66,eRank=48.7,q75/q25=69.44 attn_vo:H=0.5091,top10E=0.65,eRank=66.3,q75/q25=78.08 mlp_w1:H=0.5931,top10E=0.51,eRank=90.0,q75/q25=59.28 mlp_w2:H=0.5804,top10E=0.51,eRank=90.6,q75/q25=68.65 vo_prod:H=0.3477,top10E=0.82,eRank=24.9,q75/q25=5321.31 train_time:776716ms step_avg:94.72ms +[2025-08-22 21:22:18] [Rank 0] step:8201/10000 train_time:776734ms step_avg:94.71ms +[2025-08-22 21:22:18] [Rank 0] step:8201/10000 train_time:776734ms step_avg:94.71ms +[2025-08-22 21:22:20] [Rank 0] step:8221/10000 train_time:778723ms step_avg:94.72ms +[2025-08-22 21:22:20] [Rank 0] step:8221/10000 train_time:778723ms step_avg:94.72ms +[2025-08-22 21:22:22] [Rank 0] step:8241/10000 train_time:780728ms step_avg:94.74ms 
+[2025-08-22 21:22:22] [Rank 0] step:8241/10000 train_time:780728ms step_avg:94.74ms +[2025-08-22 21:22:24] [Rank 0] step:8261/10000 train_time:782736ms step_avg:94.75ms +[2025-08-22 21:22:24] [Rank 0] step:8261/10000 train_time:782736ms step_avg:94.75ms +[2025-08-22 21:22:26] [Rank 0] step:8281/10000 train_time:784735ms step_avg:94.76ms +[2025-08-22 21:22:26] [Rank 0] step:8281/10000 train_time:784735ms step_avg:94.76ms +[2025-08-22 21:22:28] [Rank 0] step:8301/10000 train_time:786739ms step_avg:94.78ms +[2025-08-22 21:22:28] [Rank 0] step:8301/10000 train_time:786739ms step_avg:94.78ms +[2025-08-22 21:22:30] [Rank 0] step:8321/10000 train_time:788733ms step_avg:94.79ms +[2025-08-22 21:22:30] [Rank 0] step:8321/10000 train_time:788733ms step_avg:94.79ms +[2025-08-22 21:22:32] [Rank 0] step:8341/10000 train_time:790743ms step_avg:94.80ms +[2025-08-22 21:22:32] [Rank 0] step:8341/10000 train_time:790743ms step_avg:94.80ms +[2025-08-22 21:22:34] [Rank 0] step:8361/10000 train_time:792743ms step_avg:94.81ms +[2025-08-22 21:22:34] [Rank 0] step:8361/10000 train_time:792743ms step_avg:94.81ms +[2025-08-22 21:22:36] [Rank 0] step:8381/10000 train_time:794743ms step_avg:94.83ms +[2025-08-22 21:22:36] [Rank 0] step:8381/10000 train_time:794743ms step_avg:94.83ms +[2025-08-22 21:22:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:22:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:22:51] [Rank 0] PRINT: step:8400/10000 val_loss:3.9935 svd_entropy: attn_qk:H=0.5061,top10E=0.66,eRank=48.9,q75/q25=69.36 attn_vo:H=0.5106,top10E=0.65,eRank=66.8,q75/q25=77.47 mlp_w1:H=0.5935,top10E=0.51,eRank=90.4,q75/q25=60.32 mlp_w2:H=0.5816,top10E=0.51,eRank=90.9,q75/q25=69.15 vo_prod:H=0.3485,top10E=0.82,eRank=25.0,q75/q25=5174.58 train_time:796756ms step_avg:94.85ms +[2025-08-22 21:22:51] [Rank 0] PRINT: step:8400/10000 val_loss:3.9935 svd_entropy: attn_qk:H=0.5061,top10E=0.66,eRank=48.9,q75/q25=69.36 attn_vo:H=0.5106,top10E=0.65,eRank=66.8,q75/q25=77.47 mlp_w1:H=0.5935,top10E=0.51,eRank=90.4,q75/q25=60.32 mlp_w2:H=0.5816,top10E=0.51,eRank=90.9,q75/q25=69.15 vo_prod:H=0.3485,top10E=0.82,eRank=25.0,q75/q25=5174.58 train_time:796756ms step_avg:94.85ms +[2025-08-22 21:22:51] [Rank 0] step:8401/10000 train_time:796773ms step_avg:94.84ms +[2025-08-22 21:22:51] [Rank 0] step:8401/10000 train_time:796773ms step_avg:94.84ms +[2025-08-22 21:22:53] [Rank 0] step:8421/10000 train_time:798761ms step_avg:94.85ms +[2025-08-22 21:22:53] [Rank 0] step:8421/10000 train_time:798761ms step_avg:94.85ms +[2025-08-22 21:22:55] [Rank 0] step:8441/10000 train_time:800758ms step_avg:94.87ms +[2025-08-22 21:22:55] [Rank 0] step:8441/10000 train_time:800758ms step_avg:94.87ms +[2025-08-22 21:22:57] [Rank 0] step:8461/10000 train_time:802752ms step_avg:94.88ms +[2025-08-22 21:22:57] [Rank 0] step:8461/10000 train_time:802752ms step_avg:94.88ms +[2025-08-22 21:22:59] [Rank 0] step:8481/10000 train_time:804755ms step_avg:94.89ms +[2025-08-22 21:22:59] [Rank 0] step:8481/10000 train_time:804755ms step_avg:94.89ms +[2025-08-22 21:23:01] [Rank 0] step:8501/10000 train_time:806776ms step_avg:94.90ms +[2025-08-22 21:23:01] [Rank 0] step:8501/10000 train_time:806776ms step_avg:94.90ms +[2025-08-22 21:23:03] [Rank 0] step:8521/10000 train_time:808780ms step_avg:94.92ms +[2025-08-22 21:23:03] [Rank 0] step:8521/10000 train_time:808780ms step_avg:94.92ms +[2025-08-22 21:23:05] 
[Rank 0] step:8541/10000 train_time:810794ms step_avg:94.93ms +[2025-08-22 21:23:05] [Rank 0] step:8541/10000 train_time:810794ms step_avg:94.93ms +[2025-08-22 21:23:07] [Rank 0] step:8561/10000 train_time:812804ms step_avg:94.94ms +[2025-08-22 21:23:07] [Rank 0] step:8561/10000 train_time:812804ms step_avg:94.94ms +[2025-08-22 21:23:09] [Rank 0] step:8581/10000 train_time:814808ms step_avg:94.95ms +[2025-08-22 21:23:09] [Rank 0] step:8581/10000 train_time:814808ms step_avg:94.95ms +[2025-08-22 21:23:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:23:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:23:25] [Rank 0] PRINT: step:8600/10000 val_loss:3.9845 svd_entropy: attn_qk:H=0.5068,top10E=0.66,eRank=49.1,q75/q25=69.49 attn_vo:H=0.5123,top10E=0.65,eRank=67.3,q75/q25=76.52 mlp_w1:H=0.5939,top10E=0.51,eRank=90.6,q75/q25=60.75 mlp_w2:H=0.5826,top10E=0.51,eRank=91.3,q75/q25=69.54 vo_prod:H=0.3504,top10E=0.82,eRank=25.2,q75/q25=4995.65 train_time:816821ms step_avg:94.98ms +[2025-08-22 21:23:25] [Rank 0] PRINT: step:8600/10000 val_loss:3.9845 svd_entropy: attn_qk:H=0.5068,top10E=0.66,eRank=49.1,q75/q25=69.49 attn_vo:H=0.5123,top10E=0.65,eRank=67.3,q75/q25=76.52 mlp_w1:H=0.5939,top10E=0.51,eRank=90.6,q75/q25=60.75 mlp_w2:H=0.5826,top10E=0.51,eRank=91.3,q75/q25=69.54 vo_prod:H=0.3504,top10E=0.82,eRank=25.2,q75/q25=4995.65 train_time:816821ms step_avg:94.98ms +[2025-08-22 21:23:25] [Rank 0] step:8601/10000 train_time:816838ms step_avg:94.97ms +[2025-08-22 21:23:25] [Rank 0] step:8601/10000 train_time:816838ms step_avg:94.97ms +[2025-08-22 21:23:27] [Rank 0] step:8621/10000 train_time:818836ms step_avg:94.98ms +[2025-08-22 21:23:27] [Rank 0] step:8621/10000 train_time:818836ms step_avg:94.98ms +[2025-08-22 21:23:29] [Rank 0] step:8641/10000 train_time:820833ms step_avg:94.99ms 
+[2025-08-22 21:23:29] [Rank 0] step:8641/10000 train_time:820833ms step_avg:94.99ms +[2025-08-22 21:23:31] [Rank 0] step:8661/10000 train_time:822833ms step_avg:95.00ms +[2025-08-22 21:23:31] [Rank 0] step:8661/10000 train_time:822833ms step_avg:95.00ms +[2025-08-22 21:23:33] [Rank 0] step:8681/10000 train_time:824839ms step_avg:95.02ms +[2025-08-22 21:23:33] [Rank 0] step:8681/10000 train_time:824839ms step_avg:95.02ms +[2025-08-22 21:23:35] [Rank 0] step:8701/10000 train_time:826835ms step_avg:95.03ms +[2025-08-22 21:23:35] [Rank 0] step:8701/10000 train_time:826835ms step_avg:95.03ms +[2025-08-22 21:23:37] [Rank 0] step:8721/10000 train_time:828842ms step_avg:95.04ms +[2025-08-22 21:23:37] [Rank 0] step:8721/10000 train_time:828842ms step_avg:95.04ms +[2025-08-22 21:23:39] [Rank 0] step:8741/10000 train_time:830838ms step_avg:95.05ms +[2025-08-22 21:23:39] [Rank 0] step:8741/10000 train_time:830838ms step_avg:95.05ms +[2025-08-22 21:23:41] [Rank 0] step:8761/10000 train_time:832843ms step_avg:95.06ms +[2025-08-22 21:23:41] [Rank 0] step:8761/10000 train_time:832843ms step_avg:95.06ms +[2025-08-22 21:23:43] [Rank 0] step:8781/10000 train_time:834855ms step_avg:95.08ms +[2025-08-22 21:23:43] [Rank 0] step:8781/10000 train_time:834855ms step_avg:95.08ms +[2025-08-22 21:23:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:23:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:23:59] [Rank 0] PRINT: step:8800/10000 val_loss:3.9739 svd_entropy: attn_qk:H=0.5074,top10E=0.66,eRank=49.4,q75/q25=69.75 attn_vo:H=0.5135,top10E=0.65,eRank=67.6,q75/q25=75.88 mlp_w1:H=0.5943,top10E=0.50,eRank=90.9,q75/q25=61.33 mlp_w2:H=0.5834,top10E=0.51,eRank=91.6,q75/q25=70.21 vo_prod:H=0.3509,top10E=0.82,eRank=25.4,q75/q25=4934.35 train_time:836929ms step_avg:95.11ms +[2025-08-22 21:23:59] [Rank 0] PRINT: step:8800/10000 val_loss:3.9739 svd_entropy: attn_qk:H=0.5074,top10E=0.66,eRank=49.4,q75/q25=69.75 attn_vo:H=0.5135,top10E=0.65,eRank=67.6,q75/q25=75.88 mlp_w1:H=0.5943,top10E=0.50,eRank=90.9,q75/q25=61.33 mlp_w2:H=0.5834,top10E=0.51,eRank=91.6,q75/q25=70.21 vo_prod:H=0.3509,top10E=0.82,eRank=25.4,q75/q25=4934.35 train_time:836929ms step_avg:95.11ms +[2025-08-22 21:23:59] [Rank 0] step:8801/10000 train_time:836946ms step_avg:95.10ms +[2025-08-22 21:23:59] [Rank 0] step:8801/10000 train_time:836946ms step_avg:95.10ms +[2025-08-22 21:24:01] [Rank 0] step:8821/10000 train_time:838933ms step_avg:95.11ms +[2025-08-22 21:24:01] [Rank 0] step:8821/10000 train_time:838933ms step_avg:95.11ms +[2025-08-22 21:24:03] [Rank 0] step:8841/10000 train_time:840954ms step_avg:95.12ms +[2025-08-22 21:24:03] [Rank 0] step:8841/10000 train_time:840954ms step_avg:95.12ms +[2025-08-22 21:24:05] [Rank 0] step:8861/10000 train_time:842951ms step_avg:95.13ms +[2025-08-22 21:24:05] [Rank 0] step:8861/10000 train_time:842951ms step_avg:95.13ms +[2025-08-22 21:24:07] [Rank 0] step:8881/10000 train_time:844957ms step_avg:95.14ms +[2025-08-22 21:24:07] [Rank 0] step:8881/10000 train_time:844957ms step_avg:95.14ms +[2025-08-22 21:24:09] [Rank 0] step:8901/10000 train_time:846966ms step_avg:95.15ms +[2025-08-22 21:24:09] [Rank 0] step:8901/10000 train_time:846966ms step_avg:95.15ms +[2025-08-22 21:24:11] [Rank 0] step:8921/10000 train_time:848990ms step_avg:95.17ms +[2025-08-22 21:24:11] [Rank 0] step:8921/10000 train_time:848990ms step_avg:95.17ms +[2025-08-22 21:24:13] 
[Rank 0] step:8941/10000 train_time:850999ms step_avg:95.18ms +[2025-08-22 21:24:13] [Rank 0] step:8941/10000 train_time:850999ms step_avg:95.18ms +[2025-08-22 21:24:15] [Rank 0] step:8961/10000 train_time:853007ms step_avg:95.19ms +[2025-08-22 21:24:15] [Rank 0] step:8961/10000 train_time:853007ms step_avg:95.19ms +[2025-08-22 21:24:17] [Rank 0] step:8981/10000 train_time:855014ms step_avg:95.20ms +[2025-08-22 21:24:17] [Rank 0] step:8981/10000 train_time:855014ms step_avg:95.20ms +[2025-08-22 21:24:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:24:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:24:32] [Rank 0] PRINT: step:9000/10000 val_loss:3.9645 svd_entropy: attn_qk:H=0.5081,top10E=0.66,eRank=49.6,q75/q25=69.77 attn_vo:H=0.5144,top10E=0.65,eRank=67.9,q75/q25=75.51 mlp_w1:H=0.5946,top10E=0.50,eRank=91.1,q75/q25=61.83 mlp_w2:H=0.5842,top10E=0.50,eRank=91.9,q75/q25=70.57 vo_prod:H=0.3520,top10E=0.82,eRank=25.5,q75/q25=4749.20 train_time:857035ms step_avg:95.23ms +[2025-08-22 21:24:32] [Rank 0] PRINT: step:9000/10000 val_loss:3.9645 svd_entropy: attn_qk:H=0.5081,top10E=0.66,eRank=49.6,q75/q25=69.77 attn_vo:H=0.5144,top10E=0.65,eRank=67.9,q75/q25=75.51 mlp_w1:H=0.5946,top10E=0.50,eRank=91.1,q75/q25=61.83 mlp_w2:H=0.5842,top10E=0.50,eRank=91.9,q75/q25=70.57 vo_prod:H=0.3520,top10E=0.82,eRank=25.5,q75/q25=4749.20 train_time:857035ms step_avg:95.23ms +[2025-08-22 21:24:33] [Rank 0] step:9001/10000 train_time:857052ms step_avg:95.22ms +[2025-08-22 21:24:33] [Rank 0] step:9001/10000 train_time:857052ms step_avg:95.22ms +[2025-08-22 21:24:35] [Rank 0] step:9021/10000 train_time:859043ms step_avg:95.23ms +[2025-08-22 21:24:35] [Rank 0] step:9021/10000 train_time:859043ms step_avg:95.23ms +[2025-08-22 21:24:37] [Rank 0] step:9041/10000 train_time:861052ms step_avg:95.24ms 
+[2025-08-22 21:24:37] [Rank 0] step:9041/10000 train_time:861052ms step_avg:95.24ms +[2025-08-22 21:24:39] [Rank 0] step:9061/10000 train_time:863067ms step_avg:95.25ms +[2025-08-22 21:24:39] [Rank 0] step:9061/10000 train_time:863067ms step_avg:95.25ms +[2025-08-22 21:24:41] [Rank 0] step:9081/10000 train_time:865088ms step_avg:95.26ms +[2025-08-22 21:24:41] [Rank 0] step:9081/10000 train_time:865088ms step_avg:95.26ms +[2025-08-22 21:24:43] [Rank 0] step:9101/10000 train_time:867115ms step_avg:95.28ms +[2025-08-22 21:24:43] [Rank 0] step:9101/10000 train_time:867115ms step_avg:95.28ms +[2025-08-22 21:24:45] [Rank 0] step:9121/10000 train_time:869137ms step_avg:95.29ms +[2025-08-22 21:24:45] [Rank 0] step:9121/10000 train_time:869137ms step_avg:95.29ms +[2025-08-22 21:24:47] [Rank 0] step:9141/10000 train_time:871138ms step_avg:95.30ms +[2025-08-22 21:24:47] [Rank 0] step:9141/10000 train_time:871138ms step_avg:95.30ms +[2025-08-22 21:24:49] [Rank 0] step:9161/10000 train_time:873192ms step_avg:95.32ms +[2025-08-22 21:24:49] [Rank 0] step:9161/10000 train_time:873192ms step_avg:95.32ms +[2025-08-22 21:24:51] [Rank 0] step:9181/10000 train_time:875295ms step_avg:95.34ms +[2025-08-22 21:24:51] [Rank 0] step:9181/10000 train_time:875295ms step_avg:95.34ms +[2025-08-22 21:24:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:24:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:25:07] [Rank 0] PRINT: step:9200/10000 val_loss:3.9560 svd_entropy: attn_qk:H=0.5087,top10E=0.66,eRank=49.7,q75/q25=69.64 attn_vo:H=0.5157,top10E=0.65,eRank=68.2,q75/q25=75.31 mlp_w1:H=0.5951,top10E=0.50,eRank=91.3,q75/q25=62.15 mlp_w2:H=0.5849,top10E=0.50,eRank=92.1,q75/q25=71.17 vo_prod:H=0.3532,top10E=0.82,eRank=25.6,q75/q25=4757.72 train_time:877318ms step_avg:95.36ms +[2025-08-22 21:25:07] [Rank 0] PRINT: step:9200/10000 val_loss:3.9560 svd_entropy: attn_qk:H=0.5087,top10E=0.66,eRank=49.7,q75/q25=69.64 attn_vo:H=0.5157,top10E=0.65,eRank=68.2,q75/q25=75.31 mlp_w1:H=0.5951,top10E=0.50,eRank=91.3,q75/q25=62.15 mlp_w2:H=0.5849,top10E=0.50,eRank=92.1,q75/q25=71.17 vo_prod:H=0.3532,top10E=0.82,eRank=25.6,q75/q25=4757.72 train_time:877318ms step_avg:95.36ms +[2025-08-22 21:25:07] [Rank 0] step:9201/10000 train_time:877336ms step_avg:95.35ms +[2025-08-22 21:25:07] [Rank 0] step:9201/10000 train_time:877336ms step_avg:95.35ms +[2025-08-22 21:25:09] [Rank 0] step:9221/10000 train_time:879346ms step_avg:95.36ms +[2025-08-22 21:25:09] [Rank 0] step:9221/10000 train_time:879346ms step_avg:95.36ms +[2025-08-22 21:25:11] [Rank 0] step:9241/10000 train_time:881359ms step_avg:95.37ms +[2025-08-22 21:25:11] [Rank 0] step:9241/10000 train_time:881359ms step_avg:95.37ms +[2025-08-22 21:25:13] [Rank 0] step:9261/10000 train_time:883372ms step_avg:95.39ms +[2025-08-22 21:25:13] [Rank 0] step:9261/10000 train_time:883372ms step_avg:95.39ms +[2025-08-22 21:25:15] [Rank 0] step:9281/10000 train_time:885371ms step_avg:95.40ms +[2025-08-22 21:25:15] [Rank 0] step:9281/10000 train_time:885371ms step_avg:95.40ms +[2025-08-22 21:25:17] [Rank 0] step:9301/10000 train_time:887372ms step_avg:95.41ms +[2025-08-22 21:25:17] [Rank 0] step:9301/10000 train_time:887372ms step_avg:95.41ms +[2025-08-22 21:25:19] [Rank 0] step:9321/10000 train_time:889381ms step_avg:95.42ms +[2025-08-22 21:25:19] [Rank 0] step:9321/10000 train_time:889381ms step_avg:95.42ms +[2025-08-22 21:25:21] 
[Rank 0] step:9341/10000 train_time:891389ms step_avg:95.43ms +[2025-08-22 21:25:21] [Rank 0] step:9341/10000 train_time:891389ms step_avg:95.43ms +[2025-08-22 21:25:23] [Rank 0] step:9361/10000 train_time:893400ms step_avg:95.44ms +[2025-08-22 21:25:23] [Rank 0] step:9361/10000 train_time:893400ms step_avg:95.44ms +[2025-08-22 21:25:25] [Rank 0] step:9381/10000 train_time:895418ms step_avg:95.45ms +[2025-08-22 21:25:25] [Rank 0] step:9381/10000 train_time:895418ms step_avg:95.45ms +[2025-08-22 21:25:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:25:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:25:40] [Rank 0] PRINT: step:9400/10000 val_loss:3.9485 svd_entropy: attn_qk:H=0.5092,top10E=0.66,eRank=49.9,q75/q25=70.00 attn_vo:H=0.5163,top10E=0.64,eRank=68.4,q75/q25=75.39 mlp_w1:H=0.5954,top10E=0.50,eRank=91.4,q75/q25=62.61 mlp_w2:H=0.5856,top10E=0.50,eRank=92.4,q75/q25=71.28 vo_prod:H=0.3536,top10E=0.82,eRank=25.7,q75/q25=4743.51 train_time:897443ms step_avg:95.47ms +[2025-08-22 21:25:40] [Rank 0] PRINT: step:9400/10000 val_loss:3.9485 svd_entropy: attn_qk:H=0.5092,top10E=0.66,eRank=49.9,q75/q25=70.00 attn_vo:H=0.5163,top10E=0.64,eRank=68.4,q75/q25=75.39 mlp_w1:H=0.5954,top10E=0.50,eRank=91.4,q75/q25=62.61 mlp_w2:H=0.5856,top10E=0.50,eRank=92.4,q75/q25=71.28 vo_prod:H=0.3536,top10E=0.82,eRank=25.7,q75/q25=4743.51 train_time:897443ms step_avg:95.47ms +[2025-08-22 21:25:40] [Rank 0] step:9401/10000 train_time:897461ms step_avg:95.46ms +[2025-08-22 21:25:40] [Rank 0] step:9401/10000 train_time:897461ms step_avg:95.46ms +[2025-08-22 21:25:42] [Rank 0] step:9421/10000 train_time:899443ms step_avg:95.47ms +[2025-08-22 21:25:42] [Rank 0] step:9421/10000 train_time:899443ms step_avg:95.47ms +[2025-08-22 21:25:44] [Rank 0] step:9441/10000 train_time:901446ms step_avg:95.48ms 
+[2025-08-22 21:25:44] [Rank 0] step:9441/10000 train_time:901446ms step_avg:95.48ms +[2025-08-22 21:25:46] [Rank 0] step:9461/10000 train_time:903455ms step_avg:95.49ms +[2025-08-22 21:25:46] [Rank 0] step:9461/10000 train_time:903455ms step_avg:95.49ms +[2025-08-22 21:25:48] [Rank 0] step:9481/10000 train_time:905460ms step_avg:95.50ms +[2025-08-22 21:25:48] [Rank 0] step:9481/10000 train_time:905460ms step_avg:95.50ms +[2025-08-22 21:25:50] [Rank 0] step:9501/10000 train_time:907476ms step_avg:95.51ms +[2025-08-22 21:25:50] [Rank 0] step:9501/10000 train_time:907476ms step_avg:95.51ms +[2025-08-22 21:25:52] [Rank 0] step:9521/10000 train_time:909471ms step_avg:95.52ms +[2025-08-22 21:25:52] [Rank 0] step:9521/10000 train_time:909471ms step_avg:95.52ms +[2025-08-22 21:25:55] [Rank 0] step:9541/10000 train_time:911520ms step_avg:95.54ms +[2025-08-22 21:25:55] [Rank 0] step:9541/10000 train_time:911520ms step_avg:95.54ms +[2025-08-22 21:25:57] [Rank 0] step:9561/10000 train_time:913520ms step_avg:95.55ms +[2025-08-22 21:25:57] [Rank 0] step:9561/10000 train_time:913520ms step_avg:95.55ms +[2025-08-22 21:25:59] [Rank 0] step:9581/10000 train_time:915527ms step_avg:95.56ms +[2025-08-22 21:25:59] [Rank 0] step:9581/10000 train_time:915527ms step_avg:95.56ms +[2025-08-22 21:26:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:26:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:26:14] [Rank 0] PRINT: step:9600/10000 val_loss:3.9403 svd_entropy: attn_qk:H=0.5096,top10E=0.65,eRank=50.0,q75/q25=70.07 attn_vo:H=0.5169,top10E=0.64,eRank=68.6,q75/q25=75.24 mlp_w1:H=0.5957,top10E=0.50,eRank=91.6,q75/q25=62.67 mlp_w2:H=0.5861,top10E=0.50,eRank=92.5,q75/q25=71.16 vo_prod:H=0.3543,top10E=0.82,eRank=25.8,q75/q25=4675.69 train_time:917561ms step_avg:95.58ms +[2025-08-22 21:26:14] [Rank 0] PRINT: step:9600/10000 val_loss:3.9403 svd_entropy: attn_qk:H=0.5096,top10E=0.65,eRank=50.0,q75/q25=70.07 attn_vo:H=0.5169,top10E=0.64,eRank=68.6,q75/q25=75.24 mlp_w1:H=0.5957,top10E=0.50,eRank=91.6,q75/q25=62.67 mlp_w2:H=0.5861,top10E=0.50,eRank=92.5,q75/q25=71.16 vo_prod:H=0.3543,top10E=0.82,eRank=25.8,q75/q25=4675.69 train_time:917561ms step_avg:95.58ms +[2025-08-22 21:26:14] [Rank 0] step:9601/10000 train_time:917579ms step_avg:95.57ms +[2025-08-22 21:26:14] [Rank 0] step:9601/10000 train_time:917579ms step_avg:95.57ms +[2025-08-22 21:26:16] [Rank 0] step:9621/10000 train_time:919559ms step_avg:95.58ms +[2025-08-22 21:26:16] [Rank 0] step:9621/10000 train_time:919559ms step_avg:95.58ms +[2025-08-22 21:26:18] [Rank 0] step:9641/10000 train_time:921569ms step_avg:95.59ms +[2025-08-22 21:26:18] [Rank 0] step:9641/10000 train_time:921569ms step_avg:95.59ms +[2025-08-22 21:26:20] [Rank 0] step:9661/10000 train_time:923600ms step_avg:95.60ms +[2025-08-22 21:26:20] [Rank 0] step:9661/10000 train_time:923600ms step_avg:95.60ms +[2025-08-22 21:26:22] [Rank 0] step:9681/10000 train_time:925624ms step_avg:95.61ms +[2025-08-22 21:26:22] [Rank 0] step:9681/10000 train_time:925624ms step_avg:95.61ms +[2025-08-22 21:26:24] [Rank 0] step:9701/10000 train_time:927664ms step_avg:95.63ms +[2025-08-22 21:26:24] [Rank 0] step:9701/10000 train_time:927664ms step_avg:95.63ms +[2025-08-22 21:26:26] [Rank 0] step:9721/10000 train_time:929691ms step_avg:95.64ms +[2025-08-22 21:26:26] [Rank 0] step:9721/10000 train_time:929691ms step_avg:95.64ms +[2025-08-22 21:26:28] 
[Rank 0] step:9741/10000 train_time:931736ms step_avg:95.65ms +[2025-08-22 21:26:28] [Rank 0] step:9741/10000 train_time:931736ms step_avg:95.65ms +[2025-08-22 21:26:30] [Rank 0] step:9761/10000 train_time:933775ms step_avg:95.66ms +[2025-08-22 21:26:30] [Rank 0] step:9761/10000 train_time:933775ms step_avg:95.66ms +[2025-08-22 21:26:32] [Rank 0] step:9781/10000 train_time:935818ms step_avg:95.68ms +[2025-08-22 21:26:32] [Rank 0] step:9781/10000 train_time:935818ms step_avg:95.68ms +[2025-08-22 21:26:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:26:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:26:48] [Rank 0] PRINT: step:9800/10000 val_loss:3.9338 svd_entropy: attn_qk:H=0.5098,top10E=0.65,eRank=50.1,q75/q25=70.13 attn_vo:H=0.5175,top10E=0.64,eRank=68.8,q75/q25=75.20 mlp_w1:H=0.5960,top10E=0.50,eRank=91.7,q75/q25=63.06 mlp_w2:H=0.5865,top10E=0.50,eRank=92.7,q75/q25=71.43 vo_prod:H=0.3546,top10E=0.82,eRank=25.8,q75/q25=4637.81 train_time:937883ms step_avg:95.70ms +[2025-08-22 21:26:48] [Rank 0] PRINT: step:9800/10000 val_loss:3.9338 svd_entropy: attn_qk:H=0.5098,top10E=0.65,eRank=50.1,q75/q25=70.13 attn_vo:H=0.5175,top10E=0.64,eRank=68.8,q75/q25=75.20 mlp_w1:H=0.5960,top10E=0.50,eRank=91.7,q75/q25=63.06 mlp_w2:H=0.5865,top10E=0.50,eRank=92.7,q75/q25=71.43 vo_prod:H=0.3546,top10E=0.82,eRank=25.8,q75/q25=4637.81 train_time:937883ms step_avg:95.70ms +[2025-08-22 21:26:48] [Rank 0] step:9801/10000 train_time:937902ms step_avg:95.69ms +[2025-08-22 21:26:48] [Rank 0] step:9801/10000 train_time:937902ms step_avg:95.69ms +[2025-08-22 21:26:50] [Rank 0] step:9821/10000 train_time:939929ms step_avg:95.71ms +[2025-08-22 21:26:50] [Rank 0] step:9821/10000 train_time:939929ms step_avg:95.71ms +[2025-08-22 21:26:52] [Rank 0] step:9841/10000 train_time:941969ms step_avg:95.72ms 
+[2025-08-22 21:26:52] [Rank 0] step:9841/10000 train_time:941969ms step_avg:95.72ms +[2025-08-22 21:26:54] [Rank 0] step:9861/10000 train_time:944067ms step_avg:95.74ms +[2025-08-22 21:26:54] [Rank 0] step:9861/10000 train_time:944067ms step_avg:95.74ms +[2025-08-22 21:26:56] [Rank 0] step:9881/10000 train_time:946130ms step_avg:95.75ms +[2025-08-22 21:26:56] [Rank 0] step:9881/10000 train_time:946130ms step_avg:95.75ms +[2025-08-22 21:26:58] [Rank 0] step:9901/10000 train_time:948181ms step_avg:95.77ms +[2025-08-22 21:26:58] [Rank 0] step:9901/10000 train_time:948181ms step_avg:95.77ms +[2025-08-22 21:27:01] [Rank 0] step:9921/10000 train_time:950208ms step_avg:95.78ms +[2025-08-22 21:27:01] [Rank 0] step:9921/10000 train_time:950208ms step_avg:95.78ms +[2025-08-22 21:27:03] [Rank 0] step:9941/10000 train_time:952253ms step_avg:95.79ms +[2025-08-22 21:27:03] [Rank 0] step:9941/10000 train_time:952253ms step_avg:95.79ms +[2025-08-22 21:27:05] [Rank 0] step:9961/10000 train_time:954277ms step_avg:95.80ms +[2025-08-22 21:27:05] [Rank 0] step:9961/10000 train_time:954277ms step_avg:95.80ms +[2025-08-22 21:27:07] [Rank 0] step:9981/10000 train_time:956319ms step_avg:95.81ms +[2025-08-22 21:27:07] [Rank 0] step:9981/10000 train_time:956319ms step_avg:95.81ms +[2025-08-22 21:27:09] [Rank 0] step:10000/10000 train_time:958264ms step_avg:95.83ms +[2025-08-22 21:27:09] [Rank 0] step:10000/10000 train_time:958264ms step_avg:95.83ms +[2025-08-22 21:27:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:27:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:27:22] [Rank 0] PRINT: step:10000/10000 val_loss:3.9274 svd_entropy: attn_qk:H=0.5101,top10E=0.65,eRank=50.2,q75/q25=70.05 attn_vo:H=0.5179,top10E=0.64,eRank=68.9,q75/q25=74.97 mlp_w1:H=0.5961,top10E=0.50,eRank=91.8,q75/q25=63.38 mlp_w2:H=0.5867,top10E=0.50,eRank=92.8,q75/q25=71.40 vo_prod:H=0.3548,top10E=0.82,eRank=25.9,q75/q25=4660.84 train_time:958389ms step_avg:95.84ms +[2025-08-22 21:27:22] [Rank 0] PRINT: step:10000/10000 val_loss:3.9274 svd_entropy: attn_qk:H=0.5101,top10E=0.65,eRank=50.2,q75/q25=70.05 attn_vo:H=0.5179,top10E=0.64,eRank=68.9,q75/q25=74.97 mlp_w1:H=0.5961,top10E=0.50,eRank=91.8,q75/q25=63.38 mlp_w2:H=0.5867,top10E=0.50,eRank=92.8,q75/q25=71.40 vo_prod:H=0.3548,top10E=0.82,eRank=25.9,q75/q25=4660.84 train_time:958389ms step_avg:95.84ms +[2025-08-22 21:27:22] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 21:27:22 2025 --- +[2025-08-22 21:27:22] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 21:27:22 2025 --- +[2025-08-22 21:27:22] [Rank 0] PRINT: Peak memory allocated: 11208 MiB reserved: 16896 MiB +[2025-08-22 21:27:22] [Rank 0] PRINT: Peak memory allocated: 11208 MiB reserved: 16896 MiB diff --git a/logs_svd_gated/mode_5_param_gated_seed_41/config.json b/logs_svd_gated/mode_5_param_gated_seed_41/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b3915217db6ce745c21aae59ee418e8700ff384f --- /dev/null +++ b/logs_svd_gated/mode_5_param_gated_seed_41/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 41, + "optimizer_mode": 5, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "4bf8a650-8746-45b6-ac21-c0c30d423572", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_5_param_gated_seed_41/training_log_4bf8a650-8746-45b6-ac21-c0c30d423572.txt b/logs_svd_gated/mode_5_param_gated_seed_41/training_log_4bf8a650-8746-45b6-ac21-c0c30d423572.txt new file mode 100644 index 0000000000000000000000000000000000000000..7762853364ee759f8501ae6a8822fdc87398b9ae --- /dev/null +++ b/logs_svd_gated/mode_5_param_gated_seed_41/training_log_4bf8a650-8746-45b6-ac21-c0c30d423572.txt @@ -0,0 +1,2926 @@ +[2025-08-22 11:00:02] [Rank 0] PRINT: --- Script Start: Fri Aug 22 11:00:02 2025 --- +[2025-08-22 11:00:02] [Rank 0] PRINT: --- Script Start: Fri Aug 22 11:00:02 2025 --- +[2025-08-22 11:00:02] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=5, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 11:00:02] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=5, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 11:00:02] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 11:00:02] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 11:00:02] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 11:00:02] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 11:00:02] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_5_param_gated_seed_41 +[2025-08-22 11:00:02] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_5_param_gated_seed_41 +[2025-08-22 11:00:02] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 11:00:02] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 11:00:02] [Rank 0] PRINT: Constructing model... +[2025-08-22 11:00:02] [Rank 0] PRINT: Constructing model... +[2025-08-22 11:00:04] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 11:00:04] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 11:00:04] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 11:00:04] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 11:00:04] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 11:00:04] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 11:00:04] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 5 +[2025-08-22 11:00:04] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 5 +[2025-08-22 11:00:04] [Rank 0] PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: 0.05). +[2025-08-22 11:00:04] [Rank 0] PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: 0.05). +[2025-08-22 11:00:04] [Rank 0] PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices). +[2025-08-22 11:00:04] [Rank 0] PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices). +[2025-08-22 11:00:04] [Rank 0] PRINT: Optimizers configured. Total optimizers: 1 +[2025-08-22 11:00:04] [Rank 0] PRINT: Optimizers configured. Total optimizers: 1 +[2025-08-22 11:00:04] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 11:00:04] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 11:00:04] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 11:00:04] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 11:00:04] [Rank 0] PRINT: Starting warmup... +[2025-08-22 11:00:04] [Rank 0] PRINT: Starting warmup... +[2025-08-22 11:00:50] [Rank 0] PRINT: Warmup complete. +[2025-08-22 11:00:50] [Rank 0] PRINT: Warmup complete. 
+[2025-08-22 11:00:51] [Rank 0] PRINT: Starting training... +[2025-08-22 11:00:51] [Rank 0] PRINT: Starting training... +[2025-08-22 11:00:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:00:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:01:13] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 11:01:13] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 11:01:14] [Rank 0] step:21/10000 train_time:1408ms step_avg:67.05ms +[2025-08-22 11:01:14] [Rank 0] step:21/10000 train_time:1408ms step_avg:67.05ms +[2025-08-22 11:01:16] [Rank 0] step:41/10000 train_time:3030ms step_avg:73.90ms +[2025-08-22 11:01:16] [Rank 0] step:41/10000 train_time:3030ms step_avg:73.90ms +[2025-08-22 11:01:18] [Rank 0] step:61/10000 train_time:4651ms step_avg:76.24ms +[2025-08-22 11:01:18] [Rank 0] step:61/10000 train_time:4651ms step_avg:76.24ms +[2025-08-22 11:01:19] [Rank 0] step:81/10000 train_time:6278ms step_avg:77.51ms +[2025-08-22 11:01:19] [Rank 0] step:81/10000 train_time:6278ms step_avg:77.51ms +[2025-08-22 11:01:21] [Rank 0] step:101/10000 train_time:7907ms step_avg:78.29ms +[2025-08-22 11:01:21] [Rank 0] step:101/10000 train_time:7907ms step_avg:78.29ms +[2025-08-22 
11:01:23] [Rank 0] step:121/10000 train_time:9540ms step_avg:78.84ms +[2025-08-22 11:01:23] [Rank 0] step:121/10000 train_time:9540ms step_avg:78.84ms +[2025-08-22 11:01:24] [Rank 0] step:141/10000 train_time:11171ms step_avg:79.23ms +[2025-08-22 11:01:24] [Rank 0] step:141/10000 train_time:11171ms step_avg:79.23ms +[2025-08-22 11:01:26] [Rank 0] step:161/10000 train_time:12803ms step_avg:79.52ms +[2025-08-22 11:01:26] [Rank 0] step:161/10000 train_time:12803ms step_avg:79.52ms +[2025-08-22 11:01:27] [Rank 0] step:181/10000 train_time:14435ms step_avg:79.75ms +[2025-08-22 11:01:27] [Rank 0] step:181/10000 train_time:14435ms step_avg:79.75ms +[2025-08-22 11:01:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:01:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:01:43] [Rank 0] PRINT: step:200/10000 val_loss:6.9471 svd_entropy: attn_qk:H=0.2718,top10E=0.89,eRank=9.6,q75/q25=17.68 attn_vo:H=0.1136,top10E=0.98,eRank=2.5,q75/q25=1463.42 mlp_w1:H=0.3249,top10E=0.91,eRank=8.9,q75/q25=5.46 mlp_w2:H=0.3510,top10E=0.87,eRank=10.7,q75/q25=6.73 vo_prod:H=0.0182,top10E=1.00,eRank=1.2,q75/q25=8751.04 train_time:16348ms step_avg:81.74ms +[2025-08-22 11:01:43] [Rank 0] PRINT: step:200/10000 val_loss:6.9471 svd_entropy: attn_qk:H=0.2718,top10E=0.89,eRank=9.6,q75/q25=17.68 attn_vo:H=0.1136,top10E=0.98,eRank=2.5,q75/q25=1463.42 mlp_w1:H=0.3249,top10E=0.91,eRank=8.9,q75/q25=5.46 mlp_w2:H=0.3510,top10E=0.87,eRank=10.7,q75/q25=6.73 vo_prod:H=0.0182,top10E=1.00,eRank=1.2,q75/q25=8751.04 train_time:16348ms step_avg:81.74ms +[2025-08-22 11:01:43] [Rank 0] step:201/10000 train_time:16358ms step_avg:81.39ms +[2025-08-22 11:01:43] [Rank 0] step:201/10000 train_time:16358ms step_avg:81.39ms +[2025-08-22 11:01:45] [Rank 0] step:221/10000 train_time:17714ms step_avg:80.15ms +[2025-08-22 11:01:45] [Rank 0] 
step:221/10000 train_time:17714ms step_avg:80.15ms +[2025-08-22 11:01:46] [Rank 0] step:241/10000 train_time:19341ms step_avg:80.25ms +[2025-08-22 11:01:46] [Rank 0] step:241/10000 train_time:19341ms step_avg:80.25ms +[2025-08-22 11:01:48] [Rank 0] step:261/10000 train_time:20969ms step_avg:80.34ms +[2025-08-22 11:01:48] [Rank 0] step:261/10000 train_time:20969ms step_avg:80.34ms +[2025-08-22 11:01:50] [Rank 0] step:281/10000 train_time:22598ms step_avg:80.42ms +[2025-08-22 11:01:50] [Rank 0] step:281/10000 train_time:22598ms step_avg:80.42ms +[2025-08-22 11:01:51] [Rank 0] step:301/10000 train_time:24228ms step_avg:80.49ms +[2025-08-22 11:01:51] [Rank 0] step:301/10000 train_time:24228ms step_avg:80.49ms +[2025-08-22 11:01:53] [Rank 0] step:321/10000 train_time:25859ms step_avg:80.56ms +[2025-08-22 11:01:53] [Rank 0] step:321/10000 train_time:25859ms step_avg:80.56ms +[2025-08-22 11:01:54] [Rank 0] step:341/10000 train_time:27489ms step_avg:80.61ms +[2025-08-22 11:01:54] [Rank 0] step:341/10000 train_time:27489ms step_avg:80.61ms +[2025-08-22 11:01:56] [Rank 0] step:361/10000 train_time:29119ms step_avg:80.66ms +[2025-08-22 11:01:56] [Rank 0] step:361/10000 train_time:29119ms step_avg:80.66ms +[2025-08-22 11:01:58] [Rank 0] step:381/10000 train_time:30750ms step_avg:80.71ms +[2025-08-22 11:01:58] [Rank 0] step:381/10000 train_time:30750ms step_avg:80.71ms +[2025-08-22 11:01:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:01:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:02:13] [Rank 0] PRINT: step:400/10000 val_loss:6.3141 svd_entropy: attn_qk:H=0.4548,top10E=0.76,eRank=25.1,q75/q25=58.95 attn_vo:H=0.2282,top10E=0.98,eRank=5.1,q75/q25=121.83 mlp_w1:H=0.4427,top10E=0.79,eRank=20.8,q75/q25=9.23 mlp_w2:H=0.4750,top10E=0.73,eRank=25.1,q75/q25=7.66 vo_prod:H=0.1086,top10E=1.00,eRank=2.3,q75/q25=918.69 train_time:32662ms step_avg:81.65ms +[2025-08-22 11:02:13] [Rank 0] PRINT: step:400/10000 val_loss:6.3141 svd_entropy: attn_qk:H=0.4548,top10E=0.76,eRank=25.1,q75/q25=58.95 attn_vo:H=0.2282,top10E=0.98,eRank=5.1,q75/q25=121.83 mlp_w1:H=0.4427,top10E=0.79,eRank=20.8,q75/q25=9.23 mlp_w2:H=0.4750,top10E=0.73,eRank=25.1,q75/q25=7.66 vo_prod:H=0.1086,top10E=1.00,eRank=2.3,q75/q25=918.69 train_time:32662ms step_avg:81.65ms +[2025-08-22 11:02:13] [Rank 0] step:401/10000 train_time:32672ms step_avg:81.48ms +[2025-08-22 11:02:13] [Rank 0] step:401/10000 train_time:32672ms step_avg:81.48ms +[2025-08-22 11:02:15] [Rank 0] step:421/10000 train_time:34043ms step_avg:80.86ms +[2025-08-22 11:02:15] [Rank 0] step:421/10000 train_time:34043ms step_avg:80.86ms +[2025-08-22 11:02:16] [Rank 0] step:441/10000 train_time:35669ms step_avg:80.88ms +[2025-08-22 11:02:16] [Rank 0] step:441/10000 train_time:35669ms step_avg:80.88ms +[2025-08-22 11:02:18] [Rank 0] step:461/10000 train_time:37295ms step_avg:80.90ms +[2025-08-22 11:02:18] [Rank 0] step:461/10000 train_time:37295ms step_avg:80.90ms +[2025-08-22 11:02:20] [Rank 0] step:481/10000 train_time:38927ms step_avg:80.93ms +[2025-08-22 11:02:20] [Rank 0] step:481/10000 train_time:38927ms step_avg:80.93ms +[2025-08-22 11:02:21] [Rank 0] step:501/10000 train_time:40554ms step_avg:80.95ms +[2025-08-22 11:02:21] [Rank 0] step:501/10000 train_time:40554ms step_avg:80.95ms +[2025-08-22 11:02:23] [Rank 0] step:521/10000 train_time:42182ms step_avg:80.96ms +[2025-08-22 11:02:23] [Rank 0] step:521/10000 train_time:42182ms step_avg:80.96ms +[2025-08-22 11:02:24] [Rank 0] step:541/10000 train_time:43810ms 
step_avg:80.98ms +[2025-08-22 11:02:24] [Rank 0] step:541/10000 train_time:43810ms step_avg:80.98ms +[2025-08-22 11:02:26] [Rank 0] step:561/10000 train_time:45442ms step_avg:81.00ms +[2025-08-22 11:02:26] [Rank 0] step:561/10000 train_time:45442ms step_avg:81.00ms +[2025-08-22 11:02:28] [Rank 0] step:581/10000 train_time:47072ms step_avg:81.02ms +[2025-08-22 11:02:28] [Rank 0] step:581/10000 train_time:47072ms step_avg:81.02ms +[2025-08-22 11:02:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:02:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:02:43] [Rank 0] PRINT: step:600/10000 val_loss:5.9179 svd_entropy: attn_qk:H=0.5032,top10E=0.68,eRank=32.9,q75/q25=75.82 attn_vo:H=0.2742,top10E=0.97,eRank=6.8,q75/q25=94.48 mlp_w1:H=0.5028,top10E=0.70,eRank=32.4,q75/q25=7.96 mlp_w2:H=0.5611,top10E=0.59,eRank=46.0,q75/q25=7.49 vo_prod:H=0.1560,top10E=1.00,eRank=3.2,q75/q25=790.22 train_time:48980ms step_avg:81.63ms +[2025-08-22 11:02:43] [Rank 0] PRINT: step:600/10000 val_loss:5.9179 svd_entropy: attn_qk:H=0.5032,top10E=0.68,eRank=32.9,q75/q25=75.82 attn_vo:H=0.2742,top10E=0.97,eRank=6.8,q75/q25=94.48 mlp_w1:H=0.5028,top10E=0.70,eRank=32.4,q75/q25=7.96 mlp_w2:H=0.5611,top10E=0.59,eRank=46.0,q75/q25=7.49 vo_prod:H=0.1560,top10E=1.00,eRank=3.2,q75/q25=790.22 train_time:48980ms step_avg:81.63ms +[2025-08-22 11:02:43] [Rank 0] step:601/10000 train_time:48990ms step_avg:81.51ms +[2025-08-22 11:02:43] [Rank 0] step:601/10000 train_time:48990ms step_avg:81.51ms +[2025-08-22 11:02:45] [Rank 0] step:621/10000 train_time:50355ms step_avg:81.09ms +[2025-08-22 11:02:45] [Rank 0] step:621/10000 train_time:50355ms step_avg:81.09ms +[2025-08-22 11:02:46] [Rank 0] step:641/10000 train_time:51978ms step_avg:81.09ms +[2025-08-22 11:02:46] [Rank 0] step:641/10000 train_time:51978ms step_avg:81.09ms 
+[2025-08-22 11:02:48] [Rank 0] step:661/10000 train_time:53606ms step_avg:81.10ms +[2025-08-22 11:02:48] [Rank 0] step:661/10000 train_time:53606ms step_avg:81.10ms +[2025-08-22 11:02:50] [Rank 0] step:681/10000 train_time:55233ms step_avg:81.11ms +[2025-08-22 11:02:50] [Rank 0] step:681/10000 train_time:55233ms step_avg:81.11ms +[2025-08-22 11:02:51] [Rank 0] step:701/10000 train_time:56859ms step_avg:81.11ms +[2025-08-22 11:02:51] [Rank 0] step:701/10000 train_time:56859ms step_avg:81.11ms +[2025-08-22 11:02:53] [Rank 0] step:721/10000 train_time:58487ms step_avg:81.12ms +[2025-08-22 11:02:53] [Rank 0] step:721/10000 train_time:58487ms step_avg:81.12ms +[2025-08-22 11:02:55] [Rank 0] step:741/10000 train_time:60112ms step_avg:81.12ms +[2025-08-22 11:02:55] [Rank 0] step:741/10000 train_time:60112ms step_avg:81.12ms +[2025-08-22 11:02:56] [Rank 0] step:761/10000 train_time:61750ms step_avg:81.14ms +[2025-08-22 11:02:56] [Rank 0] step:761/10000 train_time:61750ms step_avg:81.14ms +[2025-08-22 11:02:58] [Rank 0] step:781/10000 train_time:63390ms step_avg:81.17ms +[2025-08-22 11:02:58] [Rank 0] step:781/10000 train_time:63390ms step_avg:81.17ms +[2025-08-22 11:02:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:02:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:03:13] [Rank 0] PRINT: step:800/10000 val_loss:5.6334 svd_entropy: attn_qk:H=0.5262,top10E=0.63,eRank=38.1,q75/q25=81.19 attn_vo:H=0.3038,top10E=0.95,eRank=8.2,q75/q25=85.38 mlp_w1:H=0.5359,top10E=0.64,eRank=40.8,q75/q25=7.43 mlp_w2:H=0.6100,top10E=0.51,eRank=64.1,q75/q25=8.05 vo_prod:H=0.1952,top10E=1.00,eRank=4.1,q75/q25=767.32 train_time:65312ms step_avg:81.64ms +[2025-08-22 11:03:13] [Rank 0] PRINT: step:800/10000 val_loss:5.6334 svd_entropy: attn_qk:H=0.5262,top10E=0.63,eRank=38.1,q75/q25=81.19 attn_vo:H=0.3038,top10E=0.95,eRank=8.2,q75/q25=85.38 mlp_w1:H=0.5359,top10E=0.64,eRank=40.8,q75/q25=7.43 mlp_w2:H=0.6100,top10E=0.51,eRank=64.1,q75/q25=8.05 vo_prod:H=0.1952,top10E=1.00,eRank=4.1,q75/q25=767.32 train_time:65312ms step_avg:81.64ms +[2025-08-22 11:03:13] [Rank 0] step:801/10000 train_time:65323ms step_avg:81.55ms +[2025-08-22 11:03:13] [Rank 0] step:801/10000 train_time:65323ms step_avg:81.55ms +[2025-08-22 11:03:15] [Rank 0] step:821/10000 train_time:66693ms step_avg:81.23ms +[2025-08-22 11:03:15] [Rank 0] step:821/10000 train_time:66693ms step_avg:81.23ms +[2025-08-22 11:03:17] [Rank 0] step:841/10000 train_time:68327ms step_avg:81.25ms +[2025-08-22 11:03:17] [Rank 0] step:841/10000 train_time:68327ms step_avg:81.25ms +[2025-08-22 11:03:18] [Rank 0] step:861/10000 train_time:69963ms step_avg:81.26ms +[2025-08-22 11:03:18] [Rank 0] step:861/10000 train_time:69963ms step_avg:81.26ms +[2025-08-22 11:03:20] [Rank 0] step:881/10000 train_time:71599ms step_avg:81.27ms +[2025-08-22 11:03:20] [Rank 0] step:881/10000 train_time:71599ms step_avg:81.27ms +[2025-08-22 11:03:22] [Rank 0] step:901/10000 train_time:73237ms step_avg:81.28ms +[2025-08-22 11:03:22] [Rank 0] step:901/10000 train_time:73237ms step_avg:81.28ms +[2025-08-22 11:03:23] [Rank 0] step:921/10000 train_time:74877ms step_avg:81.30ms +[2025-08-22 11:03:23] [Rank 0] step:921/10000 train_time:74877ms step_avg:81.30ms +[2025-08-22 11:03:25] [Rank 0] step:941/10000 train_time:76516ms 
step_avg:81.31ms +[2025-08-22 11:03:25] [Rank 0] step:941/10000 train_time:76516ms step_avg:81.31ms +[2025-08-22 11:03:26] [Rank 0] step:961/10000 train_time:78154ms step_avg:81.33ms +[2025-08-22 11:03:26] [Rank 0] step:961/10000 train_time:78154ms step_avg:81.33ms +[2025-08-22 11:03:28] [Rank 0] step:981/10000 train_time:79793ms step_avg:81.34ms +[2025-08-22 11:03:28] [Rank 0] step:981/10000 train_time:79793ms step_avg:81.34ms +[2025-08-22 11:03:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:03:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:03:43] [Rank 0] PRINT: step:1000/10000 val_loss:5.4508 svd_entropy: attn_qk:H=0.5416,top10E=0.60,eRank=42.2,q75/q25=81.33 attn_vo:H=0.3274,top10E=0.93,eRank=9.7,q75/q25=80.83 mlp_w1:H=0.5583,top10E=0.60,eRank=47.9,q75/q25=7.24 mlp_w2:H=0.6454,top10E=0.46,eRank=81.2,q75/q25=8.83 vo_prod:H=0.2216,top10E=0.99,eRank=4.8,q75/q25=830.31 train_time:81716ms step_avg:81.72ms +[2025-08-22 11:03:43] [Rank 0] PRINT: step:1000/10000 val_loss:5.4508 svd_entropy: attn_qk:H=0.5416,top10E=0.60,eRank=42.2,q75/q25=81.33 attn_vo:H=0.3274,top10E=0.93,eRank=9.7,q75/q25=80.83 mlp_w1:H=0.5583,top10E=0.60,eRank=47.9,q75/q25=7.24 mlp_w2:H=0.6454,top10E=0.46,eRank=81.2,q75/q25=8.83 vo_prod:H=0.2216,top10E=0.99,eRank=4.8,q75/q25=830.31 train_time:81716ms step_avg:81.72ms +[2025-08-22 11:03:44] [Rank 0] step:1001/10000 train_time:81726ms step_avg:81.64ms +[2025-08-22 11:03:44] [Rank 0] step:1001/10000 train_time:81726ms step_avg:81.64ms +[2025-08-22 11:03:45] [Rank 0] step:1021/10000 train_time:83105ms step_avg:81.40ms +[2025-08-22 11:03:45] [Rank 0] step:1021/10000 train_time:83105ms step_avg:81.40ms +[2025-08-22 11:03:47] [Rank 0] step:1041/10000 train_time:84743ms step_avg:81.40ms +[2025-08-22 11:03:47] [Rank 0] step:1041/10000 train_time:84743ms 
step_avg:81.40ms +[2025-08-22 11:03:48] [Rank 0] step:1061/10000 train_time:86382ms step_avg:81.42ms +[2025-08-22 11:03:48] [Rank 0] step:1061/10000 train_time:86382ms step_avg:81.42ms +[2025-08-22 11:03:50] [Rank 0] step:1081/10000 train_time:88021ms step_avg:81.43ms +[2025-08-22 11:03:50] [Rank 0] step:1081/10000 train_time:88021ms step_avg:81.43ms +[2025-08-22 11:03:52] [Rank 0] step:1101/10000 train_time:89659ms step_avg:81.43ms +[2025-08-22 11:03:52] [Rank 0] step:1101/10000 train_time:89659ms step_avg:81.43ms +[2025-08-22 11:03:53] [Rank 0] step:1121/10000 train_time:91299ms step_avg:81.44ms +[2025-08-22 11:03:53] [Rank 0] step:1121/10000 train_time:91299ms step_avg:81.44ms +[2025-08-22 11:03:55] [Rank 0] step:1141/10000 train_time:92941ms step_avg:81.46ms +[2025-08-22 11:03:55] [Rank 0] step:1141/10000 train_time:92941ms step_avg:81.46ms +[2025-08-22 11:03:57] [Rank 0] step:1161/10000 train_time:94583ms step_avg:81.47ms +[2025-08-22 11:03:57] [Rank 0] step:1161/10000 train_time:94583ms step_avg:81.47ms +[2025-08-22 11:03:58] [Rank 0] step:1181/10000 train_time:96256ms step_avg:81.50ms +[2025-08-22 11:03:58] [Rank 0] step:1181/10000 train_time:96256ms step_avg:81.50ms +[2025-08-22 11:04:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:04:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:04:14] [Rank 0] PRINT: step:1200/10000 val_loss:5.2918 svd_entropy: attn_qk:H=0.5542,top10E=0.57,eRank=45.9,q75/q25=82.29 attn_vo:H=0.3474,top10E=0.91,eRank=11.2,q75/q25=78.32 mlp_w1:H=0.5759,top10E=0.57,eRank=54.3,q75/q25=7.33 mlp_w2:H=0.6725,top10E=0.42,eRank=97.3,q75/q25=9.85 vo_prod:H=0.2423,top10E=0.98,eRank=5.6,q75/q25=916.06 train_time:98208ms step_avg:81.84ms +[2025-08-22 11:04:14] [Rank 0] PRINT: step:1200/10000 val_loss:5.2918 svd_entropy: attn_qk:H=0.5542,top10E=0.57,eRank=45.9,q75/q25=82.29 attn_vo:H=0.3474,top10E=0.91,eRank=11.2,q75/q25=78.32 mlp_w1:H=0.5759,top10E=0.57,eRank=54.3,q75/q25=7.33 mlp_w2:H=0.6725,top10E=0.42,eRank=97.3,q75/q25=9.85 vo_prod:H=0.2423,top10E=0.98,eRank=5.6,q75/q25=916.06 train_time:98208ms step_avg:81.84ms +[2025-08-22 11:04:14] [Rank 0] step:1201/10000 train_time:98218ms step_avg:81.78ms +[2025-08-22 11:04:14] [Rank 0] step:1201/10000 train_time:98218ms step_avg:81.78ms +[2025-08-22 11:04:15] [Rank 0] step:1221/10000 train_time:99597ms step_avg:81.57ms +[2025-08-22 11:04:15] [Rank 0] step:1221/10000 train_time:99597ms step_avg:81.57ms +[2025-08-22 11:04:17] [Rank 0] step:1241/10000 train_time:101237ms step_avg:81.58ms +[2025-08-22 11:04:17] [Rank 0] step:1241/10000 train_time:101237ms step_avg:81.58ms +[2025-08-22 11:04:19] [Rank 0] step:1261/10000 train_time:102878ms step_avg:81.58ms +[2025-08-22 11:04:19] [Rank 0] step:1261/10000 train_time:102878ms step_avg:81.58ms +[2025-08-22 11:04:20] [Rank 0] step:1281/10000 train_time:104520ms step_avg:81.59ms +[2025-08-22 11:04:20] [Rank 0] step:1281/10000 train_time:104520ms step_avg:81.59ms +[2025-08-22 11:04:22] [Rank 0] step:1301/10000 train_time:106163ms step_avg:81.60ms +[2025-08-22 11:04:22] [Rank 0] step:1301/10000 train_time:106163ms step_avg:81.60ms +[2025-08-22 11:04:24] [Rank 0] step:1321/10000 train_time:107807ms step_avg:81.61ms +[2025-08-22 11:04:24] [Rank 0] step:1321/10000 train_time:107807ms step_avg:81.61ms +[2025-08-22 11:04:25] [Rank 0] 
step:1341/10000 train_time:109453ms step_avg:81.62ms +[2025-08-22 11:04:25] [Rank 0] step:1341/10000 train_time:109453ms step_avg:81.62ms +[2025-08-22 11:04:27] [Rank 0] step:1361/10000 train_time:111099ms step_avg:81.63ms +[2025-08-22 11:04:27] [Rank 0] step:1361/10000 train_time:111099ms step_avg:81.63ms +[2025-08-22 11:04:29] [Rank 0] step:1381/10000 train_time:112745ms step_avg:81.64ms +[2025-08-22 11:04:29] [Rank 0] step:1381/10000 train_time:112745ms step_avg:81.64ms +[2025-08-22 11:04:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:04:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:04:44] [Rank 0] PRINT: step:1400/10000 val_loss:5.2000 svd_entropy: attn_qk:H=0.5647,top10E=0.55,eRank=49.3,q75/q25=81.79 attn_vo:H=0.3646,top10E=0.88,eRank=12.8,q75/q25=77.39 mlp_w1:H=0.5905,top10E=0.55,eRank=60.1,q75/q25=7.53 mlp_w2:H=0.6950,top10E=0.38,eRank=112.8,q75/q25=10.94 vo_prod:H=0.2599,top10E=0.97,eRank=6.4,q75/q25=1012.34 train_time:114675ms step_avg:81.91ms +[2025-08-22 11:04:44] [Rank 0] PRINT: step:1400/10000 val_loss:5.2000 svd_entropy: attn_qk:H=0.5647,top10E=0.55,eRank=49.3,q75/q25=81.79 attn_vo:H=0.3646,top10E=0.88,eRank=12.8,q75/q25=77.39 mlp_w1:H=0.5905,top10E=0.55,eRank=60.1,q75/q25=7.53 mlp_w2:H=0.6950,top10E=0.38,eRank=112.8,q75/q25=10.94 vo_prod:H=0.2599,top10E=0.97,eRank=6.4,q75/q25=1012.34 train_time:114675ms step_avg:81.91ms +[2025-08-22 11:04:44] [Rank 0] step:1401/10000 train_time:114684ms step_avg:81.86ms +[2025-08-22 11:04:44] [Rank 0] step:1401/10000 train_time:114684ms step_avg:81.86ms +[2025-08-22 11:04:45] [Rank 0] step:1421/10000 train_time:116048ms step_avg:81.67ms +[2025-08-22 11:04:45] [Rank 0] step:1421/10000 train_time:116048ms step_avg:81.67ms +[2025-08-22 11:04:47] [Rank 0] step:1441/10000 train_time:117687ms step_avg:81.67ms +[2025-08-22 
11:04:47] [Rank 0] step:1441/10000 train_time:117687ms step_avg:81.67ms +[2025-08-22 11:04:49] [Rank 0] step:1461/10000 train_time:119329ms step_avg:81.68ms +[2025-08-22 11:04:49] [Rank 0] step:1461/10000 train_time:119329ms step_avg:81.68ms +[2025-08-22 11:04:50] [Rank 0] step:1481/10000 train_time:120971ms step_avg:81.68ms +[2025-08-22 11:04:50] [Rank 0] step:1481/10000 train_time:120971ms step_avg:81.68ms +[2025-08-22 11:04:52] [Rank 0] step:1501/10000 train_time:122622ms step_avg:81.69ms +[2025-08-22 11:04:52] [Rank 0] step:1501/10000 train_time:122622ms step_avg:81.69ms +[2025-08-22 11:04:54] [Rank 0] step:1521/10000 train_time:124275ms step_avg:81.71ms +[2025-08-22 11:04:54] [Rank 0] step:1521/10000 train_time:124275ms step_avg:81.71ms +[2025-08-22 11:04:55] [Rank 0] step:1541/10000 train_time:125931ms step_avg:81.72ms +[2025-08-22 11:04:55] [Rank 0] step:1541/10000 train_time:125931ms step_avg:81.72ms +[2025-08-22 11:04:57] [Rank 0] step:1561/10000 train_time:127583ms step_avg:81.73ms +[2025-08-22 11:04:57] [Rank 0] step:1561/10000 train_time:127583ms step_avg:81.73ms +[2025-08-22 11:04:59] [Rank 0] step:1581/10000 train_time:129237ms step_avg:81.74ms +[2025-08-22 11:04:59] [Rank 0] step:1581/10000 train_time:129237ms step_avg:81.74ms +[2025-08-22 11:05:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:05:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:05:14] [Rank 0] PRINT: step:1600/10000 val_loss:5.0750 svd_entropy: attn_qk:H=0.5741,top10E=0.53,eRank=52.5,q75/q25=81.58 attn_vo:H=0.3801,top10E=0.86,eRank=14.5,q75/q25=77.74 mlp_w1:H=0.6029,top10E=0.52,eRank=65.6,q75/q25=7.80 mlp_w2:H=0.7129,top10E=0.36,eRank=126.9,q75/q25=11.94 vo_prod:H=0.2774,top10E=0.96,eRank=7.3,q75/q25=1151.58 train_time:131178ms step_avg:81.99ms +[2025-08-22 11:05:14] [Rank 0] PRINT: step:1600/10000 val_loss:5.0750 svd_entropy: attn_qk:H=0.5741,top10E=0.53,eRank=52.5,q75/q25=81.58 attn_vo:H=0.3801,top10E=0.86,eRank=14.5,q75/q25=77.74 mlp_w1:H=0.6029,top10E=0.52,eRank=65.6,q75/q25=7.80 mlp_w2:H=0.7129,top10E=0.36,eRank=126.9,q75/q25=11.94 vo_prod:H=0.2774,top10E=0.96,eRank=7.3,q75/q25=1151.58 train_time:131178ms step_avg:81.99ms +[2025-08-22 11:05:14] [Rank 0] step:1601/10000 train_time:131187ms step_avg:81.94ms +[2025-08-22 11:05:14] [Rank 0] step:1601/10000 train_time:131187ms step_avg:81.94ms +[2025-08-22 11:05:16] [Rank 0] step:1621/10000 train_time:132556ms step_avg:81.77ms +[2025-08-22 11:05:16] [Rank 0] step:1621/10000 train_time:132556ms step_avg:81.77ms +[2025-08-22 11:05:17] [Rank 0] step:1641/10000 train_time:134206ms step_avg:81.78ms +[2025-08-22 11:05:17] [Rank 0] step:1641/10000 train_time:134206ms step_avg:81.78ms +[2025-08-22 11:05:19] [Rank 0] step:1661/10000 train_time:135857ms step_avg:81.79ms +[2025-08-22 11:05:19] [Rank 0] step:1661/10000 train_time:135857ms step_avg:81.79ms +[2025-08-22 11:05:20] [Rank 0] step:1681/10000 train_time:137509ms step_avg:81.80ms +[2025-08-22 11:05:20] [Rank 0] step:1681/10000 train_time:137509ms step_avg:81.80ms +[2025-08-22 11:05:22] [Rank 0] step:1701/10000 train_time:139162ms step_avg:81.81ms +[2025-08-22 11:05:22] [Rank 0] step:1701/10000 train_time:139162ms step_avg:81.81ms +[2025-08-22 11:05:24] [Rank 0] step:1721/10000 train_time:140817ms step_avg:81.82ms +[2025-08-22 11:05:24] [Rank 0] step:1721/10000 train_time:140817ms step_avg:81.82ms +[2025-08-22 11:05:25] [Rank 
0] step:1741/10000 train_time:142472ms step_avg:81.83ms +[2025-08-22 11:05:25] [Rank 0] step:1741/10000 train_time:142472ms step_avg:81.83ms +[2025-08-22 11:05:27] [Rank 0] step:1761/10000 train_time:144127ms step_avg:81.84ms +[2025-08-22 11:05:27] [Rank 0] step:1761/10000 train_time:144127ms step_avg:81.84ms +[2025-08-22 11:05:29] [Rank 0] step:1781/10000 train_time:145782ms step_avg:81.85ms +[2025-08-22 11:05:29] [Rank 0] step:1781/10000 train_time:145782ms step_avg:81.85ms +[2025-08-22 11:05:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:05:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:05:44] [Rank 0] PRINT: step:1800/10000 val_loss:4.9380 svd_entropy: attn_qk:H=0.5815,top10E=0.51,eRank=55.3,q75/q25=81.99 attn_vo:H=0.3940,top10E=0.84,eRank=16.2,q75/q25=78.14 mlp_w1:H=0.6131,top10E=0.51,eRank=70.6,q75/q25=8.13 mlp_w2:H=0.7272,top10E=0.34,eRank=139.5,q75/q25=12.77 vo_prod:H=0.2926,top10E=0.94,eRank=8.3,q75/q25=1321.43 train_time:147724ms step_avg:82.07ms +[2025-08-22 11:05:44] [Rank 0] PRINT: step:1800/10000 val_loss:4.9380 svd_entropy: attn_qk:H=0.5815,top10E=0.51,eRank=55.3,q75/q25=81.99 attn_vo:H=0.3940,top10E=0.84,eRank=16.2,q75/q25=78.14 mlp_w1:H=0.6131,top10E=0.51,eRank=70.6,q75/q25=8.13 mlp_w2:H=0.7272,top10E=0.34,eRank=139.5,q75/q25=12.77 vo_prod:H=0.2926,top10E=0.94,eRank=8.3,q75/q25=1321.43 train_time:147724ms step_avg:82.07ms +[2025-08-22 11:05:44] [Rank 0] step:1801/10000 train_time:147735ms step_avg:82.03ms +[2025-08-22 11:05:44] [Rank 0] step:1801/10000 train_time:147735ms step_avg:82.03ms +[2025-08-22 11:05:46] [Rank 0] step:1821/10000 train_time:149125ms step_avg:81.89ms +[2025-08-22 11:05:46] [Rank 0] step:1821/10000 train_time:149125ms step_avg:81.89ms +[2025-08-22 11:05:47] [Rank 0] step:1841/10000 train_time:150777ms step_avg:81.90ms +[2025-08-22 
11:05:47] [Rank 0] step:1841/10000 train_time:150777ms step_avg:81.90ms +[2025-08-22 11:05:49] [Rank 0] step:1861/10000 train_time:152430ms step_avg:81.91ms +[2025-08-22 11:05:49] [Rank 0] step:1861/10000 train_time:152430ms step_avg:81.91ms +[2025-08-22 11:05:51] [Rank 0] step:1881/10000 train_time:154086ms step_avg:81.92ms +[2025-08-22 11:05:51] [Rank 0] step:1881/10000 train_time:154086ms step_avg:81.92ms +[2025-08-22 11:05:52] [Rank 0] step:1901/10000 train_time:155742ms step_avg:81.93ms +[2025-08-22 11:05:52] [Rank 0] step:1901/10000 train_time:155742ms step_avg:81.93ms +[2025-08-22 11:05:54] [Rank 0] step:1921/10000 train_time:157400ms step_avg:81.94ms +[2025-08-22 11:05:54] [Rank 0] step:1921/10000 train_time:157400ms step_avg:81.94ms +[2025-08-22 11:05:56] [Rank 0] step:1941/10000 train_time:159058ms step_avg:81.95ms +[2025-08-22 11:05:56] [Rank 0] step:1941/10000 train_time:159058ms step_avg:81.95ms +[2025-08-22 11:05:57] [Rank 0] step:1961/10000 train_time:160715ms step_avg:81.96ms +[2025-08-22 11:05:57] [Rank 0] step:1961/10000 train_time:160715ms step_avg:81.96ms +[2025-08-22 11:05:59] [Rank 0] step:1981/10000 train_time:162374ms step_avg:81.97ms +[2025-08-22 11:05:59] [Rank 0] step:1981/10000 train_time:162374ms step_avg:81.97ms +[2025-08-22 11:06:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:06:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:06:14] [Rank 0] PRINT: step:2000/10000 val_loss:4.8670 svd_entropy: attn_qk:H=0.5885,top10E=0.50,eRank=58.1,q75/q25=82.69 attn_vo:H=0.4065,top10E=0.82,eRank=17.9,q75/q25=78.83 mlp_w1:H=0.6219,top10E=0.49,eRank=75.1,q75/q25=8.52 mlp_w2:H=0.7395,top10E=0.32,eRank=151.3,q75/q25=13.53 vo_prod:H=0.3065,top10E=0.93,eRank=9.2,q75/q25=1524.68 train_time:164320ms step_avg:82.16ms +[2025-08-22 11:06:14] [Rank 0] PRINT: step:2000/10000 val_loss:4.8670 svd_entropy: attn_qk:H=0.5885,top10E=0.50,eRank=58.1,q75/q25=82.69 attn_vo:H=0.4065,top10E=0.82,eRank=17.9,q75/q25=78.83 mlp_w1:H=0.6219,top10E=0.49,eRank=75.1,q75/q25=8.52 mlp_w2:H=0.7395,top10E=0.32,eRank=151.3,q75/q25=13.53 vo_prod:H=0.3065,top10E=0.93,eRank=9.2,q75/q25=1524.68 train_time:164320ms step_avg:82.16ms +[2025-08-22 11:06:14] [Rank 0] step:2001/10000 train_time:164330ms step_avg:82.12ms +[2025-08-22 11:06:14] [Rank 0] step:2001/10000 train_time:164330ms step_avg:82.12ms +[2025-08-22 11:06:16] [Rank 0] step:2021/10000 train_time:165710ms step_avg:81.99ms +[2025-08-22 11:06:16] [Rank 0] step:2021/10000 train_time:165710ms step_avg:81.99ms +[2025-08-22 11:06:18] [Rank 0] step:2041/10000 train_time:167750ms step_avg:82.19ms +[2025-08-22 11:06:18] [Rank 0] step:2041/10000 train_time:167750ms step_avg:82.19ms +[2025-08-22 11:06:20] [Rank 0] step:2061/10000 train_time:169402ms step_avg:82.19ms +[2025-08-22 11:06:20] [Rank 0] step:2061/10000 train_time:169402ms step_avg:82.19ms +[2025-08-22 11:06:21] [Rank 0] step:2081/10000 train_time:171056ms step_avg:82.20ms +[2025-08-22 11:06:21] [Rank 0] step:2081/10000 train_time:171056ms step_avg:82.20ms +[2025-08-22 11:06:23] [Rank 0] step:2101/10000 train_time:172711ms step_avg:82.20ms +[2025-08-22 11:06:23] [Rank 0] step:2101/10000 train_time:172711ms step_avg:82.20ms +[2025-08-22 11:06:24] [Rank 0] step:2121/10000 train_time:174366ms step_avg:82.21ms +[2025-08-22 11:06:24] [Rank 0] step:2121/10000 train_time:174366ms step_avg:82.21ms +[2025-08-22 11:06:26] [Rank 
0] step:2141/10000 train_time:176021ms step_avg:82.21ms +[2025-08-22 11:06:26] [Rank 0] step:2141/10000 train_time:176021ms step_avg:82.21ms +[2025-08-22 11:06:28] [Rank 0] step:2161/10000 train_time:177676ms step_avg:82.22ms +[2025-08-22 11:06:28] [Rank 0] step:2161/10000 train_time:177676ms step_avg:82.22ms +[2025-08-22 11:06:29] [Rank 0] step:2181/10000 train_time:179334ms step_avg:82.23ms +[2025-08-22 11:06:29] [Rank 0] step:2181/10000 train_time:179334ms step_avg:82.23ms +[2025-08-22 11:06:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:06:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:06:44] [Rank 0] PRINT: step:2200/10000 val_loss:4.7638 svd_entropy: attn_qk:H=0.5942,top10E=0.49,eRank=60.4,q75/q25=82.02 attn_vo:H=0.4165,top10E=0.80,eRank=19.5,q75/q25=80.40 mlp_w1:H=0.6299,top10E=0.48,eRank=79.5,q75/q25=8.87 mlp_w2:H=0.7494,top10E=0.31,eRank=161.4,q75/q25=14.16 vo_prod:H=0.3171,top10E=0.92,eRank=10.1,q75/q25=1758.25 train_time:181276ms step_avg:82.40ms +[2025-08-22 11:06:44] [Rank 0] PRINT: step:2200/10000 val_loss:4.7638 svd_entropy: attn_qk:H=0.5942,top10E=0.49,eRank=60.4,q75/q25=82.02 attn_vo:H=0.4165,top10E=0.80,eRank=19.5,q75/q25=80.40 mlp_w1:H=0.6299,top10E=0.48,eRank=79.5,q75/q25=8.87 mlp_w2:H=0.7494,top10E=0.31,eRank=161.4,q75/q25=14.16 vo_prod:H=0.3171,top10E=0.92,eRank=10.1,q75/q25=1758.25 train_time:181276ms step_avg:82.40ms +[2025-08-22 11:06:44] [Rank 0] step:2201/10000 train_time:181285ms step_avg:82.36ms +[2025-08-22 11:06:44] [Rank 0] step:2201/10000 train_time:181285ms step_avg:82.36ms +[2025-08-22 11:06:46] [Rank 0] step:2221/10000 train_time:182666ms step_avg:82.24ms +[2025-08-22 11:06:46] [Rank 0] step:2221/10000 train_time:182666ms step_avg:82.24ms +[2025-08-22 11:06:48] [Rank 0] step:2241/10000 train_time:184348ms step_avg:82.26ms 
+[2025-08-22 11:06:48] [Rank 0] step:2241/10000 train_time:184348ms step_avg:82.26ms +[2025-08-22 11:06:49] [Rank 0] step:2261/10000 train_time:186045ms step_avg:82.28ms +[2025-08-22 11:06:49] [Rank 0] step:2261/10000 train_time:186045ms step_avg:82.28ms +[2025-08-22 11:06:51] [Rank 0] step:2281/10000 train_time:187742ms step_avg:82.31ms +[2025-08-22 11:06:51] [Rank 0] step:2281/10000 train_time:187742ms step_avg:82.31ms +[2025-08-22 11:06:53] [Rank 0] step:2301/10000 train_time:189440ms step_avg:82.33ms +[2025-08-22 11:06:53] [Rank 0] step:2301/10000 train_time:189440ms step_avg:82.33ms +[2025-08-22 11:06:55] [Rank 0] step:2321/10000 train_time:191138ms step_avg:82.35ms +[2025-08-22 11:06:55] [Rank 0] step:2321/10000 train_time:191138ms step_avg:82.35ms +[2025-08-22 11:06:56] [Rank 0] step:2341/10000 train_time:192836ms step_avg:82.37ms +[2025-08-22 11:06:56] [Rank 0] step:2341/10000 train_time:192836ms step_avg:82.37ms +[2025-08-22 11:06:58] [Rank 0] step:2361/10000 train_time:194534ms step_avg:82.39ms +[2025-08-22 11:06:58] [Rank 0] step:2361/10000 train_time:194534ms step_avg:82.39ms +[2025-08-22 11:07:00] [Rank 0] step:2381/10000 train_time:196235ms step_avg:82.42ms +[2025-08-22 11:07:00] [Rank 0] step:2381/10000 train_time:196235ms step_avg:82.42ms +[2025-08-22 11:07:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:07:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:07:15] [Rank 0] PRINT: step:2400/10000 val_loss:4.6772 svd_entropy: attn_qk:H=0.5990,top10E=0.48,eRank=62.6,q75/q25=82.48 attn_vo:H=0.4254,top10E=0.79,eRank=21.0,q75/q25=82.47 mlp_w1:H=0.6368,top10E=0.46,eRank=83.5,q75/q25=9.26 mlp_w2:H=0.7581,top10E=0.30,eRank=171.0,q75/q25=14.56 vo_prod:H=0.3266,top10E=0.91,eRank=10.9,q75/q25=2043.88 train_time:198230ms step_avg:82.60ms +[2025-08-22 11:07:15] [Rank 0] PRINT: step:2400/10000 val_loss:4.6772 svd_entropy: attn_qk:H=0.5990,top10E=0.48,eRank=62.6,q75/q25=82.48 attn_vo:H=0.4254,top10E=0.79,eRank=21.0,q75/q25=82.47 mlp_w1:H=0.6368,top10E=0.46,eRank=83.5,q75/q25=9.26 mlp_w2:H=0.7581,top10E=0.30,eRank=171.0,q75/q25=14.56 vo_prod:H=0.3266,top10E=0.91,eRank=10.9,q75/q25=2043.88 train_time:198230ms step_avg:82.60ms +[2025-08-22 11:07:15] [Rank 0] step:2401/10000 train_time:198239ms step_avg:82.57ms +[2025-08-22 11:07:15] [Rank 0] step:2401/10000 train_time:198239ms step_avg:82.57ms +[2025-08-22 11:07:17] [Rank 0] step:2421/10000 train_time:199656ms step_avg:82.47ms +[2025-08-22 11:07:17] [Rank 0] step:2421/10000 train_time:199656ms step_avg:82.47ms +[2025-08-22 11:07:18] [Rank 0] step:2441/10000 train_time:201353ms step_avg:82.49ms +[2025-08-22 11:07:18] [Rank 0] step:2441/10000 train_time:201353ms step_avg:82.49ms +[2025-08-22 11:07:20] [Rank 0] step:2461/10000 train_time:203052ms step_avg:82.51ms +[2025-08-22 11:07:20] [Rank 0] step:2461/10000 train_time:203052ms step_avg:82.51ms +[2025-08-22 11:07:22] [Rank 0] step:2481/10000 train_time:204751ms step_avg:82.53ms +[2025-08-22 11:07:22] [Rank 0] step:2481/10000 train_time:204751ms step_avg:82.53ms +[2025-08-22 11:07:23] [Rank 0] step:2501/10000 train_time:206452ms step_avg:82.55ms +[2025-08-22 11:07:23] [Rank 0] step:2501/10000 train_time:206452ms step_avg:82.55ms +[2025-08-22 11:07:25] [Rank 0] step:2521/10000 train_time:208153ms step_avg:82.57ms +[2025-08-22 11:07:25] [Rank 0] step:2521/10000 train_time:208153ms step_avg:82.57ms +[2025-08-22 11:07:27] 
[Rank 0] step:2541/10000 train_time:209855ms step_avg:82.59ms +[2025-08-22 11:07:27] [Rank 0] step:2541/10000 train_time:209855ms step_avg:82.59ms +[2025-08-22 11:07:29] [Rank 0] step:2561/10000 train_time:211558ms step_avg:82.61ms +[2025-08-22 11:07:29] [Rank 0] step:2561/10000 train_time:211558ms step_avg:82.61ms +[2025-08-22 11:07:30] [Rank 0] step:2581/10000 train_time:213263ms step_avg:82.63ms +[2025-08-22 11:07:30] [Rank 0] step:2581/10000 train_time:213263ms step_avg:82.63ms +[2025-08-22 11:07:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:07:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:07:45] [Rank 0] PRINT: step:2600/10000 val_loss:4.6056 svd_entropy: attn_qk:H=0.6039,top10E=0.47,eRank=64.7,q75/q25=83.80 attn_vo:H=0.4333,top10E=0.77,eRank=22.5,q75/q25=85.16 mlp_w1:H=0.6430,top10E=0.45,eRank=87.3,q75/q25=9.66 mlp_w2:H=0.7656,top10E=0.29,eRank=179.6,q75/q25=14.87 vo_prod:H=0.3343,top10E=0.90,eRank=11.7,q75/q25=2457.69 train_time:215263ms step_avg:82.79ms +[2025-08-22 11:07:45] [Rank 0] PRINT: step:2600/10000 val_loss:4.6056 svd_entropy: attn_qk:H=0.6039,top10E=0.47,eRank=64.7,q75/q25=83.80 attn_vo:H=0.4333,top10E=0.77,eRank=22.5,q75/q25=85.16 mlp_w1:H=0.6430,top10E=0.45,eRank=87.3,q75/q25=9.66 mlp_w2:H=0.7656,top10E=0.29,eRank=179.6,q75/q25=14.87 vo_prod:H=0.3343,top10E=0.90,eRank=11.7,q75/q25=2457.69 train_time:215263ms step_avg:82.79ms +[2025-08-22 11:07:45] [Rank 0] step:2601/10000 train_time:215273ms step_avg:82.77ms +[2025-08-22 11:07:45] [Rank 0] step:2601/10000 train_time:215273ms step_avg:82.77ms +[2025-08-22 11:07:47] [Rank 0] step:2621/10000 train_time:216709ms step_avg:82.68ms +[2025-08-22 11:07:47] [Rank 0] step:2621/10000 train_time:216709ms step_avg:82.68ms +[2025-08-22 11:07:49] [Rank 0] step:2641/10000 train_time:218406ms step_avg:82.70ms 
+[2025-08-22 11:07:49] [Rank 0] step:2641/10000 train_time:218406ms step_avg:82.70ms +[2025-08-22 11:07:51] [Rank 0] step:2661/10000 train_time:220102ms step_avg:82.71ms +[2025-08-22 11:07:51] [Rank 0] step:2661/10000 train_time:220102ms step_avg:82.71ms +[2025-08-22 11:07:52] [Rank 0] step:2681/10000 train_time:221799ms step_avg:82.73ms +[2025-08-22 11:07:52] [Rank 0] step:2681/10000 train_time:221799ms step_avg:82.73ms +[2025-08-22 11:07:54] [Rank 0] step:2701/10000 train_time:223500ms step_avg:82.75ms +[2025-08-22 11:07:54] [Rank 0] step:2701/10000 train_time:223500ms step_avg:82.75ms +[2025-08-22 11:07:56] [Rank 0] step:2721/10000 train_time:225198ms step_avg:82.76ms +[2025-08-22 11:07:56] [Rank 0] step:2721/10000 train_time:225198ms step_avg:82.76ms +[2025-08-22 11:07:57] [Rank 0] step:2741/10000 train_time:226898ms step_avg:82.78ms +[2025-08-22 11:07:57] [Rank 0] step:2741/10000 train_time:226898ms step_avg:82.78ms +[2025-08-22 11:07:59] [Rank 0] step:2761/10000 train_time:228597ms step_avg:82.80ms +[2025-08-22 11:07:59] [Rank 0] step:2761/10000 train_time:228597ms step_avg:82.80ms +[2025-08-22 11:08:01] [Rank 0] step:2781/10000 train_time:230299ms step_avg:82.81ms +[2025-08-22 11:08:01] [Rank 0] step:2781/10000 train_time:230299ms step_avg:82.81ms +[2025-08-22 11:08:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:08:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:08:16] [Rank 0] PRINT: step:2800/10000 val_loss:4.5669 svd_entropy: attn_qk:H=0.6081,top10E=0.46,eRank=66.6,q75/q25=85.50 attn_vo:H=0.4408,top10E=0.76,eRank=24.0,q75/q25=87.84 mlp_w1:H=0.6488,top10E=0.44,eRank=91.0,q75/q25=10.02 mlp_w2:H=0.7725,top10E=0.28,eRank=187.7,q75/q25=15.15 vo_prod:H=0.3423,top10E=0.89,eRank=12.5,q75/q25=2815.44 train_time:232291ms step_avg:82.96ms +[2025-08-22 11:08:16] [Rank 0] PRINT: step:2800/10000 val_loss:4.5669 svd_entropy: attn_qk:H=0.6081,top10E=0.46,eRank=66.6,q75/q25=85.50 attn_vo:H=0.4408,top10E=0.76,eRank=24.0,q75/q25=87.84 mlp_w1:H=0.6488,top10E=0.44,eRank=91.0,q75/q25=10.02 mlp_w2:H=0.7725,top10E=0.28,eRank=187.7,q75/q25=15.15 vo_prod:H=0.3423,top10E=0.89,eRank=12.5,q75/q25=2815.44 train_time:232291ms step_avg:82.96ms +[2025-08-22 11:08:16] [Rank 0] step:2801/10000 train_time:232300ms step_avg:82.93ms +[2025-08-22 11:08:16] [Rank 0] step:2801/10000 train_time:232300ms step_avg:82.93ms +[2025-08-22 11:08:18] [Rank 0] step:2821/10000 train_time:233718ms step_avg:82.85ms +[2025-08-22 11:08:18] [Rank 0] step:2821/10000 train_time:233718ms step_avg:82.85ms +[2025-08-22 11:08:19] [Rank 0] step:2841/10000 train_time:235416ms step_avg:82.86ms +[2025-08-22 11:08:19] [Rank 0] step:2841/10000 train_time:235416ms step_avg:82.86ms +[2025-08-22 11:08:21] [Rank 0] step:2861/10000 train_time:237111ms step_avg:82.88ms +[2025-08-22 11:08:21] [Rank 0] step:2861/10000 train_time:237111ms step_avg:82.88ms +[2025-08-22 11:08:23] [Rank 0] step:2881/10000 train_time:238808ms step_avg:82.89ms +[2025-08-22 11:08:23] [Rank 0] step:2881/10000 train_time:238808ms step_avg:82.89ms +[2025-08-22 11:08:24] [Rank 0] step:2901/10000 train_time:240505ms step_avg:82.90ms +[2025-08-22 11:08:24] [Rank 0] step:2901/10000 train_time:240505ms step_avg:82.90ms +[2025-08-22 11:08:26] [Rank 0] step:2921/10000 train_time:242204ms step_avg:82.92ms +[2025-08-22 11:08:26] [Rank 0] step:2921/10000 train_time:242204ms step_avg:82.92ms +[2025-08-22 11:08:28] 
[Rank 0] step:2941/10000 train_time:243906ms step_avg:82.93ms +[2025-08-22 11:08:28] [Rank 0] step:2941/10000 train_time:243906ms step_avg:82.93ms +[2025-08-22 11:08:29] [Rank 0] step:2961/10000 train_time:245607ms step_avg:82.95ms +[2025-08-22 11:08:29] [Rank 0] step:2961/10000 train_time:245607ms step_avg:82.95ms +[2025-08-22 11:08:31] [Rank 0] step:2981/10000 train_time:247314ms step_avg:82.96ms +[2025-08-22 11:08:31] [Rank 0] step:2981/10000 train_time:247314ms step_avg:82.96ms +[2025-08-22 11:08:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:08:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:08:46] [Rank 0] PRINT: step:3000/10000 val_loss:4.5284 svd_entropy: attn_qk:H=0.6122,top10E=0.46,eRank=68.6,q75/q25=86.69 attn_vo:H=0.4477,top10E=0.75,eRank=25.4,q75/q25=90.61 mlp_w1:H=0.6539,top10E=0.43,eRank=94.4,q75/q25=10.38 mlp_w2:H=0.7785,top10E=0.27,eRank=195.1,q75/q25=15.35 vo_prod:H=0.3490,top10E=0.88,eRank=13.3,q75/q25=3133.71 train_time:249318ms step_avg:83.11ms +[2025-08-22 11:08:46] [Rank 0] PRINT: step:3000/10000 val_loss:4.5284 svd_entropy: attn_qk:H=0.6122,top10E=0.46,eRank=68.6,q75/q25=86.69 attn_vo:H=0.4477,top10E=0.75,eRank=25.4,q75/q25=90.61 mlp_w1:H=0.6539,top10E=0.43,eRank=94.4,q75/q25=10.38 mlp_w2:H=0.7785,top10E=0.27,eRank=195.1,q75/q25=15.35 vo_prod:H=0.3490,top10E=0.88,eRank=13.3,q75/q25=3133.71 train_time:249318ms step_avg:83.11ms +[2025-08-22 11:08:46] [Rank 0] step:3001/10000 train_time:249327ms step_avg:83.08ms +[2025-08-22 11:08:46] [Rank 0] step:3001/10000 train_time:249327ms step_avg:83.08ms +[2025-08-22 11:08:48] [Rank 0] step:3021/10000 train_time:250759ms step_avg:83.01ms +[2025-08-22 11:08:48] [Rank 0] step:3021/10000 train_time:250759ms step_avg:83.01ms +[2025-08-22 11:08:50] [Rank 0] step:3041/10000 train_time:252464ms step_avg:83.02ms 
+[2025-08-22 11:08:50] [Rank 0] step:3041/10000 train_time:252464ms step_avg:83.02ms +[2025-08-22 11:08:52] [Rank 0] step:3061/10000 train_time:254171ms step_avg:83.04ms +[2025-08-22 11:08:52] [Rank 0] step:3061/10000 train_time:254171ms step_avg:83.04ms +[2025-08-22 11:08:53] [Rank 0] step:3081/10000 train_time:255878ms step_avg:83.05ms +[2025-08-22 11:08:53] [Rank 0] step:3081/10000 train_time:255878ms step_avg:83.05ms +[2025-08-22 11:08:55] [Rank 0] step:3101/10000 train_time:257588ms step_avg:83.07ms +[2025-08-22 11:08:55] [Rank 0] step:3101/10000 train_time:257588ms step_avg:83.07ms +[2025-08-22 11:08:57] [Rank 0] step:3121/10000 train_time:259296ms step_avg:83.08ms +[2025-08-22 11:08:57] [Rank 0] step:3121/10000 train_time:259296ms step_avg:83.08ms +[2025-08-22 11:08:58] [Rank 0] step:3141/10000 train_time:261008ms step_avg:83.10ms +[2025-08-22 11:08:58] [Rank 0] step:3141/10000 train_time:261008ms step_avg:83.10ms +[2025-08-22 11:09:00] [Rank 0] step:3161/10000 train_time:262720ms step_avg:83.11ms +[2025-08-22 11:09:00] [Rank 0] step:3161/10000 train_time:262720ms step_avg:83.11ms +[2025-08-22 11:09:02] [Rank 0] step:3181/10000 train_time:264434ms step_avg:83.13ms +[2025-08-22 11:09:02] [Rank 0] step:3181/10000 train_time:264434ms step_avg:83.13ms +[2025-08-22 11:09:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:09:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:09:17] [Rank 0] PRINT: step:3200/10000 val_loss:4.4921 svd_entropy: attn_qk:H=0.6160,top10E=0.45,eRank=70.4,q75/q25=88.17 attn_vo:H=0.4538,top10E=0.74,eRank=26.7,q75/q25=93.67 mlp_w1:H=0.6585,top10E=0.43,eRank=97.6,q75/q25=10.72 mlp_w2:H=0.7839,top10E=0.26,eRank=201.9,q75/q25=15.45 vo_prod:H=0.3551,top10E=0.87,eRank=14.0,q75/q25=3521.49 train_time:266445ms step_avg:83.26ms +[2025-08-22 11:09:17] [Rank 0] PRINT: step:3200/10000 val_loss:4.4921 svd_entropy: attn_qk:H=0.6160,top10E=0.45,eRank=70.4,q75/q25=88.17 attn_vo:H=0.4538,top10E=0.74,eRank=26.7,q75/q25=93.67 mlp_w1:H=0.6585,top10E=0.43,eRank=97.6,q75/q25=10.72 mlp_w2:H=0.7839,top10E=0.26,eRank=201.9,q75/q25=15.45 vo_prod:H=0.3551,top10E=0.87,eRank=14.0,q75/q25=3521.49 train_time:266445ms step_avg:83.26ms +[2025-08-22 11:09:17] [Rank 0] step:3201/10000 train_time:266455ms step_avg:83.24ms +[2025-08-22 11:09:17] [Rank 0] step:3201/10000 train_time:266455ms step_avg:83.24ms +[2025-08-22 11:09:19] [Rank 0] step:3221/10000 train_time:267896ms step_avg:83.17ms +[2025-08-22 11:09:19] [Rank 0] step:3221/10000 train_time:267896ms step_avg:83.17ms +[2025-08-22 11:09:20] [Rank 0] step:3241/10000 train_time:269602ms step_avg:83.18ms +[2025-08-22 11:09:20] [Rank 0] step:3241/10000 train_time:269602ms step_avg:83.18ms +[2025-08-22 11:09:22] [Rank 0] step:3261/10000 train_time:271306ms step_avg:83.20ms +[2025-08-22 11:09:22] [Rank 0] step:3261/10000 train_time:271306ms step_avg:83.20ms +[2025-08-22 11:09:24] [Rank 0] step:3281/10000 train_time:273012ms step_avg:83.21ms +[2025-08-22 11:09:24] [Rank 0] step:3281/10000 train_time:273012ms step_avg:83.21ms +[2025-08-22 11:09:26] [Rank 0] step:3301/10000 train_time:274719ms step_avg:83.22ms +[2025-08-22 11:09:26] [Rank 0] step:3301/10000 train_time:274719ms step_avg:83.22ms +[2025-08-22 11:09:27] [Rank 0] step:3321/10000 train_time:276429ms step_avg:83.24ms +[2025-08-22 11:09:27] [Rank 0] step:3321/10000 train_time:276429ms step_avg:83.24ms +[2025-08-22 11:09:29] 
[Rank 0] step:3341/10000 train_time:278139ms step_avg:83.25ms +[2025-08-22 11:09:29] [Rank 0] step:3341/10000 train_time:278139ms step_avg:83.25ms +[2025-08-22 11:09:31] [Rank 0] step:3361/10000 train_time:279847ms step_avg:83.26ms +[2025-08-22 11:09:31] [Rank 0] step:3361/10000 train_time:279847ms step_avg:83.26ms +[2025-08-22 11:09:32] [Rank 0] step:3381/10000 train_time:281557ms step_avg:83.28ms +[2025-08-22 11:09:32] [Rank 0] step:3381/10000 train_time:281557ms step_avg:83.28ms +[2025-08-22 11:09:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:09:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:09:47] [Rank 0] PRINT: step:3400/10000 val_loss:4.4620 svd_entropy: attn_qk:H=0.6200,top10E=0.44,eRank=72.3,q75/q25=89.09 attn_vo:H=0.4599,top10E=0.73,eRank=28.1,q75/q25=96.52 mlp_w1:H=0.6628,top10E=0.42,eRank=100.6,q75/q25=11.02 mlp_w2:H=0.7890,top10E=0.25,eRank=208.5,q75/q25=15.48 vo_prod:H=0.3613,top10E=0.86,eRank=14.7,q75/q25=4042.50 train_time:283561ms step_avg:83.40ms +[2025-08-22 11:09:47] [Rank 0] PRINT: step:3400/10000 val_loss:4.4620 svd_entropy: attn_qk:H=0.6200,top10E=0.44,eRank=72.3,q75/q25=89.09 attn_vo:H=0.4599,top10E=0.73,eRank=28.1,q75/q25=96.52 mlp_w1:H=0.6628,top10E=0.42,eRank=100.6,q75/q25=11.02 mlp_w2:H=0.7890,top10E=0.25,eRank=208.5,q75/q25=15.48 vo_prod:H=0.3613,top10E=0.86,eRank=14.7,q75/q25=4042.50 train_time:283561ms step_avg:83.40ms +[2025-08-22 11:09:48] [Rank 0] step:3401/10000 train_time:283571ms step_avg:83.38ms +[2025-08-22 11:09:48] [Rank 0] step:3401/10000 train_time:283571ms step_avg:83.38ms +[2025-08-22 11:09:49] [Rank 0] step:3421/10000 train_time:285014ms step_avg:83.31ms +[2025-08-22 11:09:49] [Rank 0] step:3421/10000 train_time:285014ms step_avg:83.31ms +[2025-08-22 11:09:51] [Rank 0] step:3441/10000 train_time:286717ms step_avg:83.32ms 
+[2025-08-22 11:09:51] [Rank 0] step:3441/10000 train_time:286717ms step_avg:83.32ms +[2025-08-22 11:09:53] [Rank 0] step:3461/10000 train_time:288419ms step_avg:83.33ms +[2025-08-22 11:09:53] [Rank 0] step:3461/10000 train_time:288419ms step_avg:83.33ms +[2025-08-22 11:09:54] [Rank 0] step:3481/10000 train_time:290123ms step_avg:83.34ms +[2025-08-22 11:09:54] [Rank 0] step:3481/10000 train_time:290123ms step_avg:83.34ms +[2025-08-22 11:09:56] [Rank 0] step:3501/10000 train_time:291831ms step_avg:83.36ms +[2025-08-22 11:09:56] [Rank 0] step:3501/10000 train_time:291831ms step_avg:83.36ms +[2025-08-22 11:09:58] [Rank 0] step:3521/10000 train_time:293541ms step_avg:83.37ms +[2025-08-22 11:09:58] [Rank 0] step:3521/10000 train_time:293541ms step_avg:83.37ms +[2025-08-22 11:10:00] [Rank 0] step:3541/10000 train_time:295248ms step_avg:83.38ms +[2025-08-22 11:10:00] [Rank 0] step:3541/10000 train_time:295248ms step_avg:83.38ms +[2025-08-22 11:10:01] [Rank 0] step:3561/10000 train_time:296957ms step_avg:83.39ms +[2025-08-22 11:10:01] [Rank 0] step:3561/10000 train_time:296957ms step_avg:83.39ms +[2025-08-22 11:10:03] [Rank 0] step:3581/10000 train_time:298667ms step_avg:83.40ms +[2025-08-22 11:10:03] [Rank 0] step:3581/10000 train_time:298667ms step_avg:83.40ms +[2025-08-22 11:10:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:10:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:10:18] [Rank 0] PRINT: step:3600/10000 val_loss:4.4296 svd_entropy: attn_qk:H=0.6233,top10E=0.44,eRank=73.9,q75/q25=89.75 attn_vo:H=0.4653,top10E=0.71,eRank=29.4,q75/q25=99.62 mlp_w1:H=0.6666,top10E=0.41,eRank=103.5,q75/q25=11.30 mlp_w2:H=0.7932,top10E=0.25,eRank=214.2,q75/q25=15.47 vo_prod:H=0.3664,top10E=0.85,eRank=15.4,q75/q25=4429.62 train_time:300674ms step_avg:83.52ms +[2025-08-22 11:10:18] [Rank 0] PRINT: step:3600/10000 val_loss:4.4296 svd_entropy: attn_qk:H=0.6233,top10E=0.44,eRank=73.9,q75/q25=89.75 attn_vo:H=0.4653,top10E=0.71,eRank=29.4,q75/q25=99.62 mlp_w1:H=0.6666,top10E=0.41,eRank=103.5,q75/q25=11.30 mlp_w2:H=0.7932,top10E=0.25,eRank=214.2,q75/q25=15.47 vo_prod:H=0.3664,top10E=0.85,eRank=15.4,q75/q25=4429.62 train_time:300674ms step_avg:83.52ms +[2025-08-22 11:10:18] [Rank 0] step:3601/10000 train_time:300684ms step_avg:83.50ms +[2025-08-22 11:10:18] [Rank 0] step:3601/10000 train_time:300684ms step_avg:83.50ms +[2025-08-22 11:10:20] [Rank 0] step:3621/10000 train_time:302118ms step_avg:83.44ms +[2025-08-22 11:10:20] [Rank 0] step:3621/10000 train_time:302118ms step_avg:83.44ms +[2025-08-22 11:10:22] [Rank 0] step:3641/10000 train_time:303824ms step_avg:83.45ms +[2025-08-22 11:10:22] [Rank 0] step:3641/10000 train_time:303824ms step_avg:83.45ms +[2025-08-22 11:10:23] [Rank 0] step:3661/10000 train_time:305532ms step_avg:83.46ms +[2025-08-22 11:10:23] [Rank 0] step:3661/10000 train_time:305532ms step_avg:83.46ms +[2025-08-22 11:10:25] [Rank 0] step:3681/10000 train_time:307241ms step_avg:83.47ms +[2025-08-22 11:10:25] [Rank 0] step:3681/10000 train_time:307241ms step_avg:83.47ms +[2025-08-22 11:10:27] [Rank 0] step:3701/10000 train_time:308951ms step_avg:83.48ms +[2025-08-22 11:10:27] [Rank 0] step:3701/10000 train_time:308951ms step_avg:83.48ms +[2025-08-22 11:10:29] [Rank 0] step:3721/10000 train_time:310685ms step_avg:83.49ms +[2025-08-22 11:10:29] [Rank 0] step:3721/10000 train_time:310685ms step_avg:83.49ms +[2025-08-22 11:10:30] 
[Rank 0] step:3741/10000 train_time:312432ms step_avg:83.52ms +[2025-08-22 11:10:30] [Rank 0] step:3741/10000 train_time:312432ms step_avg:83.52ms +[2025-08-22 11:10:32] [Rank 0] step:3761/10000 train_time:314181ms step_avg:83.54ms +[2025-08-22 11:10:32] [Rank 0] step:3761/10000 train_time:314181ms step_avg:83.54ms +[2025-08-22 11:10:34] [Rank 0] step:3781/10000 train_time:315931ms step_avg:83.56ms +[2025-08-22 11:10:34] [Rank 0] step:3781/10000 train_time:315931ms step_avg:83.56ms +[2025-08-22 11:10:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:10:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:10:49] [Rank 0] PRINT: step:3800/10000 val_loss:4.3660 svd_entropy: attn_qk:H=0.6265,top10E=0.43,eRank=75.6,q75/q25=91.08 attn_vo:H=0.4704,top10E=0.71,eRank=30.7,q75/q25=102.60 mlp_w1:H=0.6702,top10E=0.41,eRank=106.3,q75/q25=11.57 mlp_w2:H=0.7971,top10E=0.24,eRank=219.5,q75/q25=15.53 vo_prod:H=0.3717,top10E=0.85,eRank=16.1,q75/q25=5057.41 train_time:317985ms step_avg:83.68ms +[2025-08-22 11:10:49] [Rank 0] PRINT: step:3800/10000 val_loss:4.3660 svd_entropy: attn_qk:H=0.6265,top10E=0.43,eRank=75.6,q75/q25=91.08 attn_vo:H=0.4704,top10E=0.71,eRank=30.7,q75/q25=102.60 mlp_w1:H=0.6702,top10E=0.41,eRank=106.3,q75/q25=11.57 mlp_w2:H=0.7971,top10E=0.24,eRank=219.5,q75/q25=15.53 vo_prod:H=0.3717,top10E=0.85,eRank=16.1,q75/q25=5057.41 train_time:317985ms step_avg:83.68ms +[2025-08-22 11:10:49] [Rank 0] step:3801/10000 train_time:317995ms step_avg:83.66ms +[2025-08-22 11:10:49] [Rank 0] step:3801/10000 train_time:317995ms step_avg:83.66ms +[2025-08-22 11:10:51] [Rank 0] step:3821/10000 train_time:319461ms step_avg:83.61ms +[2025-08-22 11:10:51] [Rank 0] step:3821/10000 train_time:319461ms step_avg:83.61ms +[2025-08-22 11:10:53] [Rank 0] step:3841/10000 train_time:321204ms step_avg:83.63ms 
+[2025-08-22 11:10:53] [Rank 0] step:3841/10000 train_time:321204ms step_avg:83.63ms +[2025-08-22 11:10:54] [Rank 0] step:3861/10000 train_time:322949ms step_avg:83.64ms +[2025-08-22 11:10:54] [Rank 0] step:3861/10000 train_time:322949ms step_avg:83.64ms +[2025-08-22 11:10:56] [Rank 0] step:3881/10000 train_time:324691ms step_avg:83.66ms +[2025-08-22 11:10:56] [Rank 0] step:3881/10000 train_time:324691ms step_avg:83.66ms +[2025-08-22 11:10:58] [Rank 0] step:3901/10000 train_time:326433ms step_avg:83.68ms +[2025-08-22 11:10:58] [Rank 0] step:3901/10000 train_time:326433ms step_avg:83.68ms +[2025-08-22 11:11:00] [Rank 0] step:3921/10000 train_time:328177ms step_avg:83.70ms +[2025-08-22 11:11:00] [Rank 0] step:3921/10000 train_time:328177ms step_avg:83.70ms +[2025-08-22 11:11:01] [Rank 0] step:3941/10000 train_time:329923ms step_avg:83.72ms +[2025-08-22 11:11:01] [Rank 0] step:3941/10000 train_time:329923ms step_avg:83.72ms +[2025-08-22 11:11:03] [Rank 0] step:3961/10000 train_time:331666ms step_avg:83.73ms +[2025-08-22 11:11:03] [Rank 0] step:3961/10000 train_time:331666ms step_avg:83.73ms +[2025-08-22 11:11:05] [Rank 0] step:3981/10000 train_time:333410ms step_avg:83.75ms +[2025-08-22 11:11:05] [Rank 0] step:3981/10000 train_time:333410ms step_avg:83.75ms +[2025-08-22 11:11:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:11:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:11:20] [Rank 0] PRINT: step:4000/10000 val_loss:4.3369 svd_entropy: attn_qk:H=0.6299,top10E=0.43,eRank=77.3,q75/q25=91.75 attn_vo:H=0.4753,top10E=0.70,eRank=32.0,q75/q25=104.48 mlp_w1:H=0.6736,top10E=0.40,eRank=109.0,q75/q25=11.79 mlp_w2:H=0.8009,top10E=0.24,eRank=224.7,q75/q25=15.54 vo_prod:H=0.3770,top10E=0.84,eRank=16.8,q75/q25=5645.31 train_time:335456ms step_avg:83.86ms +[2025-08-22 11:11:20] [Rank 0] PRINT: step:4000/10000 val_loss:4.3369 svd_entropy: attn_qk:H=0.6299,top10E=0.43,eRank=77.3,q75/q25=91.75 attn_vo:H=0.4753,top10E=0.70,eRank=32.0,q75/q25=104.48 mlp_w1:H=0.6736,top10E=0.40,eRank=109.0,q75/q25=11.79 mlp_w2:H=0.8009,top10E=0.24,eRank=224.7,q75/q25=15.54 vo_prod:H=0.3770,top10E=0.84,eRank=16.8,q75/q25=5645.31 train_time:335456ms step_avg:83.86ms +[2025-08-22 11:11:20] [Rank 0] step:4001/10000 train_time:335466ms step_avg:83.85ms +[2025-08-22 11:11:20] [Rank 0] step:4001/10000 train_time:335466ms step_avg:83.85ms +[2025-08-22 11:11:22] [Rank 0] step:4021/10000 train_time:336932ms step_avg:83.79ms +[2025-08-22 11:11:22] [Rank 0] step:4021/10000 train_time:336932ms step_avg:83.79ms +[2025-08-22 11:11:24] [Rank 0] step:4041/10000 train_time:338672ms step_avg:83.81ms +[2025-08-22 11:11:24] [Rank 0] step:4041/10000 train_time:338672ms step_avg:83.81ms +[2025-08-22 11:11:25] [Rank 0] step:4061/10000 train_time:340413ms step_avg:83.82ms +[2025-08-22 11:11:25] [Rank 0] step:4061/10000 train_time:340413ms step_avg:83.82ms +[2025-08-22 11:11:28] [Rank 0] step:4081/10000 train_time:342525ms step_avg:83.93ms +[2025-08-22 11:11:28] [Rank 0] step:4081/10000 train_time:342525ms step_avg:83.93ms +[2025-08-22 11:11:29] [Rank 0] step:4101/10000 train_time:344267ms step_avg:83.95ms +[2025-08-22 11:11:29] [Rank 0] step:4101/10000 train_time:344267ms step_avg:83.95ms +[2025-08-22 11:11:31] [Rank 0] step:4121/10000 train_time:346009ms step_avg:83.96ms +[2025-08-22 11:11:31] [Rank 0] step:4121/10000 train_time:346009ms step_avg:83.96ms +[2025-08-22 
11:11:33] [Rank 0] step:4141/10000 train_time:347753ms step_avg:83.98ms +[2025-08-22 11:11:33] [Rank 0] step:4141/10000 train_time:347753ms step_avg:83.98ms +[2025-08-22 11:11:35] [Rank 0] step:4161/10000 train_time:349497ms step_avg:83.99ms +[2025-08-22 11:11:35] [Rank 0] step:4161/10000 train_time:349497ms step_avg:83.99ms +[2025-08-22 11:11:36] [Rank 0] step:4181/10000 train_time:351243ms step_avg:84.01ms +[2025-08-22 11:11:36] [Rank 0] step:4181/10000 train_time:351243ms step_avg:84.01ms +[2025-08-22 11:11:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:11:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:11:52] [Rank 0] PRINT: step:4200/10000 val_loss:4.3229 svd_entropy: attn_qk:H=0.6330,top10E=0.42,eRank=78.9,q75/q25=92.16 attn_vo:H=0.4798,top10E=0.69,eRank=33.3,q75/q25=106.74 mlp_w1:H=0.6766,top10E=0.39,eRank=111.6,q75/q25=11.98 mlp_w2:H=0.8043,top10E=0.24,eRank=229.5,q75/q25=15.52 vo_prod:H=0.3816,top10E=0.83,eRank=17.5,q75/q25=6170.76 train_time:353291ms step_avg:84.12ms +[2025-08-22 11:11:52] [Rank 0] PRINT: step:4200/10000 val_loss:4.3229 svd_entropy: attn_qk:H=0.6330,top10E=0.42,eRank=78.9,q75/q25=92.16 attn_vo:H=0.4798,top10E=0.69,eRank=33.3,q75/q25=106.74 mlp_w1:H=0.6766,top10E=0.39,eRank=111.6,q75/q25=11.98 mlp_w2:H=0.8043,top10E=0.24,eRank=229.5,q75/q25=15.52 vo_prod:H=0.3816,top10E=0.83,eRank=17.5,q75/q25=6170.76 train_time:353291ms step_avg:84.12ms +[2025-08-22 11:11:52] [Rank 0] step:4201/10000 train_time:353301ms step_avg:84.10ms +[2025-08-22 11:11:52] [Rank 0] step:4201/10000 train_time:353301ms step_avg:84.10ms +[2025-08-22 11:11:53] [Rank 0] step:4221/10000 train_time:354743ms step_avg:84.04ms +[2025-08-22 11:11:53] [Rank 0] step:4221/10000 train_time:354743ms step_avg:84.04ms +[2025-08-22 11:11:55] [Rank 0] step:4241/10000 train_time:356484ms 
step_avg:84.06ms +[2025-08-22 11:11:55] [Rank 0] step:4241/10000 train_time:356484ms step_avg:84.06ms +[2025-08-22 11:11:57] [Rank 0] step:4261/10000 train_time:358228ms step_avg:84.07ms +[2025-08-22 11:11:57] [Rank 0] step:4261/10000 train_time:358228ms step_avg:84.07ms +[2025-08-22 11:11:59] [Rank 0] step:4281/10000 train_time:359976ms step_avg:84.09ms +[2025-08-22 11:11:59] [Rank 0] step:4281/10000 train_time:359976ms step_avg:84.09ms +[2025-08-22 11:12:00] [Rank 0] step:4301/10000 train_time:361723ms step_avg:84.10ms +[2025-08-22 11:12:00] [Rank 0] step:4301/10000 train_time:361723ms step_avg:84.10ms +[2025-08-22 11:12:02] [Rank 0] step:4321/10000 train_time:363473ms step_avg:84.12ms +[2025-08-22 11:12:02] [Rank 0] step:4321/10000 train_time:363473ms step_avg:84.12ms +[2025-08-22 11:12:04] [Rank 0] step:4341/10000 train_time:365219ms step_avg:84.13ms +[2025-08-22 11:12:04] [Rank 0] step:4341/10000 train_time:365219ms step_avg:84.13ms +[2025-08-22 11:12:06] [Rank 0] step:4361/10000 train_time:366968ms step_avg:84.15ms +[2025-08-22 11:12:06] [Rank 0] step:4361/10000 train_time:366968ms step_avg:84.15ms +[2025-08-22 11:12:07] [Rank 0] step:4381/10000 train_time:368717ms step_avg:84.16ms +[2025-08-22 11:12:07] [Rank 0] step:4381/10000 train_time:368717ms step_avg:84.16ms +[2025-08-22 11:12:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:12:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:12:23] [Rank 0] PRINT: step:4400/10000 val_loss:4.3056 svd_entropy: attn_qk:H=0.6359,top10E=0.42,eRank=80.3,q75/q25=93.44 attn_vo:H=0.4839,top10E=0.68,eRank=34.5,q75/q25=108.78 mlp_w1:H=0.6796,top10E=0.39,eRank=114.1,q75/q25=12.21 mlp_w2:H=0.8075,top10E=0.23,eRank=234.1,q75/q25=15.51 vo_prod:H=0.3852,top10E=0.83,eRank=18.0,q75/q25=6728.77 train_time:370769ms step_avg:84.27ms +[2025-08-22 11:12:23] [Rank 0] PRINT: step:4400/10000 val_loss:4.3056 svd_entropy: attn_qk:H=0.6359,top10E=0.42,eRank=80.3,q75/q25=93.44 attn_vo:H=0.4839,top10E=0.68,eRank=34.5,q75/q25=108.78 mlp_w1:H=0.6796,top10E=0.39,eRank=114.1,q75/q25=12.21 mlp_w2:H=0.8075,top10E=0.23,eRank=234.1,q75/q25=15.51 vo_prod:H=0.3852,top10E=0.83,eRank=18.0,q75/q25=6728.77 train_time:370769ms step_avg:84.27ms +[2025-08-22 11:12:23] [Rank 0] step:4401/10000 train_time:370778ms step_avg:84.25ms +[2025-08-22 11:12:23] [Rank 0] step:4401/10000 train_time:370778ms step_avg:84.25ms +[2025-08-22 11:12:24] [Rank 0] step:4421/10000 train_time:372228ms step_avg:84.20ms +[2025-08-22 11:12:24] [Rank 0] step:4421/10000 train_time:372228ms step_avg:84.20ms +[2025-08-22 11:12:26] [Rank 0] step:4441/10000 train_time:373969ms step_avg:84.21ms +[2025-08-22 11:12:26] [Rank 0] step:4441/10000 train_time:373969ms step_avg:84.21ms +[2025-08-22 11:12:28] [Rank 0] step:4461/10000 train_time:375714ms step_avg:84.22ms +[2025-08-22 11:12:28] [Rank 0] step:4461/10000 train_time:375714ms step_avg:84.22ms +[2025-08-22 11:12:30] [Rank 0] step:4481/10000 train_time:377462ms step_avg:84.24ms +[2025-08-22 11:12:30] [Rank 0] step:4481/10000 train_time:377462ms step_avg:84.24ms +[2025-08-22 11:12:31] [Rank 0] step:4501/10000 train_time:379210ms step_avg:84.25ms +[2025-08-22 11:12:31] [Rank 0] step:4501/10000 train_time:379210ms step_avg:84.25ms +[2025-08-22 11:12:33] [Rank 0] step:4521/10000 train_time:380959ms step_avg:84.26ms +[2025-08-22 11:12:33] [Rank 0] step:4521/10000 train_time:380959ms step_avg:84.26ms +[2025-08-22 
11:12:35] [Rank 0] step:4541/10000 train_time:382709ms step_avg:84.28ms +[2025-08-22 11:12:35] [Rank 0] step:4541/10000 train_time:382709ms step_avg:84.28ms +[2025-08-22 11:12:37] [Rank 0] step:4561/10000 train_time:384460ms step_avg:84.29ms +[2025-08-22 11:12:37] [Rank 0] step:4561/10000 train_time:384460ms step_avg:84.29ms +[2025-08-22 11:12:38] [Rank 0] step:4581/10000 train_time:386213ms step_avg:84.31ms +[2025-08-22 11:12:38] [Rank 0] step:4581/10000 train_time:386213ms step_avg:84.31ms +[2025-08-22 11:12:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:12:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:12:54] [Rank 0] PRINT: step:4600/10000 val_loss:4.2647 svd_entropy: attn_qk:H=0.6390,top10E=0.41,eRank=81.9,q75/q25=94.51 attn_vo:H=0.4882,top10E=0.67,eRank=35.7,q75/q25=111.39 mlp_w1:H=0.6823,top10E=0.38,eRank=116.5,q75/q25=12.41 mlp_w2:H=0.8105,top10E=0.23,eRank=238.5,q75/q25=15.39 vo_prod:H=0.3896,top10E=0.82,eRank=18.7,q75/q25=7407.35 train_time:388268ms step_avg:84.41ms +[2025-08-22 11:12:54] [Rank 0] PRINT: step:4600/10000 val_loss:4.2647 svd_entropy: attn_qk:H=0.6390,top10E=0.41,eRank=81.9,q75/q25=94.51 attn_vo:H=0.4882,top10E=0.67,eRank=35.7,q75/q25=111.39 mlp_w1:H=0.6823,top10E=0.38,eRank=116.5,q75/q25=12.41 mlp_w2:H=0.8105,top10E=0.23,eRank=238.5,q75/q25=15.39 vo_prod:H=0.3896,top10E=0.82,eRank=18.7,q75/q25=7407.35 train_time:388268ms step_avg:84.41ms +[2025-08-22 11:12:54] [Rank 0] step:4601/10000 train_time:388278ms step_avg:84.39ms +[2025-08-22 11:12:54] [Rank 0] step:4601/10000 train_time:388278ms step_avg:84.39ms +[2025-08-22 11:12:56] [Rank 0] step:4621/10000 train_time:389731ms step_avg:84.34ms +[2025-08-22 11:12:56] [Rank 0] step:4621/10000 train_time:389731ms step_avg:84.34ms +[2025-08-22 11:12:57] [Rank 0] step:4641/10000 train_time:391478ms 
step_avg:84.35ms +[2025-08-22 11:12:57] [Rank 0] step:4641/10000 train_time:391478ms step_avg:84.35ms +[2025-08-22 11:12:59] [Rank 0] step:4661/10000 train_time:393225ms step_avg:84.36ms +[2025-08-22 11:12:59] [Rank 0] step:4661/10000 train_time:393225ms step_avg:84.36ms +[2025-08-22 11:13:01] [Rank 0] step:4681/10000 train_time:394972ms step_avg:84.38ms +[2025-08-22 11:13:01] [Rank 0] step:4681/10000 train_time:394972ms step_avg:84.38ms +[2025-08-22 11:13:03] [Rank 0] step:4701/10000 train_time:396723ms step_avg:84.39ms +[2025-08-22 11:13:03] [Rank 0] step:4701/10000 train_time:396723ms step_avg:84.39ms +[2025-08-22 11:13:04] [Rank 0] step:4721/10000 train_time:398470ms step_avg:84.40ms +[2025-08-22 11:13:04] [Rank 0] step:4721/10000 train_time:398470ms step_avg:84.40ms +[2025-08-22 11:13:06] [Rank 0] step:4741/10000 train_time:400221ms step_avg:84.42ms +[2025-08-22 11:13:06] [Rank 0] step:4741/10000 train_time:400221ms step_avg:84.42ms +[2025-08-22 11:13:08] [Rank 0] step:4761/10000 train_time:401971ms step_avg:84.43ms +[2025-08-22 11:13:08] [Rank 0] step:4761/10000 train_time:401971ms step_avg:84.43ms +[2025-08-22 11:13:10] [Rank 0] step:4781/10000 train_time:403719ms step_avg:84.44ms +[2025-08-22 11:13:10] [Rank 0] step:4781/10000 train_time:403719ms step_avg:84.44ms +[2025-08-22 11:13:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:13:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:13:25] [Rank 0] PRINT: step:4800/10000 val_loss:4.2533 svd_entropy: attn_qk:H=0.6419,top10E=0.41,eRank=83.5,q75/q25=95.15 attn_vo:H=0.4923,top10E=0.66,eRank=37.0,q75/q25=113.58 mlp_w1:H=0.6848,top10E=0.38,eRank=118.8,q75/q25=12.59 mlp_w2:H=0.8133,top10E=0.22,eRank=242.5,q75/q25=15.27 vo_prod:H=0.3935,top10E=0.81,eRank=19.3,q75/q25=8125.14 train_time:405771ms step_avg:84.54ms +[2025-08-22 11:13:25] [Rank 0] PRINT: step:4800/10000 val_loss:4.2533 svd_entropy: attn_qk:H=0.6419,top10E=0.41,eRank=83.5,q75/q25=95.15 attn_vo:H=0.4923,top10E=0.66,eRank=37.0,q75/q25=113.58 mlp_w1:H=0.6848,top10E=0.38,eRank=118.8,q75/q25=12.59 mlp_w2:H=0.8133,top10E=0.22,eRank=242.5,q75/q25=15.27 vo_prod:H=0.3935,top10E=0.81,eRank=19.3,q75/q25=8125.14 train_time:405771ms step_avg:84.54ms +[2025-08-22 11:13:25] [Rank 0] step:4801/10000 train_time:405781ms step_avg:84.52ms +[2025-08-22 11:13:25] [Rank 0] step:4801/10000 train_time:405781ms step_avg:84.52ms +[2025-08-22 11:13:27] [Rank 0] step:4821/10000 train_time:407249ms step_avg:84.47ms +[2025-08-22 11:13:27] [Rank 0] step:4821/10000 train_time:407249ms step_avg:84.47ms +[2025-08-22 11:13:28] [Rank 0] step:4841/10000 train_time:408995ms step_avg:84.49ms +[2025-08-22 11:13:28] [Rank 0] step:4841/10000 train_time:408995ms step_avg:84.49ms +[2025-08-22 11:13:30] [Rank 0] step:4861/10000 train_time:410745ms step_avg:84.50ms +[2025-08-22 11:13:30] [Rank 0] step:4861/10000 train_time:410745ms step_avg:84.50ms +[2025-08-22 11:13:32] [Rank 0] step:4881/10000 train_time:412491ms step_avg:84.51ms +[2025-08-22 11:13:32] [Rank 0] step:4881/10000 train_time:412491ms step_avg:84.51ms +[2025-08-22 11:13:34] [Rank 0] step:4901/10000 train_time:414239ms step_avg:84.52ms +[2025-08-22 11:13:34] [Rank 0] step:4901/10000 train_time:414239ms step_avg:84.52ms +[2025-08-22 11:13:35] [Rank 0] step:4921/10000 train_time:415991ms step_avg:84.53ms +[2025-08-22 11:13:35] [Rank 0] step:4921/10000 train_time:415991ms step_avg:84.53ms +[2025-08-22 
11:13:37] [Rank 0] step:4941/10000 train_time:417744ms step_avg:84.55ms +[2025-08-22 11:13:37] [Rank 0] step:4941/10000 train_time:417744ms step_avg:84.55ms +[2025-08-22 11:13:39] [Rank 0] step:4961/10000 train_time:419496ms step_avg:84.56ms +[2025-08-22 11:13:39] [Rank 0] step:4961/10000 train_time:419496ms step_avg:84.56ms +[2025-08-22 11:13:41] [Rank 0] step:4981/10000 train_time:421248ms step_avg:84.57ms +[2025-08-22 11:13:41] [Rank 0] step:4981/10000 train_time:421248ms step_avg:84.57ms +[2025-08-22 11:13:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:13:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:13:56] [Rank 0] PRINT: step:5000/10000 val_loss:4.2297 svd_entropy: attn_qk:H=0.6446,top10E=0.40,eRank=84.9,q75/q25=95.19 attn_vo:H=0.4961,top10E=0.66,eRank=38.1,q75/q25=115.79 mlp_w1:H=0.6871,top10E=0.38,eRank=121.0,q75/q25=12.75 mlp_w2:H=0.8159,top10E=0.22,eRank=246.5,q75/q25=15.24 vo_prod:H=0.3973,top10E=0.81,eRank=19.9,q75/q25=8694.60 train_time:423305ms step_avg:84.66ms +[2025-08-22 11:13:56] [Rank 0] PRINT: step:5000/10000 val_loss:4.2297 svd_entropy: attn_qk:H=0.6446,top10E=0.40,eRank=84.9,q75/q25=95.19 attn_vo:H=0.4961,top10E=0.66,eRank=38.1,q75/q25=115.79 mlp_w1:H=0.6871,top10E=0.38,eRank=121.0,q75/q25=12.75 mlp_w2:H=0.8159,top10E=0.22,eRank=246.5,q75/q25=15.24 vo_prod:H=0.3973,top10E=0.81,eRank=19.9,q75/q25=8694.60 train_time:423305ms step_avg:84.66ms +[2025-08-22 11:13:56] [Rank 0] step:5001/10000 train_time:423315ms step_avg:84.65ms +[2025-08-22 11:13:56] [Rank 0] step:5001/10000 train_time:423315ms step_avg:84.65ms +[2025-08-22 11:13:58] [Rank 0] step:5021/10000 train_time:424788ms step_avg:84.60ms +[2025-08-22 11:13:58] [Rank 0] step:5021/10000 train_time:424788ms step_avg:84.60ms +[2025-08-22 11:14:00] [Rank 0] step:5041/10000 train_time:426540ms 
step_avg:84.61ms +[2025-08-22 11:14:00] [Rank 0] step:5041/10000 train_time:426540ms step_avg:84.61ms +[2025-08-22 11:14:01] [Rank 0] step:5061/10000 train_time:428287ms step_avg:84.62ms +[2025-08-22 11:14:01] [Rank 0] step:5061/10000 train_time:428287ms step_avg:84.62ms +[2025-08-22 11:14:03] [Rank 0] step:5081/10000 train_time:430038ms step_avg:84.64ms +[2025-08-22 11:14:03] [Rank 0] step:5081/10000 train_time:430038ms step_avg:84.64ms +[2025-08-22 11:14:05] [Rank 0] step:5101/10000 train_time:431788ms step_avg:84.65ms +[2025-08-22 11:14:05] [Rank 0] step:5101/10000 train_time:431788ms step_avg:84.65ms +[2025-08-22 11:14:07] [Rank 0] step:5121/10000 train_time:433540ms step_avg:84.66ms +[2025-08-22 11:14:07] [Rank 0] step:5121/10000 train_time:433540ms step_avg:84.66ms +[2025-08-22 11:14:08] [Rank 0] step:5141/10000 train_time:435296ms step_avg:84.67ms +[2025-08-22 11:14:08] [Rank 0] step:5141/10000 train_time:435296ms step_avg:84.67ms +[2025-08-22 11:14:10] [Rank 0] step:5161/10000 train_time:437050ms step_avg:84.68ms +[2025-08-22 11:14:10] [Rank 0] step:5161/10000 train_time:437050ms step_avg:84.68ms +[2025-08-22 11:14:12] [Rank 0] step:5181/10000 train_time:438805ms step_avg:84.70ms +[2025-08-22 11:14:12] [Rank 0] step:5181/10000 train_time:438805ms step_avg:84.70ms +[2025-08-22 11:14:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:14:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:14:27] [Rank 0] PRINT: step:5200/10000 val_loss:4.2153 svd_entropy: attn_qk:H=0.6471,top10E=0.40,eRank=86.2,q75/q25=95.15 attn_vo:H=0.4997,top10E=0.65,eRank=39.3,q75/q25=117.38 mlp_w1:H=0.6894,top10E=0.37,eRank=123.2,q75/q25=12.89 mlp_w2:H=0.8183,top10E=0.22,eRank=250.1,q75/q25=15.07 vo_prod:H=0.4011,top10E=0.80,eRank=20.5,q75/q25=9304.13 train_time:440891ms step_avg:84.79ms +[2025-08-22 11:14:27] [Rank 0] PRINT: step:5200/10000 val_loss:4.2153 svd_entropy: attn_qk:H=0.6471,top10E=0.40,eRank=86.2,q75/q25=95.15 attn_vo:H=0.4997,top10E=0.65,eRank=39.3,q75/q25=117.38 mlp_w1:H=0.6894,top10E=0.37,eRank=123.2,q75/q25=12.89 mlp_w2:H=0.8183,top10E=0.22,eRank=250.1,q75/q25=15.07 vo_prod:H=0.4011,top10E=0.80,eRank=20.5,q75/q25=9304.13 train_time:440891ms step_avg:84.79ms +[2025-08-22 11:14:27] [Rank 0] step:5201/10000 train_time:440901ms step_avg:84.77ms +[2025-08-22 11:14:27] [Rank 0] step:5201/10000 train_time:440901ms step_avg:84.77ms +[2025-08-22 11:14:29] [Rank 0] step:5221/10000 train_time:442383ms step_avg:84.73ms +[2025-08-22 11:14:29] [Rank 0] step:5221/10000 train_time:442383ms step_avg:84.73ms +[2025-08-22 11:14:31] [Rank 0] step:5241/10000 train_time:444160ms step_avg:84.75ms +[2025-08-22 11:14:31] [Rank 0] step:5241/10000 train_time:444160ms step_avg:84.75ms +[2025-08-22 11:14:33] [Rank 0] step:5261/10000 train_time:445977ms step_avg:84.77ms +[2025-08-22 11:14:33] [Rank 0] step:5261/10000 train_time:445977ms step_avg:84.77ms +[2025-08-22 11:14:34] [Rank 0] step:5281/10000 train_time:447761ms step_avg:84.79ms +[2025-08-22 11:14:34] [Rank 0] step:5281/10000 train_time:447761ms step_avg:84.79ms +[2025-08-22 11:14:36] [Rank 0] step:5301/10000 train_time:449553ms step_avg:84.81ms +[2025-08-22 11:14:36] [Rank 0] step:5301/10000 train_time:449553ms step_avg:84.81ms +[2025-08-22 11:14:38] [Rank 0] step:5321/10000 train_time:451334ms step_avg:84.82ms +[2025-08-22 11:14:38] [Rank 0] step:5321/10000 train_time:451334ms step_avg:84.82ms +[2025-08-22 
11:14:40] [Rank 0] step:5341/10000 train_time:453114ms step_avg:84.84ms +[2025-08-22 11:14:40] [Rank 0] step:5341/10000 train_time:453114ms step_avg:84.84ms +[2025-08-22 11:14:42] [Rank 0] step:5361/10000 train_time:454897ms step_avg:84.85ms +[2025-08-22 11:14:42] [Rank 0] step:5361/10000 train_time:454897ms step_avg:84.85ms +[2025-08-22 11:14:43] [Rank 0] step:5381/10000 train_time:456678ms step_avg:84.87ms +[2025-08-22 11:14:43] [Rank 0] step:5381/10000 train_time:456678ms step_avg:84.87ms +[2025-08-22 11:14:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:14:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:14:59] [Rank 0] PRINT: step:5400/10000 val_loss:4.1879 svd_entropy: attn_qk:H=0.6496,top10E=0.40,eRank=87.6,q75/q25=95.89 attn_vo:H=0.5031,top10E=0.64,eRank=40.4,q75/q25=118.44 mlp_w1:H=0.6916,top10E=0.37,eRank=125.3,q75/q25=13.01 mlp_w2:H=0.8205,top10E=0.22,eRank=253.5,q75/q25=14.99 vo_prod:H=0.4045,top10E=0.80,eRank=21.1,q75/q25=9852.15 train_time:458766ms step_avg:84.96ms +[2025-08-22 11:14:59] [Rank 0] PRINT: step:5400/10000 val_loss:4.1879 svd_entropy: attn_qk:H=0.6496,top10E=0.40,eRank=87.6,q75/q25=95.89 attn_vo:H=0.5031,top10E=0.64,eRank=40.4,q75/q25=118.44 mlp_w1:H=0.6916,top10E=0.37,eRank=125.3,q75/q25=13.01 mlp_w2:H=0.8205,top10E=0.22,eRank=253.5,q75/q25=14.99 vo_prod:H=0.4045,top10E=0.80,eRank=21.1,q75/q25=9852.15 train_time:458766ms step_avg:84.96ms +[2025-08-22 11:14:59] [Rank 0] step:5401/10000 train_time:458776ms step_avg:84.94ms +[2025-08-22 11:14:59] [Rank 0] step:5401/10000 train_time:458776ms step_avg:84.94ms +[2025-08-22 11:15:00] [Rank 0] step:5421/10000 train_time:460282ms step_avg:84.91ms +[2025-08-22 11:15:00] [Rank 0] step:5421/10000 train_time:460282ms step_avg:84.91ms +[2025-08-22 11:15:02] [Rank 0] step:5441/10000 train_time:462061ms 
step_avg:84.92ms +[2025-08-22 11:15:02] [Rank 0] step:5441/10000 train_time:462061ms step_avg:84.92ms +[2025-08-22 11:15:04] [Rank 0] step:5461/10000 train_time:463837ms step_avg:84.94ms +[2025-08-22 11:15:04] [Rank 0] step:5461/10000 train_time:463837ms step_avg:84.94ms +[2025-08-22 11:15:06] [Rank 0] step:5481/10000 train_time:465617ms step_avg:84.95ms +[2025-08-22 11:15:06] [Rank 0] step:5481/10000 train_time:465617ms step_avg:84.95ms +[2025-08-22 11:15:08] [Rank 0] step:5501/10000 train_time:467400ms step_avg:84.97ms +[2025-08-22 11:15:08] [Rank 0] step:5501/10000 train_time:467400ms step_avg:84.97ms +[2025-08-22 11:15:09] [Rank 0] step:5521/10000 train_time:469184ms step_avg:84.98ms +[2025-08-22 11:15:09] [Rank 0] step:5521/10000 train_time:469184ms step_avg:84.98ms +[2025-08-22 11:15:11] [Rank 0] step:5541/10000 train_time:470964ms step_avg:85.00ms +[2025-08-22 11:15:11] [Rank 0] step:5541/10000 train_time:470964ms step_avg:85.00ms +[2025-08-22 11:15:13] [Rank 0] step:5561/10000 train_time:472749ms step_avg:85.01ms +[2025-08-22 11:15:13] [Rank 0] step:5561/10000 train_time:472749ms step_avg:85.01ms +[2025-08-22 11:15:15] [Rank 0] step:5581/10000 train_time:474531ms step_avg:85.03ms +[2025-08-22 11:15:15] [Rank 0] step:5581/10000 train_time:474531ms step_avg:85.03ms +[2025-08-22 11:15:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:15:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:15:30] [Rank 0] PRINT: step:5600/10000 val_loss:4.1762 svd_entropy: attn_qk:H=0.6520,top10E=0.39,eRank=88.8,q75/q25=96.43 attn_vo:H=0.5064,top10E=0.64,eRank=41.5,q75/q25=119.91 mlp_w1:H=0.6938,top10E=0.36,eRank=127.4,q75/q25=13.15 mlp_w2:H=0.8226,top10E=0.21,eRank=256.8,q75/q25=14.85 vo_prod:H=0.4078,top10E=0.79,eRank=21.6,q75/q25=10239.21 train_time:476625ms step_avg:85.11ms +[2025-08-22 11:15:30] [Rank 0] PRINT: step:5600/10000 val_loss:4.1762 svd_entropy: attn_qk:H=0.6520,top10E=0.39,eRank=88.8,q75/q25=96.43 attn_vo:H=0.5064,top10E=0.64,eRank=41.5,q75/q25=119.91 mlp_w1:H=0.6938,top10E=0.36,eRank=127.4,q75/q25=13.15 mlp_w2:H=0.8226,top10E=0.21,eRank=256.8,q75/q25=14.85 vo_prod:H=0.4078,top10E=0.79,eRank=21.6,q75/q25=10239.21 train_time:476625ms step_avg:85.11ms +[2025-08-22 11:15:30] [Rank 0] step:5601/10000 train_time:476634ms step_avg:85.10ms +[2025-08-22 11:15:30] [Rank 0] step:5601/10000 train_time:476634ms step_avg:85.10ms +[2025-08-22 11:15:32] [Rank 0] step:5621/10000 train_time:478109ms step_avg:85.06ms +[2025-08-22 11:15:32] [Rank 0] step:5621/10000 train_time:478109ms step_avg:85.06ms +[2025-08-22 11:15:34] [Rank 0] step:5641/10000 train_time:479889ms step_avg:85.07ms +[2025-08-22 11:15:34] [Rank 0] step:5641/10000 train_time:479889ms step_avg:85.07ms +[2025-08-22 11:15:35] [Rank 0] step:5661/10000 train_time:481663ms step_avg:85.08ms +[2025-08-22 11:15:35] [Rank 0] step:5661/10000 train_time:481663ms step_avg:85.08ms +[2025-08-22 11:15:37] [Rank 0] step:5681/10000 train_time:483448ms step_avg:85.10ms +[2025-08-22 11:15:37] [Rank 0] step:5681/10000 train_time:483448ms step_avg:85.10ms +[2025-08-22 11:15:39] [Rank 0] step:5701/10000 train_time:485230ms step_avg:85.11ms +[2025-08-22 11:15:39] [Rank 0] step:5701/10000 train_time:485230ms step_avg:85.11ms +[2025-08-22 11:15:41] [Rank 0] step:5721/10000 train_time:487016ms step_avg:85.13ms +[2025-08-22 11:15:41] [Rank 0] step:5721/10000 train_time:487016ms step_avg:85.13ms +[2025-08-22 
11:15:43] [Rank 0] step:5741/10000 train_time:488807ms step_avg:85.14ms +[2025-08-22 11:15:43] [Rank 0] step:5741/10000 train_time:488807ms step_avg:85.14ms +[2025-08-22 11:15:44] [Rank 0] step:5761/10000 train_time:490592ms step_avg:85.16ms +[2025-08-22 11:15:44] [Rank 0] step:5761/10000 train_time:490592ms step_avg:85.16ms +[2025-08-22 11:15:46] [Rank 0] step:5781/10000 train_time:492379ms step_avg:85.17ms +[2025-08-22 11:15:46] [Rank 0] step:5781/10000 train_time:492379ms step_avg:85.17ms +[2025-08-22 11:15:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:15:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:16:01] [Rank 0] PRINT: step:5800/10000 val_loss:4.1677 svd_entropy: attn_qk:H=0.6543,top10E=0.39,eRank=90.1,q75/q25=96.08 attn_vo:H=0.5095,top10E=0.63,eRank=42.6,q75/q25=122.36 mlp_w1:H=0.6958,top10E=0.36,eRank=129.4,q75/q25=13.23 mlp_w2:H=0.8246,top10E=0.21,eRank=259.7,q75/q25=14.75 vo_prod:H=0.4105,top10E=0.79,eRank=22.2,q75/q25=10905.30 train_time:494478ms step_avg:85.25ms +[2025-08-22 11:16:01] [Rank 0] PRINT: step:5800/10000 val_loss:4.1677 svd_entropy: attn_qk:H=0.6543,top10E=0.39,eRank=90.1,q75/q25=96.08 attn_vo:H=0.5095,top10E=0.63,eRank=42.6,q75/q25=122.36 mlp_w1:H=0.6958,top10E=0.36,eRank=129.4,q75/q25=13.23 mlp_w2:H=0.8246,top10E=0.21,eRank=259.7,q75/q25=14.75 vo_prod:H=0.4105,top10E=0.79,eRank=22.2,q75/q25=10905.30 train_time:494478ms step_avg:85.25ms +[2025-08-22 11:16:01] [Rank 0] step:5801/10000 train_time:494489ms step_avg:85.24ms +[2025-08-22 11:16:01] [Rank 0] step:5801/10000 train_time:494489ms step_avg:85.24ms +[2025-08-22 11:16:03] [Rank 0] step:5821/10000 train_time:495974ms step_avg:85.20ms +[2025-08-22 11:16:03] [Rank 0] step:5821/10000 train_time:495974ms step_avg:85.20ms +[2025-08-22 11:16:05] [Rank 0] step:5841/10000 train_time:497752ms 
step_avg:85.22ms +[2025-08-22 11:16:05] [Rank 0] step:5841/10000 train_time:497752ms step_avg:85.22ms +[2025-08-22 11:16:07] [Rank 0] step:5861/10000 train_time:499537ms step_avg:85.23ms +[2025-08-22 11:16:07] [Rank 0] step:5861/10000 train_time:499537ms step_avg:85.23ms +[2025-08-22 11:16:09] [Rank 0] step:5881/10000 train_time:501321ms step_avg:85.24ms +[2025-08-22 11:16:09] [Rank 0] step:5881/10000 train_time:501321ms step_avg:85.24ms +[2025-08-22 11:16:10] [Rank 0] step:5901/10000 train_time:503101ms step_avg:85.26ms +[2025-08-22 11:16:10] [Rank 0] step:5901/10000 train_time:503101ms step_avg:85.26ms +[2025-08-22 11:16:12] [Rank 0] step:5921/10000 train_time:504884ms step_avg:85.27ms +[2025-08-22 11:16:12] [Rank 0] step:5921/10000 train_time:504884ms step_avg:85.27ms +[2025-08-22 11:16:14] [Rank 0] step:5941/10000 train_time:506669ms step_avg:85.28ms +[2025-08-22 11:16:14] [Rank 0] step:5941/10000 train_time:506669ms step_avg:85.28ms +[2025-08-22 11:16:16] [Rank 0] step:5961/10000 train_time:508453ms step_avg:85.30ms +[2025-08-22 11:16:16] [Rank 0] step:5961/10000 train_time:508453ms step_avg:85.30ms +[2025-08-22 11:16:18] [Rank 0] step:5981/10000 train_time:510238ms step_avg:85.31ms +[2025-08-22 11:16:18] [Rank 0] step:5981/10000 train_time:510238ms step_avg:85.31ms +[2025-08-22 11:16:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:16:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:16:33] [Rank 0] PRINT: step:6000/10000 val_loss:4.1366 svd_entropy: attn_qk:H=0.6563,top10E=0.39,eRank=91.3,q75/q25=97.01 attn_vo:H=0.5127,top10E=0.63,eRank=43.7,q75/q25=124.23 mlp_w1:H=0.6978,top10E=0.36,eRank=131.3,q75/q25=13.31 mlp_w2:H=0.8265,top10E=0.21,eRank=262.8,q75/q25=14.65 vo_prod:H=0.4137,top10E=0.78,eRank=22.7,q75/q25=11410.49 train_time:512330ms step_avg:85.39ms +[2025-08-22 11:16:33] [Rank 0] PRINT: step:6000/10000 val_loss:4.1366 svd_entropy: attn_qk:H=0.6563,top10E=0.39,eRank=91.3,q75/q25=97.01 attn_vo:H=0.5127,top10E=0.63,eRank=43.7,q75/q25=124.23 mlp_w1:H=0.6978,top10E=0.36,eRank=131.3,q75/q25=13.31 mlp_w2:H=0.8265,top10E=0.21,eRank=262.8,q75/q25=14.65 vo_prod:H=0.4137,top10E=0.78,eRank=22.7,q75/q25=11410.49 train_time:512330ms step_avg:85.39ms +[2025-08-22 11:16:33] [Rank 0] step:6001/10000 train_time:512340ms step_avg:85.38ms +[2025-08-22 11:16:33] [Rank 0] step:6001/10000 train_time:512340ms step_avg:85.38ms +[2025-08-22 11:16:35] [Rank 0] step:6021/10000 train_time:513838ms step_avg:85.34ms +[2025-08-22 11:16:35] [Rank 0] step:6021/10000 train_time:513838ms step_avg:85.34ms +[2025-08-22 11:16:36] [Rank 0] step:6041/10000 train_time:515622ms step_avg:85.35ms +[2025-08-22 11:16:36] [Rank 0] step:6041/10000 train_time:515622ms step_avg:85.35ms +[2025-08-22 11:16:38] [Rank 0] step:6061/10000 train_time:517411ms step_avg:85.37ms +[2025-08-22 11:16:38] [Rank 0] step:6061/10000 train_time:517411ms step_avg:85.37ms +[2025-08-22 11:16:40] [Rank 0] step:6081/10000 train_time:519194ms step_avg:85.38ms +[2025-08-22 11:16:40] [Rank 0] step:6081/10000 train_time:519194ms step_avg:85.38ms +[2025-08-22 11:16:42] [Rank 0] step:6101/10000 train_time:520984ms step_avg:85.39ms +[2025-08-22 11:16:42] [Rank 0] step:6101/10000 train_time:520984ms step_avg:85.39ms +[2025-08-22 11:16:44] [Rank 0] step:6121/10000 train_time:523030ms step_avg:85.45ms +[2025-08-22 11:16:44] [Rank 0] step:6121/10000 train_time:523030ms step_avg:85.45ms +[2025-08-22 
11:16:46] [Rank 0] step:6141/10000 train_time:524828ms step_avg:85.46ms +[2025-08-22 11:16:46] [Rank 0] step:6141/10000 train_time:524828ms step_avg:85.46ms +[2025-08-22 11:16:47] [Rank 0] step:6161/10000 train_time:526617ms step_avg:85.48ms +[2025-08-22 11:16:47] [Rank 0] step:6161/10000 train_time:526617ms step_avg:85.48ms +[2025-08-22 11:16:49] [Rank 0] step:6181/10000 train_time:528400ms step_avg:85.49ms +[2025-08-22 11:16:49] [Rank 0] step:6181/10000 train_time:528400ms step_avg:85.49ms +[2025-08-22 11:16:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:16:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:17:05] [Rank 0] PRINT: step:6200/10000 val_loss:4.1213 svd_entropy: attn_qk:H=0.6581,top10E=0.38,eRank=92.4,q75/q25=96.94 attn_vo:H=0.5156,top10E=0.62,eRank=44.7,q75/q25=124.82 mlp_w1:H=0.6997,top10E=0.35,eRank=133.2,q75/q25=13.41 mlp_w2:H=0.8284,top10E=0.21,eRank=265.8,q75/q25=14.51 vo_prod:H=0.4168,top10E=0.78,eRank=23.2,q75/q25=11935.15 train_time:530502ms step_avg:85.56ms +[2025-08-22 11:17:05] [Rank 0] PRINT: step:6200/10000 val_loss:4.1213 svd_entropy: attn_qk:H=0.6581,top10E=0.38,eRank=92.4,q75/q25=96.94 attn_vo:H=0.5156,top10E=0.62,eRank=44.7,q75/q25=124.82 mlp_w1:H=0.6997,top10E=0.35,eRank=133.2,q75/q25=13.41 mlp_w2:H=0.8284,top10E=0.21,eRank=265.8,q75/q25=14.51 vo_prod:H=0.4168,top10E=0.78,eRank=23.2,q75/q25=11935.15 train_time:530502ms step_avg:85.56ms +[2025-08-22 11:17:05] [Rank 0] step:6201/10000 train_time:530511ms step_avg:85.55ms +[2025-08-22 11:17:05] [Rank 0] step:6201/10000 train_time:530511ms step_avg:85.55ms +[2025-08-22 11:17:06] [Rank 0] step:6221/10000 train_time:532009ms step_avg:85.52ms +[2025-08-22 11:17:06] [Rank 0] step:6221/10000 train_time:532009ms step_avg:85.52ms +[2025-08-22 11:17:08] [Rank 0] step:6241/10000 train_time:533790ms 
step_avg:85.53ms +[2025-08-22 11:17:08] [Rank 0] step:6241/10000 train_time:533790ms step_avg:85.53ms +[2025-08-22 11:17:10] [Rank 0] step:6261/10000 train_time:535576ms step_avg:85.54ms +[2025-08-22 11:17:10] [Rank 0] step:6261/10000 train_time:535576ms step_avg:85.54ms +[2025-08-22 11:17:12] [Rank 0] step:6281/10000 train_time:537365ms step_avg:85.55ms +[2025-08-22 11:17:12] [Rank 0] step:6281/10000 train_time:537365ms step_avg:85.55ms +[2025-08-22 11:17:14] [Rank 0] step:6301/10000 train_time:539155ms step_avg:85.57ms +[2025-08-22 11:17:14] [Rank 0] step:6301/10000 train_time:539155ms step_avg:85.57ms +[2025-08-22 11:17:15] [Rank 0] step:6321/10000 train_time:540945ms step_avg:85.58ms +[2025-08-22 11:17:15] [Rank 0] step:6321/10000 train_time:540945ms step_avg:85.58ms +[2025-08-22 11:17:17] [Rank 0] step:6341/10000 train_time:542737ms step_avg:85.59ms +[2025-08-22 11:17:17] [Rank 0] step:6341/10000 train_time:542737ms step_avg:85.59ms +[2025-08-22 11:17:19] [Rank 0] step:6361/10000 train_time:544530ms step_avg:85.60ms +[2025-08-22 11:17:19] [Rank 0] step:6361/10000 train_time:544530ms step_avg:85.60ms +[2025-08-22 11:17:21] [Rank 0] step:6381/10000 train_time:546330ms step_avg:85.62ms +[2025-08-22 11:17:21] [Rank 0] step:6381/10000 train_time:546330ms step_avg:85.62ms +[2025-08-22 11:17:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:17:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:17:36] [Rank 0] PRINT: step:6400/10000 val_loss:4.1013 svd_entropy: attn_qk:H=0.6600,top10E=0.38,eRank=93.5,q75/q25=97.68 attn_vo:H=0.5182,top10E=0.62,eRank=45.6,q75/q25=126.10 mlp_w1:H=0.7013,top10E=0.35,eRank=134.9,q75/q25=13.49 mlp_w2:H=0.8300,top10E=0.20,eRank=268.4,q75/q25=14.43 vo_prod:H=0.4193,top10E=0.78,eRank=23.7,q75/q25=12264.81 train_time:548435ms step_avg:85.69ms +[2025-08-22 11:17:36] [Rank 0] PRINT: step:6400/10000 val_loss:4.1013 svd_entropy: attn_qk:H=0.6600,top10E=0.38,eRank=93.5,q75/q25=97.68 attn_vo:H=0.5182,top10E=0.62,eRank=45.6,q75/q25=126.10 mlp_w1:H=0.7013,top10E=0.35,eRank=134.9,q75/q25=13.49 mlp_w2:H=0.8300,top10E=0.20,eRank=268.4,q75/q25=14.43 vo_prod:H=0.4193,top10E=0.78,eRank=23.7,q75/q25=12264.81 train_time:548435ms step_avg:85.69ms +[2025-08-22 11:17:36] [Rank 0] step:6401/10000 train_time:548445ms step_avg:85.68ms +[2025-08-22 11:17:36] [Rank 0] step:6401/10000 train_time:548445ms step_avg:85.68ms +[2025-08-22 11:17:38] [Rank 0] step:6421/10000 train_time:549936ms step_avg:85.65ms +[2025-08-22 11:17:38] [Rank 0] step:6421/10000 train_time:549936ms step_avg:85.65ms +[2025-08-22 11:17:40] [Rank 0] step:6441/10000 train_time:551722ms step_avg:85.66ms +[2025-08-22 11:17:40] [Rank 0] step:6441/10000 train_time:551722ms step_avg:85.66ms +[2025-08-22 11:17:42] [Rank 0] step:6461/10000 train_time:553511ms step_avg:85.67ms +[2025-08-22 11:17:42] [Rank 0] step:6461/10000 train_time:553511ms step_avg:85.67ms +[2025-08-22 11:17:43] [Rank 0] step:6481/10000 train_time:555301ms step_avg:85.68ms +[2025-08-22 11:17:43] [Rank 0] step:6481/10000 train_time:555301ms step_avg:85.68ms +[2025-08-22 11:17:45] [Rank 0] step:6501/10000 train_time:557087ms step_avg:85.69ms +[2025-08-22 11:17:45] [Rank 0] step:6501/10000 train_time:557087ms step_avg:85.69ms +[2025-08-22 11:17:47] [Rank 0] step:6521/10000 train_time:558872ms step_avg:85.70ms +[2025-08-22 11:17:47] [Rank 0] step:6521/10000 train_time:558872ms step_avg:85.70ms +[2025-08-22 
11:17:49] [Rank 0] step:6541/10000 train_time:560662ms step_avg:85.71ms +[2025-08-22 11:17:49] [Rank 0] step:6541/10000 train_time:560662ms step_avg:85.71ms +[2025-08-22 11:17:50] [Rank 0] step:6561/10000 train_time:562454ms step_avg:85.73ms +[2025-08-22 11:17:50] [Rank 0] step:6561/10000 train_time:562454ms step_avg:85.73ms +[2025-08-22 11:17:52] [Rank 0] step:6581/10000 train_time:564238ms step_avg:85.74ms +[2025-08-22 11:17:52] [Rank 0] step:6581/10000 train_time:564238ms step_avg:85.74ms +[2025-08-22 11:17:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:17:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:18:08] [Rank 0] PRINT: step:6600/10000 val_loss:4.0870 svd_entropy: attn_qk:H=0.6617,top10E=0.38,eRank=94.5,q75/q25=97.96 attn_vo:H=0.5205,top10E=0.61,eRank=46.5,q75/q25=127.77 mlp_w1:H=0.7029,top10E=0.35,eRank=136.5,q75/q25=13.54 mlp_w2:H=0.8316,top10E=0.20,eRank=270.9,q75/q25=14.39 vo_prod:H=0.4216,top10E=0.77,eRank=24.1,q75/q25=12740.95 train_time:566341ms step_avg:85.81ms +[2025-08-22 11:18:08] [Rank 0] PRINT: step:6600/10000 val_loss:4.0870 svd_entropy: attn_qk:H=0.6617,top10E=0.38,eRank=94.5,q75/q25=97.96 attn_vo:H=0.5205,top10E=0.61,eRank=46.5,q75/q25=127.77 mlp_w1:H=0.7029,top10E=0.35,eRank=136.5,q75/q25=13.54 mlp_w2:H=0.8316,top10E=0.20,eRank=270.9,q75/q25=14.39 vo_prod:H=0.4216,top10E=0.77,eRank=24.1,q75/q25=12740.95 train_time:566341ms step_avg:85.81ms +[2025-08-22 11:18:08] [Rank 0] step:6601/10000 train_time:566351ms step_avg:85.80ms +[2025-08-22 11:18:08] [Rank 0] step:6601/10000 train_time:566351ms step_avg:85.80ms +[2025-08-22 11:18:09] [Rank 0] step:6621/10000 train_time:567837ms step_avg:85.76ms +[2025-08-22 11:18:09] [Rank 0] step:6621/10000 train_time:567837ms step_avg:85.76ms +[2025-08-22 11:18:11] [Rank 0] step:6641/10000 train_time:569624ms 
step_avg:85.77ms +[2025-08-22 11:18:11] [Rank 0] step:6641/10000 train_time:569624ms step_avg:85.77ms +[2025-08-22 11:18:13] [Rank 0] step:6661/10000 train_time:571408ms step_avg:85.78ms +[2025-08-22 11:18:13] [Rank 0] step:6661/10000 train_time:571408ms step_avg:85.78ms +[2025-08-22 11:18:15] [Rank 0] step:6681/10000 train_time:573206ms step_avg:85.80ms +[2025-08-22 11:18:15] [Rank 0] step:6681/10000 train_time:573206ms step_avg:85.80ms +[2025-08-22 11:18:17] [Rank 0] step:6701/10000 train_time:575023ms step_avg:85.81ms +[2025-08-22 11:18:17] [Rank 0] step:6701/10000 train_time:575023ms step_avg:85.81ms +[2025-08-22 11:18:18] [Rank 0] step:6721/10000 train_time:576844ms step_avg:85.83ms +[2025-08-22 11:18:18] [Rank 0] step:6721/10000 train_time:576844ms step_avg:85.83ms +[2025-08-22 11:18:20] [Rank 0] step:6741/10000 train_time:578655ms step_avg:85.84ms +[2025-08-22 11:18:20] [Rank 0] step:6741/10000 train_time:578655ms step_avg:85.84ms +[2025-08-22 11:18:22] [Rank 0] step:6761/10000 train_time:580468ms step_avg:85.86ms +[2025-08-22 11:18:22] [Rank 0] step:6761/10000 train_time:580468ms step_avg:85.86ms +[2025-08-22 11:18:24] [Rank 0] step:6781/10000 train_time:582288ms step_avg:85.87ms +[2025-08-22 11:18:24] [Rank 0] step:6781/10000 train_time:582288ms step_avg:85.87ms +[2025-08-22 11:18:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:18:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:18:39] [Rank 0] PRINT: step:6800/10000 val_loss:4.0672 svd_entropy: attn_qk:H=0.6632,top10E=0.37,eRank=95.4,q75/q25=98.15 attn_vo:H=0.5228,top10E=0.61,eRank=47.4,q75/q25=128.56 mlp_w1:H=0.7043,top10E=0.35,eRank=138.0,q75/q25=13.61 mlp_w2:H=0.8330,top10E=0.20,eRank=273.2,q75/q25=14.22 vo_prod:H=0.4242,top10E=0.77,eRank=24.6,q75/q25=13001.35 train_time:584422ms step_avg:85.94ms +[2025-08-22 11:18:39] [Rank 0] PRINT: step:6800/10000 val_loss:4.0672 svd_entropy: attn_qk:H=0.6632,top10E=0.37,eRank=95.4,q75/q25=98.15 attn_vo:H=0.5228,top10E=0.61,eRank=47.4,q75/q25=128.56 mlp_w1:H=0.7043,top10E=0.35,eRank=138.0,q75/q25=13.61 mlp_w2:H=0.8330,top10E=0.20,eRank=273.2,q75/q25=14.22 vo_prod:H=0.4242,top10E=0.77,eRank=24.6,q75/q25=13001.35 train_time:584422ms step_avg:85.94ms +[2025-08-22 11:18:39] [Rank 0] step:6801/10000 train_time:584432ms step_avg:85.93ms +[2025-08-22 11:18:39] [Rank 0] step:6801/10000 train_time:584432ms step_avg:85.93ms +[2025-08-22 11:18:41] [Rank 0] step:6821/10000 train_time:585940ms step_avg:85.90ms +[2025-08-22 11:18:41] [Rank 0] step:6821/10000 train_time:585940ms step_avg:85.90ms +[2025-08-22 11:18:43] [Rank 0] step:6841/10000 train_time:587744ms step_avg:85.91ms +[2025-08-22 11:18:43] [Rank 0] step:6841/10000 train_time:587744ms step_avg:85.91ms +[2025-08-22 11:18:45] [Rank 0] step:6861/10000 train_time:589561ms step_avg:85.93ms +[2025-08-22 11:18:45] [Rank 0] step:6861/10000 train_time:589561ms step_avg:85.93ms +[2025-08-22 11:18:47] [Rank 0] step:6881/10000 train_time:591373ms step_avg:85.94ms +[2025-08-22 11:18:47] [Rank 0] step:6881/10000 train_time:591373ms step_avg:85.94ms +[2025-08-22 11:18:48] [Rank 0] step:6901/10000 train_time:593186ms step_avg:85.96ms +[2025-08-22 11:18:48] [Rank 0] step:6901/10000 train_time:593186ms step_avg:85.96ms +[2025-08-22 11:18:50] [Rank 0] step:6921/10000 train_time:594991ms step_avg:85.97ms +[2025-08-22 11:18:50] [Rank 0] step:6921/10000 train_time:594991ms step_avg:85.97ms +[2025-08-22 
11:18:52] [Rank 0] step:6941/10000 train_time:596814ms step_avg:85.98ms +[2025-08-22 11:18:52] [Rank 0] step:6941/10000 train_time:596814ms step_avg:85.98ms +[2025-08-22 11:18:54] [Rank 0] step:6961/10000 train_time:598645ms step_avg:86.00ms +[2025-08-22 11:18:54] [Rank 0] step:6961/10000 train_time:598645ms step_avg:86.00ms +[2025-08-22 11:18:56] [Rank 0] step:6981/10000 train_time:600468ms step_avg:86.01ms +[2025-08-22 11:18:56] [Rank 0] step:6981/10000 train_time:600468ms step_avg:86.01ms +[2025-08-22 11:18:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:18:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:19:11] [Rank 0] PRINT: step:7000/10000 val_loss:4.0493 svd_entropy: attn_qk:H=0.6645,top10E=0.37,eRank=96.3,q75/q25=98.60 attn_vo:H=0.5249,top10E=0.60,eRank=48.1,q75/q25=128.95 mlp_w1:H=0.7057,top10E=0.34,eRank=139.3,q75/q25=13.65 mlp_w2:H=0.8343,top10E=0.20,eRank=275.4,q75/q25=14.09 vo_prod:H=0.4262,top10E=0.77,eRank=25.0,q75/q25=13469.73 train_time:602605ms step_avg:86.09ms +[2025-08-22 11:19:11] [Rank 0] PRINT: step:7000/10000 val_loss:4.0493 svd_entropy: attn_qk:H=0.6645,top10E=0.37,eRank=96.3,q75/q25=98.60 attn_vo:H=0.5249,top10E=0.60,eRank=48.1,q75/q25=128.95 mlp_w1:H=0.7057,top10E=0.34,eRank=139.3,q75/q25=13.65 mlp_w2:H=0.8343,top10E=0.20,eRank=275.4,q75/q25=14.09 vo_prod:H=0.4262,top10E=0.77,eRank=25.0,q75/q25=13469.73 train_time:602605ms step_avg:86.09ms +[2025-08-22 11:19:11] [Rank 0] step:7001/10000 train_time:602615ms step_avg:86.08ms +[2025-08-22 11:19:11] [Rank 0] step:7001/10000 train_time:602615ms step_avg:86.08ms +[2025-08-22 11:19:13] [Rank 0] step:7021/10000 train_time:604140ms step_avg:86.05ms +[2025-08-22 11:19:13] [Rank 0] step:7021/10000 train_time:604140ms step_avg:86.05ms +[2025-08-22 11:19:15] [Rank 0] step:7041/10000 train_time:605957ms 
step_avg:86.06ms +[2025-08-22 11:19:15] [Rank 0] step:7041/10000 train_time:605957ms step_avg:86.06ms +[2025-08-22 11:19:17] [Rank 0] step:7061/10000 train_time:607769ms step_avg:86.07ms +[2025-08-22 11:19:17] [Rank 0] step:7061/10000 train_time:607769ms step_avg:86.07ms +[2025-08-22 11:19:18] [Rank 0] step:7081/10000 train_time:609592ms step_avg:86.09ms +[2025-08-22 11:19:18] [Rank 0] step:7081/10000 train_time:609592ms step_avg:86.09ms +[2025-08-22 11:19:20] [Rank 0] step:7101/10000 train_time:611411ms step_avg:86.10ms +[2025-08-22 11:19:20] [Rank 0] step:7101/10000 train_time:611411ms step_avg:86.10ms +[2025-08-22 11:19:22] [Rank 0] step:7121/10000 train_time:613231ms step_avg:86.12ms +[2025-08-22 11:19:22] [Rank 0] step:7121/10000 train_time:613231ms step_avg:86.12ms +[2025-08-22 11:19:24] [Rank 0] step:7141/10000 train_time:615052ms step_avg:86.13ms +[2025-08-22 11:19:24] [Rank 0] step:7141/10000 train_time:615052ms step_avg:86.13ms +[2025-08-22 11:19:26] [Rank 0] step:7161/10000 train_time:616876ms step_avg:86.14ms +[2025-08-22 11:19:26] [Rank 0] step:7161/10000 train_time:616876ms step_avg:86.14ms +[2025-08-22 11:19:28] [Rank 0] step:7181/10000 train_time:618693ms step_avg:86.16ms +[2025-08-22 11:19:28] [Rank 0] step:7181/10000 train_time:618693ms step_avg:86.16ms +[2025-08-22 11:19:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:19:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:19:43] [Rank 0] PRINT: step:7200/10000 val_loss:4.0340 svd_entropy: attn_qk:H=0.6658,top10E=0.37,eRank=97.1,q75/q25=98.36 attn_vo:H=0.5267,top10E=0.60,eRank=48.9,q75/q25=129.96 mlp_w1:H=0.7070,top10E=0.34,eRank=140.6,q75/q25=13.72 mlp_w2:H=0.8355,top10E=0.20,eRank=277.4,q75/q25=13.98 vo_prod:H=0.4280,top10E=0.76,eRank=25.3,q75/q25=13530.18 train_time:620842ms step_avg:86.23ms +[2025-08-22 11:19:43] [Rank 0] PRINT: step:7200/10000 val_loss:4.0340 svd_entropy: attn_qk:H=0.6658,top10E=0.37,eRank=97.1,q75/q25=98.36 attn_vo:H=0.5267,top10E=0.60,eRank=48.9,q75/q25=129.96 mlp_w1:H=0.7070,top10E=0.34,eRank=140.6,q75/q25=13.72 mlp_w2:H=0.8355,top10E=0.20,eRank=277.4,q75/q25=13.98 vo_prod:H=0.4280,top10E=0.76,eRank=25.3,q75/q25=13530.18 train_time:620842ms step_avg:86.23ms +[2025-08-22 11:19:43] [Rank 0] step:7201/10000 train_time:620851ms step_avg:86.22ms +[2025-08-22 11:19:43] [Rank 0] step:7201/10000 train_time:620851ms step_avg:86.22ms +[2025-08-22 11:19:45] [Rank 0] step:7221/10000 train_time:622369ms step_avg:86.19ms +[2025-08-22 11:19:45] [Rank 0] step:7221/10000 train_time:622369ms step_avg:86.19ms +[2025-08-22 11:19:47] [Rank 0] step:7241/10000 train_time:624241ms step_avg:86.21ms +[2025-08-22 11:19:47] [Rank 0] step:7241/10000 train_time:624241ms step_avg:86.21ms +[2025-08-22 11:19:48] [Rank 0] step:7261/10000 train_time:626067ms step_avg:86.22ms +[2025-08-22 11:19:48] [Rank 0] step:7261/10000 train_time:626067ms step_avg:86.22ms +[2025-08-22 11:19:50] [Rank 0] step:7281/10000 train_time:627888ms step_avg:86.24ms +[2025-08-22 11:19:50] [Rank 0] step:7281/10000 train_time:627888ms step_avg:86.24ms +[2025-08-22 11:19:52] [Rank 0] step:7301/10000 train_time:629706ms step_avg:86.25ms +[2025-08-22 11:19:52] [Rank 0] step:7301/10000 train_time:629706ms step_avg:86.25ms +[2025-08-22 11:19:54] [Rank 0] step:7321/10000 train_time:631529ms step_avg:86.26ms +[2025-08-22 11:19:54] [Rank 0] step:7321/10000 train_time:631529ms step_avg:86.26ms +[2025-08-22 
11:19:56] [Rank 0] step:7341/10000 train_time:633346ms step_avg:86.28ms +[2025-08-22 11:19:56] [Rank 0] step:7341/10000 train_time:633346ms step_avg:86.28ms +[2025-08-22 11:19:58] [Rank 0] step:7361/10000 train_time:635170ms step_avg:86.29ms +[2025-08-22 11:19:58] [Rank 0] step:7361/10000 train_time:635170ms step_avg:86.29ms +[2025-08-22 11:19:59] [Rank 0] step:7381/10000 train_time:636994ms step_avg:86.30ms +[2025-08-22 11:19:59] [Rank 0] step:7381/10000 train_time:636994ms step_avg:86.30ms +[2025-08-22 11:20:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:20:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:20:15] [Rank 0] PRINT: step:7400/10000 val_loss:4.0069 svd_entropy: attn_qk:H=0.6670,top10E=0.37,eRank=97.8,q75/q25=98.59 attn_vo:H=0.5285,top10E=0.60,eRank=49.5,q75/q25=130.46 mlp_w1:H=0.7081,top10E=0.34,eRank=141.8,q75/q25=13.75 mlp_w2:H=0.8367,top10E=0.20,eRank=279.2,q75/q25=13.91 vo_prod:H=0.4299,top10E=0.76,eRank=25.7,q75/q25=13644.84 train_time:639112ms step_avg:86.37ms +[2025-08-22 11:20:15] [Rank 0] PRINT: step:7400/10000 val_loss:4.0069 svd_entropy: attn_qk:H=0.6670,top10E=0.37,eRank=97.8,q75/q25=98.59 attn_vo:H=0.5285,top10E=0.60,eRank=49.5,q75/q25=130.46 mlp_w1:H=0.7081,top10E=0.34,eRank=141.8,q75/q25=13.75 mlp_w2:H=0.8367,top10E=0.20,eRank=279.2,q75/q25=13.91 vo_prod:H=0.4299,top10E=0.76,eRank=25.7,q75/q25=13644.84 train_time:639112ms step_avg:86.37ms +[2025-08-22 11:20:15] [Rank 0] step:7401/10000 train_time:639121ms step_avg:86.36ms +[2025-08-22 11:20:15] [Rank 0] step:7401/10000 train_time:639121ms step_avg:86.36ms +[2025-08-22 11:20:17] [Rank 0] step:7421/10000 train_time:640635ms step_avg:86.33ms +[2025-08-22 11:20:17] [Rank 0] step:7421/10000 train_time:640635ms step_avg:86.33ms +[2025-08-22 11:20:18] [Rank 0] step:7441/10000 train_time:642448ms 
step_avg:86.34ms +[2025-08-22 11:20:18] [Rank 0] step:7441/10000 train_time:642448ms step_avg:86.34ms +[2025-08-22 11:20:20] [Rank 0] step:7461/10000 train_time:644264ms step_avg:86.35ms +[2025-08-22 11:20:20] [Rank 0] step:7461/10000 train_time:644264ms step_avg:86.35ms +[2025-08-22 11:20:22] [Rank 0] step:7481/10000 train_time:646085ms step_avg:86.36ms +[2025-08-22 11:20:22] [Rank 0] step:7481/10000 train_time:646085ms step_avg:86.36ms +[2025-08-22 11:20:24] [Rank 0] step:7501/10000 train_time:647905ms step_avg:86.38ms +[2025-08-22 11:20:24] [Rank 0] step:7501/10000 train_time:647905ms step_avg:86.38ms +[2025-08-22 11:20:26] [Rank 0] step:7521/10000 train_time:649722ms step_avg:86.39ms +[2025-08-22 11:20:26] [Rank 0] step:7521/10000 train_time:649722ms step_avg:86.39ms +[2025-08-22 11:20:28] [Rank 0] step:7541/10000 train_time:651557ms step_avg:86.40ms +[2025-08-22 11:20:28] [Rank 0] step:7541/10000 train_time:651557ms step_avg:86.40ms +[2025-08-22 11:20:29] [Rank 0] step:7561/10000 train_time:653373ms step_avg:86.41ms +[2025-08-22 11:20:29] [Rank 0] step:7561/10000 train_time:653373ms step_avg:86.41ms +[2025-08-22 11:20:31] [Rank 0] step:7581/10000 train_time:655202ms step_avg:86.43ms +[2025-08-22 11:20:31] [Rank 0] step:7581/10000 train_time:655202ms step_avg:86.43ms +[2025-08-22 11:20:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:20:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:20:47] [Rank 0] PRINT: step:7600/10000 val_loss:4.0061 svd_entropy: attn_qk:H=0.6681,top10E=0.37,eRank=98.5,q75/q25=99.23 attn_vo:H=0.5301,top10E=0.59,eRank=50.1,q75/q25=130.10 mlp_w1:H=0.7091,top10E=0.34,eRank=142.8,q75/q25=13.74 mlp_w2:H=0.8376,top10E=0.20,eRank=280.9,q75/q25=13.82 vo_prod:H=0.4314,top10E=0.76,eRank=26.0,q75/q25=13422.90 train_time:657350ms step_avg:86.49ms +[2025-08-22 11:20:47] [Rank 0] PRINT: step:7600/10000 val_loss:4.0061 svd_entropy: attn_qk:H=0.6681,top10E=0.37,eRank=98.5,q75/q25=99.23 attn_vo:H=0.5301,top10E=0.59,eRank=50.1,q75/q25=130.10 mlp_w1:H=0.7091,top10E=0.34,eRank=142.8,q75/q25=13.74 mlp_w2:H=0.8376,top10E=0.20,eRank=280.9,q75/q25=13.82 vo_prod:H=0.4314,top10E=0.76,eRank=26.0,q75/q25=13422.90 train_time:657350ms step_avg:86.49ms +[2025-08-22 11:20:47] [Rank 0] step:7601/10000 train_time:657360ms step_avg:86.48ms +[2025-08-22 11:20:47] [Rank 0] step:7601/10000 train_time:657360ms step_avg:86.48ms +[2025-08-22 11:20:48] [Rank 0] step:7621/10000 train_time:658874ms step_avg:86.46ms +[2025-08-22 11:20:48] [Rank 0] step:7621/10000 train_time:658874ms step_avg:86.46ms +[2025-08-22 11:20:50] [Rank 0] step:7641/10000 train_time:660713ms step_avg:86.47ms +[2025-08-22 11:20:50] [Rank 0] step:7641/10000 train_time:660713ms step_avg:86.47ms +[2025-08-22 11:20:52] [Rank 0] step:7661/10000 train_time:662550ms step_avg:86.48ms +[2025-08-22 11:20:52] [Rank 0] step:7661/10000 train_time:662550ms step_avg:86.48ms +[2025-08-22 11:20:54] [Rank 0] step:7681/10000 train_time:664372ms step_avg:86.50ms +[2025-08-22 11:20:54] [Rank 0] step:7681/10000 train_time:664372ms step_avg:86.50ms +[2025-08-22 11:20:56] [Rank 0] step:7701/10000 train_time:666193ms step_avg:86.51ms +[2025-08-22 11:20:56] [Rank 0] step:7701/10000 train_time:666193ms step_avg:86.51ms +[2025-08-22 11:20:58] [Rank 0] step:7721/10000 train_time:668032ms step_avg:86.52ms +[2025-08-22 11:20:58] [Rank 0] step:7721/10000 train_time:668032ms step_avg:86.52ms +[2025-08-22 
11:20:59] [Rank 0] step:7741/10000 train_time:669860ms step_avg:86.53ms +[2025-08-22 11:20:59] [Rank 0] step:7741/10000 train_time:669860ms step_avg:86.53ms +[2025-08-22 11:21:01] [Rank 0] step:7761/10000 train_time:671686ms step_avg:86.55ms +[2025-08-22 11:21:01] [Rank 0] step:7761/10000 train_time:671686ms step_avg:86.55ms +[2025-08-22 11:21:03] [Rank 0] step:7781/10000 train_time:673521ms step_avg:86.56ms +[2025-08-22 11:21:03] [Rank 0] step:7781/10000 train_time:673521ms step_avg:86.56ms +[2025-08-22 11:21:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:21:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:21:18] [Rank 0] PRINT: step:7800/10000 val_loss:3.9852 svd_entropy: attn_qk:H=0.6691,top10E=0.37,eRank=99.2,q75/q25=98.94 attn_vo:H=0.5315,top10E=0.59,eRank=50.7,q75/q25=131.04 mlp_w1:H=0.7100,top10E=0.34,eRank=143.8,q75/q25=13.74 mlp_w2:H=0.8386,top10E=0.19,eRank=282.4,q75/q25=13.72 vo_prod:H=0.4327,top10E=0.75,eRank=26.3,q75/q25=13613.22 train_time:675675ms step_avg:86.62ms +[2025-08-22 11:21:18] [Rank 0] PRINT: step:7800/10000 val_loss:3.9852 svd_entropy: attn_qk:H=0.6691,top10E=0.37,eRank=99.2,q75/q25=98.94 attn_vo:H=0.5315,top10E=0.59,eRank=50.7,q75/q25=131.04 mlp_w1:H=0.7100,top10E=0.34,eRank=143.8,q75/q25=13.74 mlp_w2:H=0.8386,top10E=0.19,eRank=282.4,q75/q25=13.72 vo_prod:H=0.4327,top10E=0.75,eRank=26.3,q75/q25=13613.22 train_time:675675ms step_avg:86.62ms +[2025-08-22 11:21:19] [Rank 0] step:7801/10000 train_time:675684ms step_avg:86.62ms +[2025-08-22 11:21:19] [Rank 0] step:7801/10000 train_time:675684ms step_avg:86.62ms +[2025-08-22 11:21:20] [Rank 0] step:7821/10000 train_time:677190ms step_avg:86.59ms +[2025-08-22 11:21:20] [Rank 0] step:7821/10000 train_time:677190ms step_avg:86.59ms +[2025-08-22 11:21:22] [Rank 0] step:7841/10000 train_time:679009ms 
step_avg:86.60ms +[2025-08-22 11:21:22] [Rank 0] step:7841/10000 train_time:679009ms step_avg:86.60ms +[2025-08-22 11:21:24] [Rank 0] step:7861/10000 train_time:680870ms step_avg:86.61ms +[2025-08-22 11:21:24] [Rank 0] step:7861/10000 train_time:680870ms step_avg:86.61ms +[2025-08-22 11:21:26] [Rank 0] step:7881/10000 train_time:682697ms step_avg:86.63ms +[2025-08-22 11:21:26] [Rank 0] step:7881/10000 train_time:682697ms step_avg:86.63ms +[2025-08-22 11:21:28] [Rank 0] step:7901/10000 train_time:684512ms step_avg:86.64ms +[2025-08-22 11:21:28] [Rank 0] step:7901/10000 train_time:684512ms step_avg:86.64ms +[2025-08-22 11:21:30] [Rank 0] step:7921/10000 train_time:686337ms step_avg:86.65ms +[2025-08-22 11:21:30] [Rank 0] step:7921/10000 train_time:686337ms step_avg:86.65ms +[2025-08-22 11:21:31] [Rank 0] step:7941/10000 train_time:688165ms step_avg:86.66ms +[2025-08-22 11:21:31] [Rank 0] step:7941/10000 train_time:688165ms step_avg:86.66ms +[2025-08-22 11:21:33] [Rank 0] step:7961/10000 train_time:689991ms step_avg:86.67ms +[2025-08-22 11:21:33] [Rank 0] step:7961/10000 train_time:689991ms step_avg:86.67ms +[2025-08-22 11:21:35] [Rank 0] step:7981/10000 train_time:691809ms step_avg:86.68ms +[2025-08-22 11:21:35] [Rank 0] step:7981/10000 train_time:691809ms step_avg:86.68ms +[2025-08-22 11:21:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:21:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:21:50] [Rank 0] PRINT: step:8000/10000 val_loss:3.9644 svd_entropy: attn_qk:H=0.6700,top10E=0.36,eRank=99.8,q75/q25=98.71 attn_vo:H=0.5328,top10E=0.59,eRank=51.2,q75/q25=131.43 mlp_w1:H=0.7109,top10E=0.34,eRank=144.7,q75/q25=13.76 mlp_w2:H=0.8394,top10E=0.19,eRank=283.9,q75/q25=13.65 vo_prod:H=0.4342,top10E=0.75,eRank=26.6,q75/q25=13836.93 train_time:693949ms step_avg:86.74ms +[2025-08-22 11:21:50] [Rank 0] PRINT: step:8000/10000 val_loss:3.9644 svd_entropy: attn_qk:H=0.6700,top10E=0.36,eRank=99.8,q75/q25=98.71 attn_vo:H=0.5328,top10E=0.59,eRank=51.2,q75/q25=131.43 mlp_w1:H=0.7109,top10E=0.34,eRank=144.7,q75/q25=13.76 mlp_w2:H=0.8394,top10E=0.19,eRank=283.9,q75/q25=13.65 vo_prod:H=0.4342,top10E=0.75,eRank=26.6,q75/q25=13836.93 train_time:693949ms step_avg:86.74ms +[2025-08-22 11:21:50] [Rank 0] step:8001/10000 train_time:693958ms step_avg:86.73ms +[2025-08-22 11:21:50] [Rank 0] step:8001/10000 train_time:693958ms step_avg:86.73ms +[2025-08-22 11:21:52] [Rank 0] step:8021/10000 train_time:695474ms step_avg:86.71ms +[2025-08-22 11:21:52] [Rank 0] step:8021/10000 train_time:695474ms step_avg:86.71ms +[2025-08-22 11:21:54] [Rank 0] step:8041/10000 train_time:697291ms step_avg:86.72ms +[2025-08-22 11:21:54] [Rank 0] step:8041/10000 train_time:697291ms step_avg:86.72ms +[2025-08-22 11:21:56] [Rank 0] step:8061/10000 train_time:699109ms step_avg:86.73ms +[2025-08-22 11:21:56] [Rank 0] step:8061/10000 train_time:699109ms step_avg:86.73ms +[2025-08-22 11:21:58] [Rank 0] step:8081/10000 train_time:700923ms step_avg:86.74ms +[2025-08-22 11:21:58] [Rank 0] step:8081/10000 train_time:700923ms step_avg:86.74ms +[2025-08-22 11:22:00] [Rank 0] step:8101/10000 train_time:702749ms step_avg:86.75ms +[2025-08-22 11:22:00] [Rank 0] step:8101/10000 train_time:702749ms step_avg:86.75ms +[2025-08-22 11:22:01] [Rank 0] step:8121/10000 train_time:704567ms step_avg:86.76ms +[2025-08-22 11:22:01] [Rank 0] step:8121/10000 train_time:704567ms step_avg:86.76ms +[2025-08-22 
11:22:04] [Rank 0] step:8141/10000 train_time:706391ms step_avg:86.77ms +[2025-08-22 11:22:04] [Rank 0] step:8141/10000 train_time:706391ms step_avg:86.77ms +[2025-08-22 11:22:05] [Rank 0] step:8161/10000 train_time:708567ms step_avg:86.82ms +[2025-08-22 11:22:05] [Rank 0] step:8161/10000 train_time:708567ms step_avg:86.82ms +[2025-08-22 11:22:07] [Rank 0] step:8181/10000 train_time:710426ms step_avg:86.84ms +[2025-08-22 11:22:07] [Rank 0] step:8181/10000 train_time:710426ms step_avg:86.84ms +[2025-08-22 11:22:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:22:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:22:23] [Rank 0] PRINT: step:8200/10000 val_loss:3.9524 svd_entropy: attn_qk:H=0.6709,top10E=0.36,eRank=100.3,q75/q25=99.36 attn_vo:H=0.5340,top10E=0.59,eRank=51.7,q75/q25=131.41 mlp_w1:H=0.7116,top10E=0.34,eRank=145.5,q75/q25=13.73 mlp_w2:H=0.8402,top10E=0.19,eRank=285.2,q75/q25=13.58 vo_prod:H=0.4354,top10E=0.75,eRank=26.8,q75/q25=13847.77 train_time:712617ms step_avg:86.90ms +[2025-08-22 11:22:23] [Rank 0] PRINT: step:8200/10000 val_loss:3.9524 svd_entropy: attn_qk:H=0.6709,top10E=0.36,eRank=100.3,q75/q25=99.36 attn_vo:H=0.5340,top10E=0.59,eRank=51.7,q75/q25=131.41 mlp_w1:H=0.7116,top10E=0.34,eRank=145.5,q75/q25=13.73 mlp_w2:H=0.8402,top10E=0.19,eRank=285.2,q75/q25=13.58 vo_prod:H=0.4354,top10E=0.75,eRank=26.8,q75/q25=13847.77 train_time:712617ms step_avg:86.90ms +[2025-08-22 11:22:23] [Rank 0] step:8201/10000 train_time:712627ms step_avg:86.90ms +[2025-08-22 11:22:23] [Rank 0] step:8201/10000 train_time:712627ms step_avg:86.90ms +[2025-08-22 11:22:25] [Rank 0] step:8221/10000 train_time:714168ms step_avg:86.87ms +[2025-08-22 11:22:25] [Rank 0] step:8221/10000 train_time:714168ms step_avg:86.87ms +[2025-08-22 11:22:26] [Rank 0] step:8241/10000 train_time:716022ms 
step_avg:86.89ms +[2025-08-22 11:22:26] [Rank 0] step:8241/10000 train_time:716022ms step_avg:86.89ms +[2025-08-22 11:22:28] [Rank 0] step:8261/10000 train_time:717869ms step_avg:86.90ms +[2025-08-22 11:22:28] [Rank 0] step:8261/10000 train_time:717869ms step_avg:86.90ms +[2025-08-22 11:22:30] [Rank 0] step:8281/10000 train_time:719725ms step_avg:86.91ms +[2025-08-22 11:22:30] [Rank 0] step:8281/10000 train_time:719725ms step_avg:86.91ms +[2025-08-22 11:22:32] [Rank 0] step:8301/10000 train_time:721567ms step_avg:86.93ms +[2025-08-22 11:22:32] [Rank 0] step:8301/10000 train_time:721567ms step_avg:86.93ms +[2025-08-22 11:22:34] [Rank 0] step:8321/10000 train_time:723414ms step_avg:86.94ms +[2025-08-22 11:22:34] [Rank 0] step:8321/10000 train_time:723414ms step_avg:86.94ms +[2025-08-22 11:22:36] [Rank 0] step:8341/10000 train_time:725263ms step_avg:86.95ms +[2025-08-22 11:22:36] [Rank 0] step:8341/10000 train_time:725263ms step_avg:86.95ms +[2025-08-22 11:22:38] [Rank 0] step:8361/10000 train_time:727116ms step_avg:86.97ms +[2025-08-22 11:22:38] [Rank 0] step:8361/10000 train_time:727116ms step_avg:86.97ms +[2025-08-22 11:22:39] [Rank 0] step:8381/10000 train_time:728967ms step_avg:86.98ms +[2025-08-22 11:22:39] [Rank 0] step:8381/10000 train_time:728967ms step_avg:86.98ms +[2025-08-22 11:22:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:22:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:22:55] [Rank 0] PRINT: step:8400/10000 val_loss:3.9379 svd_entropy: attn_qk:H=0.6716,top10E=0.36,eRank=100.8,q75/q25=99.36 attn_vo:H=0.5351,top10E=0.58,eRank=52.1,q75/q25=132.09 mlp_w1:H=0.7123,top10E=0.34,eRank=146.2,q75/q25=13.74 mlp_w2:H=0.8410,top10E=0.19,eRank=286.5,q75/q25=13.56 vo_prod:H=0.4366,top10E=0.75,eRank=27.0,q75/q25=14097.18 train_time:731136ms step_avg:87.04ms +[2025-08-22 11:22:55] [Rank 0] PRINT: step:8400/10000 val_loss:3.9379 svd_entropy: attn_qk:H=0.6716,top10E=0.36,eRank=100.8,q75/q25=99.36 attn_vo:H=0.5351,top10E=0.58,eRank=52.1,q75/q25=132.09 mlp_w1:H=0.7123,top10E=0.34,eRank=146.2,q75/q25=13.74 mlp_w2:H=0.8410,top10E=0.19,eRank=286.5,q75/q25=13.56 vo_prod:H=0.4366,top10E=0.75,eRank=27.0,q75/q25=14097.18 train_time:731136ms step_avg:87.04ms +[2025-08-22 11:22:55] [Rank 0] step:8401/10000 train_time:731146ms step_avg:87.03ms +[2025-08-22 11:22:55] [Rank 0] step:8401/10000 train_time:731146ms step_avg:87.03ms +[2025-08-22 11:22:57] [Rank 0] step:8421/10000 train_time:732690ms step_avg:87.01ms +[2025-08-22 11:22:57] [Rank 0] step:8421/10000 train_time:732690ms step_avg:87.01ms +[2025-08-22 11:22:59] [Rank 0] step:8441/10000 train_time:734543ms step_avg:87.02ms +[2025-08-22 11:22:59] [Rank 0] step:8441/10000 train_time:734543ms step_avg:87.02ms +[2025-08-22 11:23:00] [Rank 0] step:8461/10000 train_time:736385ms step_avg:87.03ms +[2025-08-22 11:23:00] [Rank 0] step:8461/10000 train_time:736385ms step_avg:87.03ms +[2025-08-22 11:23:02] [Rank 0] step:8481/10000 train_time:738241ms step_avg:87.05ms +[2025-08-22 11:23:02] [Rank 0] step:8481/10000 train_time:738241ms step_avg:87.05ms +[2025-08-22 11:23:04] [Rank 0] step:8501/10000 train_time:740108ms step_avg:87.06ms +[2025-08-22 11:23:04] [Rank 0] step:8501/10000 train_time:740108ms step_avg:87.06ms +[2025-08-22 11:23:06] [Rank 0] step:8521/10000 train_time:741970ms step_avg:87.08ms +[2025-08-22 11:23:06] [Rank 0] step:8521/10000 train_time:741970ms step_avg:87.08ms +[2025-08-22 
11:23:08] [Rank 0] step:8541/10000 train_time:743830ms step_avg:87.09ms +[2025-08-22 11:23:08] [Rank 0] step:8541/10000 train_time:743830ms step_avg:87.09ms +[2025-08-22 11:23:10] [Rank 0] step:8561/10000 train_time:745694ms step_avg:87.10ms +[2025-08-22 11:23:10] [Rank 0] step:8561/10000 train_time:745694ms step_avg:87.10ms +[2025-08-22 11:23:12] [Rank 0] step:8581/10000 train_time:747552ms step_avg:87.12ms +[2025-08-22 11:23:12] [Rank 0] step:8581/10000 train_time:747552ms step_avg:87.12ms +[2025-08-22 11:23:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:23:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:23:27] [Rank 0] PRINT: step:8600/10000 val_loss:3.9272 svd_entropy: attn_qk:H=0.6723,top10E=0.36,eRank=101.2,q75/q25=99.27 attn_vo:H=0.5361,top10E=0.58,eRank=52.5,q75/q25=132.59 mlp_w1:H=0.7129,top10E=0.33,eRank=146.8,q75/q25=13.74 mlp_w2:H=0.8416,top10E=0.19,eRank=287.6,q75/q25=13.47 vo_prod:H=0.4374,top10E=0.75,eRank=27.2,q75/q25=14218.88 train_time:749721ms step_avg:87.18ms +[2025-08-22 11:23:27] [Rank 0] PRINT: step:8600/10000 val_loss:3.9272 svd_entropy: attn_qk:H=0.6723,top10E=0.36,eRank=101.2,q75/q25=99.27 attn_vo:H=0.5361,top10E=0.58,eRank=52.5,q75/q25=132.59 mlp_w1:H=0.7129,top10E=0.33,eRank=146.8,q75/q25=13.74 mlp_w2:H=0.8416,top10E=0.19,eRank=287.6,q75/q25=13.47 vo_prod:H=0.4374,top10E=0.75,eRank=27.2,q75/q25=14218.88 train_time:749721ms step_avg:87.18ms +[2025-08-22 11:23:27] [Rank 0] step:8601/10000 train_time:749730ms step_avg:87.17ms +[2025-08-22 11:23:27] [Rank 0] step:8601/10000 train_time:749730ms step_avg:87.17ms +[2025-08-22 11:23:29] [Rank 0] step:8621/10000 train_time:751272ms step_avg:87.14ms +[2025-08-22 11:23:29] [Rank 0] step:8621/10000 train_time:751272ms step_avg:87.14ms +[2025-08-22 11:23:31] [Rank 0] step:8641/10000 train_time:753115ms 
step_avg:87.16ms +[2025-08-22 11:23:31] [Rank 0] step:8641/10000 train_time:753115ms step_avg:87.16ms +[2025-08-22 11:23:33] [Rank 0] step:8661/10000 train_time:754961ms step_avg:87.17ms +[2025-08-22 11:23:33] [Rank 0] step:8661/10000 train_time:754961ms step_avg:87.17ms +[2025-08-22 11:23:35] [Rank 0] step:8681/10000 train_time:756803ms step_avg:87.18ms +[2025-08-22 11:23:35] [Rank 0] step:8681/10000 train_time:756803ms step_avg:87.18ms +[2025-08-22 11:23:36] [Rank 0] step:8701/10000 train_time:758652ms step_avg:87.19ms +[2025-08-22 11:23:36] [Rank 0] step:8701/10000 train_time:758652ms step_avg:87.19ms +[2025-08-22 11:23:38] [Rank 0] step:8721/10000 train_time:760501ms step_avg:87.20ms +[2025-08-22 11:23:38] [Rank 0] step:8721/10000 train_time:760501ms step_avg:87.20ms +[2025-08-22 11:23:40] [Rank 0] step:8741/10000 train_time:762347ms step_avg:87.22ms +[2025-08-22 11:23:40] [Rank 0] step:8741/10000 train_time:762347ms step_avg:87.22ms +[2025-08-22 11:23:42] [Rank 0] step:8761/10000 train_time:764193ms step_avg:87.23ms +[2025-08-22 11:23:42] [Rank 0] step:8761/10000 train_time:764193ms step_avg:87.23ms +[2025-08-22 11:23:44] [Rank 0] step:8781/10000 train_time:766051ms step_avg:87.24ms +[2025-08-22 11:23:44] [Rank 0] step:8781/10000 train_time:766051ms step_avg:87.24ms +[2025-08-22 11:23:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:23:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:23:59] [Rank 0] PRINT: step:8800/10000 val_loss:3.9125 svd_entropy: attn_qk:H=0.6729,top10E=0.36,eRank=101.6,q75/q25=99.59 attn_vo:H=0.5369,top10E=0.58,eRank=52.8,q75/q25=132.91 mlp_w1:H=0.7135,top10E=0.33,eRank=147.4,q75/q25=13.72 mlp_w2:H=0.8422,top10E=0.19,eRank=288.6,q75/q25=13.42 vo_prod:H=0.4384,top10E=0.75,eRank=27.4,q75/q25=14351.46 train_time:768219ms step_avg:87.30ms +[2025-08-22 11:23:59] [Rank 0] PRINT: step:8800/10000 val_loss:3.9125 svd_entropy: attn_qk:H=0.6729,top10E=0.36,eRank=101.6,q75/q25=99.59 attn_vo:H=0.5369,top10E=0.58,eRank=52.8,q75/q25=132.91 mlp_w1:H=0.7135,top10E=0.33,eRank=147.4,q75/q25=13.72 mlp_w2:H=0.8422,top10E=0.19,eRank=288.6,q75/q25=13.42 vo_prod:H=0.4384,top10E=0.75,eRank=27.4,q75/q25=14351.46 train_time:768219ms step_avg:87.30ms +[2025-08-22 11:23:59] [Rank 0] step:8801/10000 train_time:768229ms step_avg:87.29ms +[2025-08-22 11:23:59] [Rank 0] step:8801/10000 train_time:768229ms step_avg:87.29ms +[2025-08-22 11:24:01] [Rank 0] step:8821/10000 train_time:769756ms step_avg:87.26ms +[2025-08-22 11:24:01] [Rank 0] step:8821/10000 train_time:769756ms step_avg:87.26ms +[2025-08-22 11:24:03] [Rank 0] step:8841/10000 train_time:771614ms step_avg:87.28ms +[2025-08-22 11:24:03] [Rank 0] step:8841/10000 train_time:771614ms step_avg:87.28ms +[2025-08-22 11:24:05] [Rank 0] step:8861/10000 train_time:773461ms step_avg:87.29ms +[2025-08-22 11:24:05] [Rank 0] step:8861/10000 train_time:773461ms step_avg:87.29ms +[2025-08-22 11:24:07] [Rank 0] step:8881/10000 train_time:775298ms step_avg:87.30ms +[2025-08-22 11:24:07] [Rank 0] step:8881/10000 train_time:775298ms step_avg:87.30ms +[2025-08-22 11:24:08] [Rank 0] step:8901/10000 train_time:777149ms step_avg:87.31ms +[2025-08-22 11:24:08] [Rank 0] step:8901/10000 train_time:777149ms step_avg:87.31ms +[2025-08-22 11:24:10] [Rank 0] step:8921/10000 train_time:779000ms step_avg:87.32ms +[2025-08-22 11:24:10] [Rank 0] step:8921/10000 train_time:779000ms step_avg:87.32ms +[2025-08-22 
11:24:12] [Rank 0] step:8941/10000 train_time:780862ms step_avg:87.34ms +[2025-08-22 11:24:12] [Rank 0] step:8941/10000 train_time:780862ms step_avg:87.34ms +[2025-08-22 11:24:14] [Rank 0] step:8961/10000 train_time:782705ms step_avg:87.35ms +[2025-08-22 11:24:14] [Rank 0] step:8961/10000 train_time:782705ms step_avg:87.35ms +[2025-08-22 11:24:16] [Rank 0] step:8981/10000 train_time:784552ms step_avg:87.36ms +[2025-08-22 11:24:16] [Rank 0] step:8981/10000 train_time:784552ms step_avg:87.36ms +[2025-08-22 11:24:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:24:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:24:31] [Rank 0] PRINT: step:9000/10000 val_loss:3.9007 svd_entropy: attn_qk:H=0.6735,top10E=0.36,eRank=102.0,q75/q25=99.59 attn_vo:H=0.5377,top10E=0.58,eRank=53.2,q75/q25=133.63 mlp_w1:H=0.7139,top10E=0.33,eRank=147.9,q75/q25=13.70 mlp_w2:H=0.8428,top10E=0.19,eRank=289.6,q75/q25=13.36 vo_prod:H=0.4392,top10E=0.75,eRank=27.5,q75/q25=14378.86 train_time:786719ms step_avg:87.41ms +[2025-08-22 11:24:31] [Rank 0] PRINT: step:9000/10000 val_loss:3.9007 svd_entropy: attn_qk:H=0.6735,top10E=0.36,eRank=102.0,q75/q25=99.59 attn_vo:H=0.5377,top10E=0.58,eRank=53.2,q75/q25=133.63 mlp_w1:H=0.7139,top10E=0.33,eRank=147.9,q75/q25=13.70 mlp_w2:H=0.8428,top10E=0.19,eRank=289.6,q75/q25=13.36 vo_prod:H=0.4392,top10E=0.75,eRank=27.5,q75/q25=14378.86 train_time:786719ms step_avg:87.41ms +[2025-08-22 11:24:31] [Rank 0] step:9001/10000 train_time:786729ms step_avg:87.40ms +[2025-08-22 11:24:31] [Rank 0] step:9001/10000 train_time:786729ms step_avg:87.40ms +[2025-08-22 11:24:33] [Rank 0] step:9021/10000 train_time:788281ms step_avg:87.38ms +[2025-08-22 11:24:33] [Rank 0] step:9021/10000 train_time:788281ms step_avg:87.38ms +[2025-08-22 11:24:35] [Rank 0] step:9041/10000 train_time:790124ms 
step_avg:87.39ms +[2025-08-22 11:24:35] [Rank 0] step:9041/10000 train_time:790124ms step_avg:87.39ms +[2025-08-22 11:24:37] [Rank 0] step:9061/10000 train_time:791979ms step_avg:87.41ms +[2025-08-22 11:24:37] [Rank 0] step:9061/10000 train_time:791979ms step_avg:87.41ms +[2025-08-22 11:24:39] [Rank 0] step:9081/10000 train_time:793835ms step_avg:87.42ms +[2025-08-22 11:24:39] [Rank 0] step:9081/10000 train_time:793835ms step_avg:87.42ms +[2025-08-22 11:24:41] [Rank 0] step:9101/10000 train_time:795702ms step_avg:87.43ms +[2025-08-22 11:24:41] [Rank 0] step:9101/10000 train_time:795702ms step_avg:87.43ms +[2025-08-22 11:24:42] [Rank 0] step:9121/10000 train_time:797553ms step_avg:87.44ms +[2025-08-22 11:24:42] [Rank 0] step:9121/10000 train_time:797553ms step_avg:87.44ms +[2025-08-22 11:24:44] [Rank 0] step:9141/10000 train_time:799398ms step_avg:87.45ms +[2025-08-22 11:24:44] [Rank 0] step:9141/10000 train_time:799398ms step_avg:87.45ms +[2025-08-22 11:24:46] [Rank 0] step:9161/10000 train_time:801234ms step_avg:87.46ms +[2025-08-22 11:24:46] [Rank 0] step:9161/10000 train_time:801234ms step_avg:87.46ms +[2025-08-22 11:24:48] [Rank 0] step:9181/10000 train_time:803111ms step_avg:87.48ms +[2025-08-22 11:24:48] [Rank 0] step:9181/10000 train_time:803111ms step_avg:87.48ms +[2025-08-22 11:24:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:24:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:25:04] [Rank 0] PRINT: step:9200/10000 val_loss:3.8908 svd_entropy: attn_qk:H=0.6739,top10E=0.36,eRank=102.3,q75/q25=99.90 attn_vo:H=0.5383,top10E=0.58,eRank=53.4,q75/q25=134.06 mlp_w1:H=0.7144,top10E=0.33,eRank=148.4,q75/q25=13.71 mlp_w2:H=0.8432,top10E=0.19,eRank=290.4,q75/q25=13.31 vo_prod:H=0.4399,top10E=0.74,eRank=27.7,q75/q25=14383.85 train_time:805283ms step_avg:87.53ms +[2025-08-22 11:25:04] [Rank 0] PRINT: step:9200/10000 val_loss:3.8908 svd_entropy: attn_qk:H=0.6739,top10E=0.36,eRank=102.3,q75/q25=99.90 attn_vo:H=0.5383,top10E=0.58,eRank=53.4,q75/q25=134.06 mlp_w1:H=0.7144,top10E=0.33,eRank=148.4,q75/q25=13.71 mlp_w2:H=0.8432,top10E=0.19,eRank=290.4,q75/q25=13.31 vo_prod:H=0.4399,top10E=0.74,eRank=27.7,q75/q25=14383.85 train_time:805283ms step_avg:87.53ms +[2025-08-22 11:25:04] [Rank 0] step:9201/10000 train_time:805294ms step_avg:87.52ms +[2025-08-22 11:25:04] [Rank 0] step:9201/10000 train_time:805294ms step_avg:87.52ms +[2025-08-22 11:25:06] [Rank 0] step:9221/10000 train_time:806854ms step_avg:87.50ms +[2025-08-22 11:25:06] [Rank 0] step:9221/10000 train_time:806854ms step_avg:87.50ms +[2025-08-22 11:25:08] [Rank 0] step:9241/10000 train_time:808710ms step_avg:87.51ms +[2025-08-22 11:25:08] [Rank 0] step:9241/10000 train_time:808710ms step_avg:87.51ms +[2025-08-22 11:25:10] [Rank 0] step:9261/10000 train_time:810572ms step_avg:87.53ms +[2025-08-22 11:25:10] [Rank 0] step:9261/10000 train_time:810572ms step_avg:87.53ms +[2025-08-22 11:25:12] [Rank 0] step:9281/10000 train_time:812418ms step_avg:87.54ms +[2025-08-22 11:25:12] [Rank 0] step:9281/10000 train_time:812418ms step_avg:87.54ms +[2025-08-22 11:25:13] [Rank 0] step:9301/10000 train_time:814264ms step_avg:87.55ms +[2025-08-22 11:25:13] [Rank 0] step:9301/10000 train_time:814264ms step_avg:87.55ms +[2025-08-22 11:25:15] [Rank 0] step:9321/10000 train_time:816121ms step_avg:87.56ms +[2025-08-22 11:25:15] [Rank 0] step:9321/10000 train_time:816121ms step_avg:87.56ms +[2025-08-22 
11:25:17] [Rank 0] step:9341/10000 train_time:817977ms step_avg:87.57ms +[2025-08-22 11:25:17] [Rank 0] step:9341/10000 train_time:817977ms step_avg:87.57ms +[2025-08-22 11:25:19] [Rank 0] step:9361/10000 train_time:819837ms step_avg:87.58ms +[2025-08-22 11:25:19] [Rank 0] step:9361/10000 train_time:819837ms step_avg:87.58ms +[2025-08-22 11:25:21] [Rank 0] step:9381/10000 train_time:821706ms step_avg:87.59ms +[2025-08-22 11:25:21] [Rank 0] step:9381/10000 train_time:821706ms step_avg:87.59ms +[2025-08-22 11:25:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:25:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:25:36] [Rank 0] PRINT: step:9400/10000 val_loss:3.8814 svd_entropy: attn_qk:H=0.6742,top10E=0.36,eRank=102.5,q75/q25=99.72 attn_vo:H=0.5389,top10E=0.58,eRank=53.6,q75/q25=134.18 mlp_w1:H=0.7147,top10E=0.33,eRank=148.7,q75/q25=13.70 mlp_w2:H=0.8436,top10E=0.19,eRank=291.1,q75/q25=13.24 vo_prod:H=0.4404,top10E=0.74,eRank=27.8,q75/q25=14553.74 train_time:823893ms step_avg:87.65ms +[2025-08-22 11:25:36] [Rank 0] PRINT: step:9400/10000 val_loss:3.8814 svd_entropy: attn_qk:H=0.6742,top10E=0.36,eRank=102.5,q75/q25=99.72 attn_vo:H=0.5389,top10E=0.58,eRank=53.6,q75/q25=134.18 mlp_w1:H=0.7147,top10E=0.33,eRank=148.7,q75/q25=13.70 mlp_w2:H=0.8436,top10E=0.19,eRank=291.1,q75/q25=13.24 vo_prod:H=0.4404,top10E=0.74,eRank=27.8,q75/q25=14553.74 train_time:823893ms step_avg:87.65ms +[2025-08-22 11:25:36] [Rank 0] step:9401/10000 train_time:823903ms step_avg:87.64ms +[2025-08-22 11:25:36] [Rank 0] step:9401/10000 train_time:823903ms step_avg:87.64ms +[2025-08-22 11:25:38] [Rank 0] step:9421/10000 train_time:825448ms step_avg:87.62ms +[2025-08-22 11:25:38] [Rank 0] step:9421/10000 train_time:825448ms step_avg:87.62ms +[2025-08-22 11:25:40] [Rank 0] step:9441/10000 train_time:827294ms 
step_avg:87.63ms +[2025-08-22 11:25:40] [Rank 0] step:9441/10000 train_time:827294ms step_avg:87.63ms +[2025-08-22 11:25:42] [Rank 0] step:9461/10000 train_time:829155ms step_avg:87.64ms +[2025-08-22 11:25:42] [Rank 0] step:9461/10000 train_time:829155ms step_avg:87.64ms +[2025-08-22 11:25:44] [Rank 0] step:9481/10000 train_time:831013ms step_avg:87.65ms +[2025-08-22 11:25:44] [Rank 0] step:9481/10000 train_time:831013ms step_avg:87.65ms +[2025-08-22 11:25:46] [Rank 0] step:9501/10000 train_time:832874ms step_avg:87.66ms +[2025-08-22 11:25:46] [Rank 0] step:9501/10000 train_time:832874ms step_avg:87.66ms +[2025-08-22 11:25:48] [Rank 0] step:9521/10000 train_time:834719ms step_avg:87.67ms +[2025-08-22 11:25:48] [Rank 0] step:9521/10000 train_time:834719ms step_avg:87.67ms +[2025-08-22 11:25:49] [Rank 0] step:9541/10000 train_time:836568ms step_avg:87.68ms +[2025-08-22 11:25:49] [Rank 0] step:9541/10000 train_time:836568ms step_avg:87.68ms +[2025-08-22 11:25:51] [Rank 0] step:9561/10000 train_time:838418ms step_avg:87.69ms +[2025-08-22 11:25:51] [Rank 0] step:9561/10000 train_time:838418ms step_avg:87.69ms +[2025-08-22 11:25:53] [Rank 0] step:9581/10000 train_time:840270ms step_avg:87.70ms +[2025-08-22 11:25:53] [Rank 0] step:9581/10000 train_time:840270ms step_avg:87.70ms +[2025-08-22 11:25:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:25:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:26:08] [Rank 0] PRINT: step:9600/10000 val_loss:3.8718 svd_entropy: attn_qk:H=0.6746,top10E=0.36,eRank=102.7,q75/q25=99.95 attn_vo:H=0.5394,top10E=0.58,eRank=53.8,q75/q25=134.44 mlp_w1:H=0.7150,top10E=0.33,eRank=149.0,q75/q25=13.69 mlp_w2:H=0.8440,top10E=0.19,eRank=291.7,q75/q25=13.21 vo_prod:H=0.4409,top10E=0.74,eRank=27.9,q75/q25=14613.03 train_time:842456ms step_avg:87.76ms +[2025-08-22 11:26:08] [Rank 0] PRINT: step:9600/10000 val_loss:3.8718 svd_entropy: attn_qk:H=0.6746,top10E=0.36,eRank=102.7,q75/q25=99.95 attn_vo:H=0.5394,top10E=0.58,eRank=53.8,q75/q25=134.44 mlp_w1:H=0.7150,top10E=0.33,eRank=149.0,q75/q25=13.69 mlp_w2:H=0.8440,top10E=0.19,eRank=291.7,q75/q25=13.21 vo_prod:H=0.4409,top10E=0.74,eRank=27.9,q75/q25=14613.03 train_time:842456ms step_avg:87.76ms +[2025-08-22 11:26:09] [Rank 0] step:9601/10000 train_time:842466ms step_avg:87.75ms +[2025-08-22 11:26:09] [Rank 0] step:9601/10000 train_time:842466ms step_avg:87.75ms +[2025-08-22 11:26:10] [Rank 0] step:9621/10000 train_time:844008ms step_avg:87.73ms +[2025-08-22 11:26:10] [Rank 0] step:9621/10000 train_time:844008ms step_avg:87.73ms +[2025-08-22 11:26:12] [Rank 0] step:9641/10000 train_time:845856ms step_avg:87.74ms +[2025-08-22 11:26:12] [Rank 0] step:9641/10000 train_time:845856ms step_avg:87.74ms +[2025-08-22 11:26:14] [Rank 0] step:9661/10000 train_time:847731ms step_avg:87.75ms +[2025-08-22 11:26:14] [Rank 0] step:9661/10000 train_time:847731ms step_avg:87.75ms +[2025-08-22 11:26:16] [Rank 0] step:9681/10000 train_time:849606ms step_avg:87.76ms +[2025-08-22 11:26:16] [Rank 0] step:9681/10000 train_time:849606ms step_avg:87.76ms +[2025-08-22 11:26:18] [Rank 0] step:9701/10000 train_time:851489ms step_avg:87.77ms +[2025-08-22 11:26:18] [Rank 0] step:9701/10000 train_time:851489ms step_avg:87.77ms +[2025-08-22 11:26:20] [Rank 0] step:9721/10000 train_time:853360ms step_avg:87.79ms +[2025-08-22 11:26:20] [Rank 0] step:9721/10000 train_time:853360ms step_avg:87.79ms +[2025-08-22 
11:26:22] [Rank 0] step:9741/10000 train_time:855256ms step_avg:87.80ms +[2025-08-22 11:26:22] [Rank 0] step:9741/10000 train_time:855256ms step_avg:87.80ms +[2025-08-22 11:26:24] [Rank 0] step:9761/10000 train_time:857138ms step_avg:87.81ms +[2025-08-22 11:26:24] [Rank 0] step:9761/10000 train_time:857138ms step_avg:87.81ms +[2025-08-22 11:26:25] [Rank 0] step:9781/10000 train_time:859032ms step_avg:87.83ms +[2025-08-22 11:26:25] [Rank 0] step:9781/10000 train_time:859032ms step_avg:87.83ms +[2025-08-22 11:26:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:26:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:26:41] [Rank 0] PRINT: step:9800/10000 val_loss:3.8643 svd_entropy: attn_qk:H=0.6748,top10E=0.36,eRank=102.9,q75/q25=99.92 attn_vo:H=0.5397,top10E=0.58,eRank=54.0,q75/q25=134.67 mlp_w1:H=0.7152,top10E=0.33,eRank=149.2,q75/q25=13.67 mlp_w2:H=0.8443,top10E=0.19,eRank=292.2,q75/q25=13.17 vo_prod:H=0.4413,top10E=0.74,eRank=28.0,q75/q25=14823.90 train_time:861248ms step_avg:87.88ms +[2025-08-22 11:26:41] [Rank 0] PRINT: step:9800/10000 val_loss:3.8643 svd_entropy: attn_qk:H=0.6748,top10E=0.36,eRank=102.9,q75/q25=99.92 attn_vo:H=0.5397,top10E=0.58,eRank=54.0,q75/q25=134.67 mlp_w1:H=0.7152,top10E=0.33,eRank=149.2,q75/q25=13.67 mlp_w2:H=0.8443,top10E=0.19,eRank=292.2,q75/q25=13.17 vo_prod:H=0.4413,top10E=0.74,eRank=28.0,q75/q25=14823.90 train_time:861248ms step_avg:87.88ms +[2025-08-22 11:26:41] [Rank 0] step:9801/10000 train_time:861258ms step_avg:87.87ms +[2025-08-22 11:26:41] [Rank 0] step:9801/10000 train_time:861258ms step_avg:87.87ms +[2025-08-22 11:26:43] [Rank 0] step:9821/10000 train_time:862819ms step_avg:87.85ms +[2025-08-22 11:26:43] [Rank 0] step:9821/10000 train_time:862819ms step_avg:87.85ms +[2025-08-22 11:26:45] [Rank 0] step:9841/10000 train_time:864708ms 
step_avg:87.87ms +[2025-08-22 11:26:45] [Rank 0] step:9841/10000 train_time:864708ms step_avg:87.87ms +[2025-08-22 11:26:47] [Rank 0] step:9861/10000 train_time:866577ms step_avg:87.88ms +[2025-08-22 11:26:47] [Rank 0] step:9861/10000 train_time:866577ms step_avg:87.88ms +[2025-08-22 11:26:49] [Rank 0] step:9881/10000 train_time:868449ms step_avg:87.89ms +[2025-08-22 11:26:49] [Rank 0] step:9881/10000 train_time:868449ms step_avg:87.89ms +[2025-08-22 11:26:50] [Rank 0] step:9901/10000 train_time:870334ms step_avg:87.90ms +[2025-08-22 11:26:50] [Rank 0] step:9901/10000 train_time:870334ms step_avg:87.90ms +[2025-08-22 11:26:52] [Rank 0] step:9921/10000 train_time:872210ms step_avg:87.92ms +[2025-08-22 11:26:52] [Rank 0] step:9921/10000 train_time:872210ms step_avg:87.92ms +[2025-08-22 11:26:54] [Rank 0] step:9941/10000 train_time:874094ms step_avg:87.93ms +[2025-08-22 11:26:54] [Rank 0] step:9941/10000 train_time:874094ms step_avg:87.93ms +[2025-08-22 11:26:56] [Rank 0] step:9961/10000 train_time:875966ms step_avg:87.94ms +[2025-08-22 11:26:56] [Rank 0] step:9961/10000 train_time:875966ms step_avg:87.94ms +[2025-08-22 11:26:58] [Rank 0] step:9981/10000 train_time:877842ms step_avg:87.95ms +[2025-08-22 11:26:58] [Rank 0] step:9981/10000 train_time:877842ms step_avg:87.95ms +[2025-08-22 11:27:00] [Rank 0] step:10000/10000 train_time:879628ms step_avg:87.96ms +[2025-08-22 11:27:00] [Rank 0] step:10000/10000 train_time:879628ms step_avg:87.96ms +[2025-08-22 11:27:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:27:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:27:13] [Rank 0] PRINT: step:10000/10000 val_loss:3.8553 svd_entropy: attn_qk:H=0.6750,top10E=0.36,eRank=103.0,q75/q25=99.86 attn_vo:H=0.5400,top10E=0.58,eRank=54.1,q75/q25=134.69 mlp_w1:H=0.7154,top10E=0.33,eRank=149.4,q75/q25=13.67 mlp_w2:H=0.8445,top10E=0.19,eRank=292.5,q75/q25=13.13 vo_prod:H=0.4416,top10E=0.74,eRank=28.0,q75/q25=14644.64 train_time:880061ms step_avg:88.01ms +[2025-08-22 11:27:13] [Rank 0] PRINT: step:10000/10000 val_loss:3.8553 svd_entropy: attn_qk:H=0.6750,top10E=0.36,eRank=103.0,q75/q25=99.86 attn_vo:H=0.5400,top10E=0.58,eRank=54.1,q75/q25=134.69 mlp_w1:H=0.7154,top10E=0.33,eRank=149.4,q75/q25=13.67 mlp_w2:H=0.8445,top10E=0.19,eRank=292.5,q75/q25=13.13 vo_prod:H=0.4416,top10E=0.74,eRank=28.0,q75/q25=14644.64 train_time:880061ms step_avg:88.01ms +[2025-08-22 11:27:13] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 11:27:13 2025 --- +[2025-08-22 11:27:13] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 11:27:13 2025 --- +[2025-08-22 11:27:13] [Rank 0] PRINT: Peak memory allocated: 11559 MiB reserved: 11616 MiB +[2025-08-22 11:27:13] [Rank 0] PRINT: Peak memory allocated: 11559 MiB reserved: 11616 MiB diff --git a/logs_svd_gated/mode_5_param_gated_seed_42/config.json b/logs_svd_gated/mode_5_param_gated_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..ec29c1ed629d19a818cb2e86b13d7ef8e99a62d8 --- /dev/null +++ b/logs_svd_gated/mode_5_param_gated_seed_42/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 5, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "9dda4f12-cceb-42db-983a-fb8b619a18f3", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_5_param_gated_seed_42/training_log_9dda4f12-cceb-42db-983a-fb8b619a18f3.txt b/logs_svd_gated/mode_5_param_gated_seed_42/training_log_9dda4f12-cceb-42db-983a-fb8b619a18f3.txt new file mode 100644 index 0000000000000000000000000000000000000000..2971ed3dd50f88b45fddba260e77c0e198324a38 --- /dev/null +++ b/logs_svd_gated/mode_5_param_gated_seed_42/training_log_9dda4f12-cceb-42db-983a-fb8b619a18f3.txt @@ -0,0 +1,2926 @@ +[2025-08-22 16:14:09] [Rank 0] PRINT: --- Script Start: Fri Aug 22 16:14:09 2025 --- +[2025-08-22 16:14:09] [Rank 0] PRINT: --- Script Start: Fri Aug 22 16:14:09 2025 --- +[2025-08-22 16:14:09] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=5, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 16:14:09] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=5, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 16:14:09] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 16:14:09] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 16:14:09] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 16:14:09] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 16:14:09] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_5_param_gated_seed_42 +[2025-08-22 16:14:09] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_5_param_gated_seed_42 +[2025-08-22 16:14:09] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 16:14:09] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 16:14:09] [Rank 0] PRINT: Constructing model... +[2025-08-22 16:14:09] [Rank 0] PRINT: Constructing model... +[2025-08-22 16:14:11] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 16:14:11] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 16:14:11] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 16:14:11] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 16:14:11] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 16:14:11] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 16:14:11] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 5 +[2025-08-22 16:14:11] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 5 +[2025-08-22 16:14:11] [Rank 0] PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: 0.05). +[2025-08-22 16:14:11] [Rank 0] PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: 0.05). +[2025-08-22 16:14:11] [Rank 0] PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices). +[2025-08-22 16:14:11] [Rank 0] PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices). +[2025-08-22 16:14:11] [Rank 0] PRINT: Optimizers configured. Total optimizers: 1 +[2025-08-22 16:14:11] [Rank 0] PRINT: Optimizers configured. Total optimizers: 1 +[2025-08-22 16:14:11] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 16:14:11] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 16:14:11] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 16:14:11] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 16:14:11] [Rank 0] PRINT: Starting warmup... +[2025-08-22 16:14:11] [Rank 0] PRINT: Starting warmup... +[2025-08-22 16:14:54] [Rank 0] PRINT: Warmup complete. +[2025-08-22 16:14:54] [Rank 0] PRINT: Warmup complete. 
+[2025-08-22 16:14:54] [Rank 0] PRINT: Starting training... +[2025-08-22 16:14:54] [Rank 0] PRINT: Starting training... +[2025-08-22 16:14:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:14:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:15:11] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 16:15:11] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 16:15:13] [Rank 0] step:21/10000 train_time:1387ms step_avg:66.05ms +[2025-08-22 16:15:13] [Rank 0] step:21/10000 train_time:1387ms step_avg:66.05ms +[2025-08-22 16:15:15] [Rank 0] step:41/10000 train_time:3006ms step_avg:73.32ms +[2025-08-22 16:15:15] [Rank 0] step:41/10000 train_time:3006ms step_avg:73.32ms +[2025-08-22 16:15:16] [Rank 0] step:61/10000 train_time:4626ms step_avg:75.83ms +[2025-08-22 16:15:16] [Rank 0] step:61/10000 train_time:4626ms step_avg:75.83ms +[2025-08-22 16:15:18] [Rank 0] step:81/10000 train_time:6248ms step_avg:77.13ms +[2025-08-22 16:15:18] [Rank 0] step:81/10000 train_time:6248ms step_avg:77.13ms +[2025-08-22 16:15:20] [Rank 0] step:101/10000 train_time:7873ms step_avg:77.95ms +[2025-08-22 16:15:20] [Rank 0] step:101/10000 train_time:7873ms step_avg:77.95ms +[2025-08-22 
16:15:21] [Rank 0] step:121/10000 train_time:9500ms step_avg:78.52ms +[2025-08-22 16:15:21] [Rank 0] step:121/10000 train_time:9500ms step_avg:78.52ms +[2025-08-22 16:15:23] [Rank 0] step:141/10000 train_time:11129ms step_avg:78.93ms +[2025-08-22 16:15:23] [Rank 0] step:141/10000 train_time:11129ms step_avg:78.93ms +[2025-08-22 16:15:25] [Rank 0] step:161/10000 train_time:12759ms step_avg:79.25ms +[2025-08-22 16:15:25] [Rank 0] step:161/10000 train_time:12759ms step_avg:79.25ms +[2025-08-22 16:15:26] [Rank 0] step:181/10000 train_time:14391ms step_avg:79.51ms +[2025-08-22 16:15:26] [Rank 0] step:181/10000 train_time:14391ms step_avg:79.51ms +[2025-08-22 16:15:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:15:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:15:41] [Rank 0] PRINT: step:200/10000 val_loss:6.6681 svd_entropy: attn_qk:H=0.3037,top10E=0.89,eRank=12.3,q75/q25=15.01 attn_vo:H=0.1045,top10E=0.98,eRank=2.4,q75/q25=2512.55 mlp_w1:H=0.3179,top10E=0.91,eRank=8.5,q75/q25=4.58 mlp_w2:H=0.3967,top10E=0.82,eRank=14.1,q75/q25=5.95 vo_prod:H=0.0223,top10E=1.00,eRank=1.3,q75/q25=14714.06 train_time:16303ms step_avg:81.52ms +[2025-08-22 16:15:41] [Rank 0] PRINT: step:200/10000 val_loss:6.6681 svd_entropy: attn_qk:H=0.3037,top10E=0.89,eRank=12.3,q75/q25=15.01 attn_vo:H=0.1045,top10E=0.98,eRank=2.4,q75/q25=2512.55 mlp_w1:H=0.3179,top10E=0.91,eRank=8.5,q75/q25=4.58 mlp_w2:H=0.3967,top10E=0.82,eRank=14.1,q75/q25=5.95 vo_prod:H=0.0223,top10E=1.00,eRank=1.3,q75/q25=14714.06 train_time:16303ms step_avg:81.52ms +[2025-08-22 16:15:42] [Rank 0] step:201/10000 train_time:16313ms step_avg:81.16ms +[2025-08-22 16:15:42] [Rank 0] step:201/10000 train_time:16313ms step_avg:81.16ms +[2025-08-22 16:15:43] [Rank 0] step:221/10000 train_time:17662ms step_avg:79.92ms +[2025-08-22 16:15:43] 
[Rank 0] step:221/10000 train_time:17662ms step_avg:79.92ms +[2025-08-22 16:15:45] [Rank 0] step:241/10000 train_time:19286ms step_avg:80.03ms +[2025-08-22 16:15:45] [Rank 0] step:241/10000 train_time:19286ms step_avg:80.03ms +[2025-08-22 16:15:46] [Rank 0] step:261/10000 train_time:20914ms step_avg:80.13ms +[2025-08-22 16:15:46] [Rank 0] step:261/10000 train_time:20914ms step_avg:80.13ms +[2025-08-22 16:15:48] [Rank 0] step:281/10000 train_time:22542ms step_avg:80.22ms +[2025-08-22 16:15:48] [Rank 0] step:281/10000 train_time:22542ms step_avg:80.22ms +[2025-08-22 16:15:50] [Rank 0] step:301/10000 train_time:24171ms step_avg:80.30ms +[2025-08-22 16:15:50] [Rank 0] step:301/10000 train_time:24171ms step_avg:80.30ms +[2025-08-22 16:15:51] [Rank 0] step:321/10000 train_time:25799ms step_avg:80.37ms +[2025-08-22 16:15:51] [Rank 0] step:321/10000 train_time:25799ms step_avg:80.37ms +[2025-08-22 16:15:53] [Rank 0] step:341/10000 train_time:27426ms step_avg:80.43ms +[2025-08-22 16:15:53] [Rank 0] step:341/10000 train_time:27426ms step_avg:80.43ms +[2025-08-22 16:15:55] [Rank 0] step:361/10000 train_time:29052ms step_avg:80.48ms +[2025-08-22 16:15:55] [Rank 0] step:361/10000 train_time:29052ms step_avg:80.48ms +[2025-08-22 16:15:56] [Rank 0] step:381/10000 train_time:30680ms step_avg:80.52ms +[2025-08-22 16:15:56] [Rank 0] step:381/10000 train_time:30680ms step_avg:80.52ms +[2025-08-22 16:15:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:15:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:16:11] [Rank 0] PRINT: step:400/10000 val_loss:6.0818 svd_entropy: attn_qk:H=0.5221,top10E=0.65,eRank=35.3,q75/q25=67.48 attn_vo:H=0.2507,top10E=0.97,eRank=6.1,q75/q25=121.60 mlp_w1:H=0.5276,top10E=0.64,eRank=34.5,q75/q25=7.53 mlp_w2:H=0.6167,top10E=0.51,eRank=61.7,q75/q25=6.65 vo_prod:H=0.1213,top10E=1.00,eRank=2.6,q75/q25=1011.63 train_time:32588ms step_avg:81.47ms +[2025-08-22 16:16:11] [Rank 0] PRINT: step:400/10000 val_loss:6.0818 svd_entropy: attn_qk:H=0.5221,top10E=0.65,eRank=35.3,q75/q25=67.48 attn_vo:H=0.2507,top10E=0.97,eRank=6.1,q75/q25=121.60 mlp_w1:H=0.5276,top10E=0.64,eRank=34.5,q75/q25=7.53 mlp_w2:H=0.6167,top10E=0.51,eRank=61.7,q75/q25=6.65 vo_prod:H=0.1213,top10E=1.00,eRank=2.6,q75/q25=1011.63 train_time:32588ms step_avg:81.47ms +[2025-08-22 16:16:11] [Rank 0] step:401/10000 train_time:32597ms step_avg:81.29ms +[2025-08-22 16:16:11] [Rank 0] step:401/10000 train_time:32597ms step_avg:81.29ms +[2025-08-22 16:16:13] [Rank 0] step:421/10000 train_time:33955ms step_avg:80.65ms +[2025-08-22 16:16:13] [Rank 0] step:421/10000 train_time:33955ms step_avg:80.65ms +[2025-08-22 16:16:14] [Rank 0] step:441/10000 train_time:35573ms step_avg:80.67ms +[2025-08-22 16:16:14] [Rank 0] step:441/10000 train_time:35573ms step_avg:80.67ms +[2025-08-22 16:16:16] [Rank 0] step:461/10000 train_time:37194ms step_avg:80.68ms +[2025-08-22 16:16:16] [Rank 0] step:461/10000 train_time:37194ms step_avg:80.68ms +[2025-08-22 16:16:18] [Rank 0] step:481/10000 train_time:38816ms step_avg:80.70ms +[2025-08-22 16:16:18] [Rank 0] step:481/10000 train_time:38816ms step_avg:80.70ms +[2025-08-22 16:16:19] [Rank 0] step:501/10000 train_time:40439ms step_avg:80.72ms +[2025-08-22 16:16:19] [Rank 0] step:501/10000 train_time:40439ms step_avg:80.72ms +[2025-08-22 16:16:21] [Rank 0] step:521/10000 train_time:42065ms step_avg:80.74ms +[2025-08-22 16:16:21] [Rank 0] step:521/10000 train_time:42065ms step_avg:80.74ms +[2025-08-22 16:16:23] [Rank 0] step:541/10000 train_time:43690ms 
step_avg:80.76ms +[2025-08-22 16:16:23] [Rank 0] step:541/10000 train_time:43690ms step_avg:80.76ms +[2025-08-22 16:16:24] [Rank 0] step:561/10000 train_time:45315ms step_avg:80.78ms +[2025-08-22 16:16:24] [Rank 0] step:561/10000 train_time:45315ms step_avg:80.78ms +[2025-08-22 16:16:26] [Rank 0] step:581/10000 train_time:46943ms step_avg:80.80ms +[2025-08-22 16:16:26] [Rank 0] step:581/10000 train_time:46943ms step_avg:80.80ms +[2025-08-22 16:16:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:16:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:16:41] [Rank 0] PRINT: step:600/10000 val_loss:5.6985 svd_entropy: attn_qk:H=0.5719,top10E=0.54,eRank=47.5,q75/q25=70.03 attn_vo:H=0.3140,top10E=0.94,eRank=8.8,q75/q25=99.63 mlp_w1:H=0.6095,top10E=0.51,eRank=62.2,q75/q25=6.24 mlp_w2:H=0.7045,top10E=0.35,eRank=112.3,q75/q25=7.51 vo_prod:H=0.1889,top10E=0.99,eRank=3.9,q75/q25=965.45 train_time:48847ms step_avg:81.41ms +[2025-08-22 16:16:41] [Rank 0] PRINT: step:600/10000 val_loss:5.6985 svd_entropy: attn_qk:H=0.5719,top10E=0.54,eRank=47.5,q75/q25=70.03 attn_vo:H=0.3140,top10E=0.94,eRank=8.8,q75/q25=99.63 mlp_w1:H=0.6095,top10E=0.51,eRank=62.2,q75/q25=6.24 mlp_w2:H=0.7045,top10E=0.35,eRank=112.3,q75/q25=7.51 vo_prod:H=0.1889,top10E=0.99,eRank=3.9,q75/q25=965.45 train_time:48847ms step_avg:81.41ms +[2025-08-22 16:16:41] [Rank 0] step:601/10000 train_time:48857ms step_avg:81.29ms +[2025-08-22 16:16:41] [Rank 0] step:601/10000 train_time:48857ms step_avg:81.29ms +[2025-08-22 16:16:43] [Rank 0] step:621/10000 train_time:50215ms step_avg:80.86ms +[2025-08-22 16:16:43] [Rank 0] step:621/10000 train_time:50215ms step_avg:80.86ms +[2025-08-22 16:16:44] [Rank 0] step:641/10000 train_time:51839ms step_avg:80.87ms +[2025-08-22 16:16:44] [Rank 0] step:641/10000 train_time:51839ms step_avg:80.87ms 
+[2025-08-22 16:16:46] [Rank 0] step:661/10000 train_time:53464ms step_avg:80.88ms +[2025-08-22 16:16:46] [Rank 0] step:661/10000 train_time:53464ms step_avg:80.88ms +[2025-08-22 16:16:48] [Rank 0] step:681/10000 train_time:55089ms step_avg:80.89ms +[2025-08-22 16:16:48] [Rank 0] step:681/10000 train_time:55089ms step_avg:80.89ms +[2025-08-22 16:16:49] [Rank 0] step:701/10000 train_time:56712ms step_avg:80.90ms +[2025-08-22 16:16:49] [Rank 0] step:701/10000 train_time:56712ms step_avg:80.90ms +[2025-08-22 16:16:51] [Rank 0] step:721/10000 train_time:58338ms step_avg:80.91ms +[2025-08-22 16:16:51] [Rank 0] step:721/10000 train_time:58338ms step_avg:80.91ms +[2025-08-22 16:16:52] [Rank 0] step:741/10000 train_time:59964ms step_avg:80.92ms +[2025-08-22 16:16:52] [Rank 0] step:741/10000 train_time:59964ms step_avg:80.92ms +[2025-08-22 16:16:54] [Rank 0] step:761/10000 train_time:61601ms step_avg:80.95ms +[2025-08-22 16:16:54] [Rank 0] step:761/10000 train_time:61601ms step_avg:80.95ms +[2025-08-22 16:16:56] [Rank 0] step:781/10000 train_time:63242ms step_avg:80.98ms +[2025-08-22 16:16:56] [Rank 0] step:781/10000 train_time:63242ms step_avg:80.98ms +[2025-08-22 16:16:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:16:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:17:11] [Rank 0] PRINT: step:800/10000 val_loss:5.4525 svd_entropy: attn_qk:H=0.5984,top10E=0.49,eRank=56.1,q75/q25=65.84 attn_vo:H=0.3521,top10E=0.91,eRank=11.3,q75/q25=92.20 mlp_w1:H=0.6406,top10E=0.45,eRank=78.0,q75/q25=6.44 mlp_w2:H=0.7449,top10E=0.28,eRank=147.1,q75/q25=9.02 vo_prod:H=0.2300,top10E=0.99,eRank=5.1,q75/q25=1106.94 train_time:65165ms step_avg:81.46ms +[2025-08-22 16:17:11] [Rank 0] PRINT: step:800/10000 val_loss:5.4525 svd_entropy: attn_qk:H=0.5984,top10E=0.49,eRank=56.1,q75/q25=65.84 attn_vo:H=0.3521,top10E=0.91,eRank=11.3,q75/q25=92.20 mlp_w1:H=0.6406,top10E=0.45,eRank=78.0,q75/q25=6.44 mlp_w2:H=0.7449,top10E=0.28,eRank=147.1,q75/q25=9.02 vo_prod:H=0.2300,top10E=0.99,eRank=5.1,q75/q25=1106.94 train_time:65165ms step_avg:81.46ms +[2025-08-22 16:17:11] [Rank 0] step:801/10000 train_time:65174ms step_avg:81.37ms +[2025-08-22 16:17:11] [Rank 0] step:801/10000 train_time:65174ms step_avg:81.37ms +[2025-08-22 16:17:12] [Rank 0] step:821/10000 train_time:66559ms step_avg:81.07ms +[2025-08-22 16:17:12] [Rank 0] step:821/10000 train_time:66559ms step_avg:81.07ms +[2025-08-22 16:17:14] [Rank 0] step:841/10000 train_time:68196ms step_avg:81.09ms +[2025-08-22 16:17:14] [Rank 0] step:841/10000 train_time:68196ms step_avg:81.09ms +[2025-08-22 16:17:16] [Rank 0] step:861/10000 train_time:69836ms step_avg:81.11ms +[2025-08-22 16:17:16] [Rank 0] step:861/10000 train_time:69836ms step_avg:81.11ms +[2025-08-22 16:17:17] [Rank 0] step:881/10000 train_time:71476ms step_avg:81.13ms +[2025-08-22 16:17:17] [Rank 0] step:881/10000 train_time:71476ms step_avg:81.13ms +[2025-08-22 16:17:19] [Rank 0] step:901/10000 train_time:73118ms step_avg:81.15ms +[2025-08-22 16:17:19] [Rank 0] step:901/10000 train_time:73118ms step_avg:81.15ms +[2025-08-22 16:17:21] [Rank 0] step:921/10000 train_time:74760ms step_avg:81.17ms +[2025-08-22 16:17:21] [Rank 0] step:921/10000 train_time:74760ms step_avg:81.17ms +[2025-08-22 16:17:22] [Rank 0] step:941/10000 
train_time:76401ms step_avg:81.19ms +[2025-08-22 16:17:22] [Rank 0] step:941/10000 train_time:76401ms step_avg:81.19ms +[2025-08-22 16:17:24] [Rank 0] step:961/10000 train_time:78043ms step_avg:81.21ms +[2025-08-22 16:17:24] [Rank 0] step:961/10000 train_time:78043ms step_avg:81.21ms +[2025-08-22 16:17:26] [Rank 0] step:981/10000 train_time:79687ms step_avg:81.23ms +[2025-08-22 16:17:26] [Rank 0] step:981/10000 train_time:79687ms step_avg:81.23ms +[2025-08-22 16:17:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:17:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:17:41] [Rank 0] PRINT: step:1000/10000 val_loss:5.2871 svd_entropy: attn_qk:H=0.6162,top10E=0.46,eRank=62.8,q75/q25=65.33 attn_vo:H=0.3807,top10E=0.87,eRank=13.6,q75/q25=88.44 mlp_w1:H=0.6599,top10E=0.42,eRank=89.7,q75/q25=6.93 mlp_w2:H=0.7718,top10E=0.25,eRank=175.8,q75/q25=10.57 vo_prod:H=0.2640,top10E=0.97,eRank=6.3,q75/q25=1318.27 train_time:81616ms step_avg:81.62ms +[2025-08-22 16:17:41] [Rank 0] PRINT: step:1000/10000 val_loss:5.2871 svd_entropy: attn_qk:H=0.6162,top10E=0.46,eRank=62.8,q75/q25=65.33 attn_vo:H=0.3807,top10E=0.87,eRank=13.6,q75/q25=88.44 mlp_w1:H=0.6599,top10E=0.42,eRank=89.7,q75/q25=6.93 mlp_w2:H=0.7718,top10E=0.25,eRank=175.8,q75/q25=10.57 vo_prod:H=0.2640,top10E=0.97,eRank=6.3,q75/q25=1318.27 train_time:81616ms step_avg:81.62ms +[2025-08-22 16:17:41] [Rank 0] step:1001/10000 train_time:81625ms step_avg:81.54ms +[2025-08-22 16:17:41] [Rank 0] step:1001/10000 train_time:81625ms step_avg:81.54ms +[2025-08-22 16:17:42] [Rank 0] step:1021/10000 train_time:83001ms step_avg:81.29ms +[2025-08-22 16:17:42] [Rank 0] step:1021/10000 train_time:83001ms step_avg:81.29ms +[2025-08-22 16:17:44] [Rank 0] step:1041/10000 train_time:84636ms step_avg:81.30ms +[2025-08-22 16:17:44] [Rank 0] step:1041/10000 
train_time:84636ms step_avg:81.30ms +[2025-08-22 16:17:46] [Rank 0] step:1061/10000 train_time:86275ms step_avg:81.31ms +[2025-08-22 16:17:46] [Rank 0] step:1061/10000 train_time:86275ms step_avg:81.31ms +[2025-08-22 16:17:47] [Rank 0] step:1081/10000 train_time:87915ms step_avg:81.33ms +[2025-08-22 16:17:47] [Rank 0] step:1081/10000 train_time:87915ms step_avg:81.33ms +[2025-08-22 16:17:49] [Rank 0] step:1101/10000 train_time:89554ms step_avg:81.34ms +[2025-08-22 16:17:49] [Rank 0] step:1101/10000 train_time:89554ms step_avg:81.34ms +[2025-08-22 16:17:51] [Rank 0] step:1121/10000 train_time:91193ms step_avg:81.35ms +[2025-08-22 16:17:51] [Rank 0] step:1121/10000 train_time:91193ms step_avg:81.35ms +[2025-08-22 16:17:52] [Rank 0] step:1141/10000 train_time:92833ms step_avg:81.36ms +[2025-08-22 16:17:52] [Rank 0] step:1141/10000 train_time:92833ms step_avg:81.36ms +[2025-08-22 16:17:54] [Rank 0] step:1161/10000 train_time:94471ms step_avg:81.37ms +[2025-08-22 16:17:54] [Rank 0] step:1161/10000 train_time:94471ms step_avg:81.37ms +[2025-08-22 16:17:55] [Rank 0] step:1181/10000 train_time:96110ms step_avg:81.38ms +[2025-08-22 16:17:55] [Rank 0] step:1181/10000 train_time:96110ms step_avg:81.38ms +[2025-08-22 16:17:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:17:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:18:11] [Rank 0] PRINT: step:1200/10000 val_loss:5.1380 svd_entropy: attn_qk:H=0.6299,top10E=0.44,eRank=68.6,q75/q25=68.52 attn_vo:H=0.4044,top10E=0.83,eRank=15.9,q75/q25=86.73 mlp_w1:H=0.6745,top10E=0.40,eRank=99.7,q75/q25=7.58 mlp_w2:H=0.7920,top10E=0.22,eRank=201.3,q75/q25=11.79 vo_prod:H=0.2906,top10E=0.96,eRank=7.6,q75/q25=1573.66 train_time:98032ms step_avg:81.69ms +[2025-08-22 16:18:11] [Rank 0] PRINT: step:1200/10000 val_loss:5.1380 svd_entropy: attn_qk:H=0.6299,top10E=0.44,eRank=68.6,q75/q25=68.52 attn_vo:H=0.4044,top10E=0.83,eRank=15.9,q75/q25=86.73 mlp_w1:H=0.6745,top10E=0.40,eRank=99.7,q75/q25=7.58 mlp_w2:H=0.7920,top10E=0.22,eRank=201.3,q75/q25=11.79 vo_prod:H=0.2906,top10E=0.96,eRank=7.6,q75/q25=1573.66 train_time:98032ms step_avg:81.69ms +[2025-08-22 16:18:11] [Rank 0] step:1201/10000 train_time:98043ms step_avg:81.63ms +[2025-08-22 16:18:11] [Rank 0] step:1201/10000 train_time:98043ms step_avg:81.63ms +[2025-08-22 16:18:12] [Rank 0] step:1221/10000 train_time:99400ms step_avg:81.41ms +[2025-08-22 16:18:12] [Rank 0] step:1221/10000 train_time:99400ms step_avg:81.41ms +[2025-08-22 16:18:14] [Rank 0] step:1241/10000 train_time:101036ms step_avg:81.42ms +[2025-08-22 16:18:14] [Rank 0] step:1241/10000 train_time:101036ms step_avg:81.42ms +[2025-08-22 16:18:16] [Rank 0] step:1261/10000 train_time:102671ms step_avg:81.42ms +[2025-08-22 16:18:16] [Rank 0] step:1261/10000 train_time:102671ms step_avg:81.42ms +[2025-08-22 16:18:17] [Rank 0] step:1281/10000 train_time:104309ms step_avg:81.43ms +[2025-08-22 16:18:17] [Rank 0] step:1281/10000 train_time:104309ms step_avg:81.43ms +[2025-08-22 16:18:19] [Rank 0] step:1301/10000 train_time:105949ms step_avg:81.44ms +[2025-08-22 16:18:19] [Rank 0] step:1301/10000 train_time:105949ms step_avg:81.44ms +[2025-08-22 16:18:21] [Rank 0] step:1321/10000 train_time:107589ms step_avg:81.45ms +[2025-08-22 16:18:21] [Rank 0] step:1321/10000 train_time:107589ms step_avg:81.45ms +[2025-08-22 16:18:22] [Rank 0] 
step:1341/10000 train_time:109234ms step_avg:81.46ms +[2025-08-22 16:18:22] [Rank 0] step:1341/10000 train_time:109234ms step_avg:81.46ms +[2025-08-22 16:18:24] [Rank 0] step:1361/10000 train_time:110875ms step_avg:81.47ms +[2025-08-22 16:18:24] [Rank 0] step:1361/10000 train_time:110875ms step_avg:81.47ms +[2025-08-22 16:18:25] [Rank 0] step:1381/10000 train_time:112519ms step_avg:81.48ms +[2025-08-22 16:18:25] [Rank 0] step:1381/10000 train_time:112519ms step_avg:81.48ms +[2025-08-22 16:18:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:18:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:18:41] [Rank 0] PRINT: step:1400/10000 val_loss:5.0369 svd_entropy: attn_qk:H=0.6407,top10E=0.42,eRank=73.7,q75/q25=73.34 attn_vo:H=0.4247,top10E=0.79,eRank=18.4,q75/q25=87.78 mlp_w1:H=0.6867,top10E=0.38,eRank=108.9,q75/q25=8.24 mlp_w2:H=0.8081,top10E=0.20,eRank=224.0,q75/q25=12.48 vo_prod:H=0.3129,top10E=0.93,eRank=8.9,q75/q25=2001.00 train_time:114445ms step_avg:81.75ms +[2025-08-22 16:18:41] [Rank 0] PRINT: step:1400/10000 val_loss:5.0369 svd_entropy: attn_qk:H=0.6407,top10E=0.42,eRank=73.7,q75/q25=73.34 attn_vo:H=0.4247,top10E=0.79,eRank=18.4,q75/q25=87.78 mlp_w1:H=0.6867,top10E=0.38,eRank=108.9,q75/q25=8.24 mlp_w2:H=0.8081,top10E=0.20,eRank=224.0,q75/q25=12.48 vo_prod:H=0.3129,top10E=0.93,eRank=8.9,q75/q25=2001.00 train_time:114445ms step_avg:81.75ms +[2025-08-22 16:18:41] [Rank 0] step:1401/10000 train_time:114454ms step_avg:81.69ms +[2025-08-22 16:18:41] [Rank 0] step:1401/10000 train_time:114454ms step_avg:81.69ms +[2025-08-22 16:18:42] [Rank 0] step:1421/10000 train_time:115825ms step_avg:81.51ms +[2025-08-22 16:18:42] [Rank 0] step:1421/10000 train_time:115825ms step_avg:81.51ms +[2025-08-22 16:18:44] [Rank 0] step:1441/10000 train_time:117464ms step_avg:81.52ms +[2025-08-22 
16:18:44] [Rank 0] step:1441/10000 train_time:117464ms step_avg:81.52ms +[2025-08-22 16:18:46] [Rank 0] step:1461/10000 train_time:119105ms step_avg:81.52ms +[2025-08-22 16:18:46] [Rank 0] step:1461/10000 train_time:119105ms step_avg:81.52ms +[2025-08-22 16:18:47] [Rank 0] step:1481/10000 train_time:120746ms step_avg:81.53ms +[2025-08-22 16:18:47] [Rank 0] step:1481/10000 train_time:120746ms step_avg:81.53ms +[2025-08-22 16:18:49] [Rank 0] step:1501/10000 train_time:122396ms step_avg:81.54ms +[2025-08-22 16:18:49] [Rank 0] step:1501/10000 train_time:122396ms step_avg:81.54ms +[2025-08-22 16:18:51] [Rank 0] step:1521/10000 train_time:124051ms step_avg:81.56ms +[2025-08-22 16:18:51] [Rank 0] step:1521/10000 train_time:124051ms step_avg:81.56ms +[2025-08-22 16:18:52] [Rank 0] step:1541/10000 train_time:125706ms step_avg:81.57ms +[2025-08-22 16:18:52] [Rank 0] step:1541/10000 train_time:125706ms step_avg:81.57ms +[2025-08-22 16:18:54] [Rank 0] step:1561/10000 train_time:127360ms step_avg:81.59ms +[2025-08-22 16:18:54] [Rank 0] step:1561/10000 train_time:127360ms step_avg:81.59ms +[2025-08-22 16:18:56] [Rank 0] step:1581/10000 train_time:129017ms step_avg:81.60ms +[2025-08-22 16:18:56] [Rank 0] step:1581/10000 train_time:129017ms step_avg:81.60ms +[2025-08-22 16:18:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:18:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:19:11] [Rank 0] PRINT: step:1600/10000 val_loss:4.8992 svd_entropy: attn_qk:H=0.6493,top10E=0.40,eRank=78.0,q75/q25=77.48 attn_vo:H=0.4427,top10E=0.75,eRank=20.9,q75/q25=89.83 mlp_w1:H=0.6972,top10E=0.36,eRank=117.5,q75/q25=8.87 mlp_w2:H=0.8206,top10E=0.19,eRank=243.6,q75/q25=12.72 vo_prod:H=0.3321,top10E=0.91,eRank=10.1,q75/q25=2560.45 train_time:130960ms step_avg:81.85ms +[2025-08-22 16:19:11] [Rank 0] PRINT: step:1600/10000 val_loss:4.8992 svd_entropy: attn_qk:H=0.6493,top10E=0.40,eRank=78.0,q75/q25=77.48 attn_vo:H=0.4427,top10E=0.75,eRank=20.9,q75/q25=89.83 mlp_w1:H=0.6972,top10E=0.36,eRank=117.5,q75/q25=8.87 mlp_w2:H=0.8206,top10E=0.19,eRank=243.6,q75/q25=12.72 vo_prod:H=0.3321,top10E=0.91,eRank=10.1,q75/q25=2560.45 train_time:130960ms step_avg:81.85ms +[2025-08-22 16:19:11] [Rank 0] step:1601/10000 train_time:130969ms step_avg:81.80ms +[2025-08-22 16:19:11] [Rank 0] step:1601/10000 train_time:130969ms step_avg:81.80ms +[2025-08-22 16:19:12] [Rank 0] step:1621/10000 train_time:132346ms step_avg:81.64ms +[2025-08-22 16:19:12] [Rank 0] step:1621/10000 train_time:132346ms step_avg:81.64ms +[2025-08-22 16:19:14] [Rank 0] step:1641/10000 train_time:133995ms step_avg:81.65ms +[2025-08-22 16:19:14] [Rank 0] step:1641/10000 train_time:133995ms step_avg:81.65ms +[2025-08-22 16:19:16] [Rank 0] step:1661/10000 train_time:135644ms step_avg:81.66ms +[2025-08-22 16:19:16] [Rank 0] step:1661/10000 train_time:135644ms step_avg:81.66ms +[2025-08-22 16:19:17] [Rank 0] step:1681/10000 train_time:137293ms step_avg:81.67ms +[2025-08-22 16:19:17] [Rank 0] step:1681/10000 train_time:137293ms step_avg:81.67ms +[2025-08-22 16:19:19] [Rank 0] step:1701/10000 train_time:138945ms step_avg:81.68ms +[2025-08-22 16:19:19] [Rank 0] step:1701/10000 train_time:138945ms step_avg:81.68ms +[2025-08-22 16:19:21] [Rank 0] step:1721/10000 train_time:140594ms step_avg:81.69ms +[2025-08-22 16:19:21] [Rank 0] step:1721/10000 train_time:140594ms step_avg:81.69ms +[2025-08-22 16:19:22] 
[Rank 0] step:1741/10000 train_time:142248ms step_avg:81.70ms +[2025-08-22 16:19:22] [Rank 0] step:1741/10000 train_time:142248ms step_avg:81.70ms +[2025-08-22 16:19:24] [Rank 0] step:1761/10000 train_time:143899ms step_avg:81.71ms +[2025-08-22 16:19:24] [Rank 0] step:1761/10000 train_time:143899ms step_avg:81.71ms +[2025-08-22 16:19:26] [Rank 0] step:1781/10000 train_time:145553ms step_avg:81.73ms +[2025-08-22 16:19:26] [Rank 0] step:1781/10000 train_time:145553ms step_avg:81.73ms +[2025-08-22 16:19:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:19:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:19:41] [Rank 0] PRINT: step:1800/10000 val_loss:4.7630 svd_entropy: attn_qk:H=0.6565,top10E=0.39,eRank=81.9,q75/q25=79.98 attn_vo:H=0.4594,top10E=0.72,eRank=23.6,q75/q25=92.02 mlp_w1:H=0.7061,top10E=0.35,eRank=125.3,q75/q25=9.43 mlp_w2:H=0.8308,top10E=0.18,eRank=260.8,q75/q25=12.65 vo_prod:H=0.3504,top10E=0.89,eRank=11.5,q75/q25=3359.24 train_time:147490ms step_avg:81.94ms +[2025-08-22 16:19:41] [Rank 0] PRINT: step:1800/10000 val_loss:4.7630 svd_entropy: attn_qk:H=0.6565,top10E=0.39,eRank=81.9,q75/q25=79.98 attn_vo:H=0.4594,top10E=0.72,eRank=23.6,q75/q25=92.02 mlp_w1:H=0.7061,top10E=0.35,eRank=125.3,q75/q25=9.43 mlp_w2:H=0.8308,top10E=0.18,eRank=260.8,q75/q25=12.65 vo_prod:H=0.3504,top10E=0.89,eRank=11.5,q75/q25=3359.24 train_time:147490ms step_avg:81.94ms +[2025-08-22 16:19:41] [Rank 0] step:1801/10000 train_time:147500ms step_avg:81.90ms +[2025-08-22 16:19:41] [Rank 0] step:1801/10000 train_time:147500ms step_avg:81.90ms +[2025-08-22 16:19:43] [Rank 0] step:1821/10000 train_time:148875ms step_avg:81.75ms +[2025-08-22 16:19:43] [Rank 0] step:1821/10000 train_time:148875ms step_avg:81.75ms +[2025-08-22 16:19:44] [Rank 0] step:1841/10000 train_time:150518ms step_avg:81.76ms 
+[2025-08-22 16:19:44] [Rank 0] step:1841/10000 train_time:150518ms step_avg:81.76ms +[2025-08-22 16:19:46] [Rank 0] step:1861/10000 train_time:152163ms step_avg:81.76ms +[2025-08-22 16:19:46] [Rank 0] step:1861/10000 train_time:152163ms step_avg:81.76ms +[2025-08-22 16:19:47] [Rank 0] step:1881/10000 train_time:153812ms step_avg:81.77ms +[2025-08-22 16:19:47] [Rank 0] step:1881/10000 train_time:153812ms step_avg:81.77ms +[2025-08-22 16:19:49] [Rank 0] step:1901/10000 train_time:155459ms step_avg:81.78ms +[2025-08-22 16:19:49] [Rank 0] step:1901/10000 train_time:155459ms step_avg:81.78ms +[2025-08-22 16:19:51] [Rank 0] step:1921/10000 train_time:157105ms step_avg:81.78ms +[2025-08-22 16:19:51] [Rank 0] step:1921/10000 train_time:157105ms step_avg:81.78ms +[2025-08-22 16:19:52] [Rank 0] step:1941/10000 train_time:158756ms step_avg:81.79ms +[2025-08-22 16:19:52] [Rank 0] step:1941/10000 train_time:158756ms step_avg:81.79ms +[2025-08-22 16:19:54] [Rank 0] step:1961/10000 train_time:160405ms step_avg:81.80ms +[2025-08-22 16:19:54] [Rank 0] step:1961/10000 train_time:160405ms step_avg:81.80ms +[2025-08-22 16:19:56] [Rank 0] step:1981/10000 train_time:162057ms step_avg:81.81ms +[2025-08-22 16:19:56] [Rank 0] step:1981/10000 train_time:162057ms step_avg:81.81ms +[2025-08-22 16:19:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:19:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:20:11] [Rank 0] PRINT: step:2000/10000 val_loss:4.6968 svd_entropy: attn_qk:H=0.6627,top10E=0.38,eRank=85.4,q75/q25=83.08 attn_vo:H=0.4741,top10E=0.69,eRank=26.2,q75/q25=96.10 mlp_w1:H=0.7137,top10E=0.34,eRank=132.5,q75/q25=9.92 mlp_w2:H=0.8389,top10E=0.17,eRank=275.4,q75/q25=12.40 vo_prod:H=0.3666,top10E=0.87,eRank=12.9,q75/q25=4201.64 train_time:163992ms step_avg:82.00ms +[2025-08-22 16:20:11] [Rank 0] PRINT: step:2000/10000 val_loss:4.6968 svd_entropy: attn_qk:H=0.6627,top10E=0.38,eRank=85.4,q75/q25=83.08 attn_vo:H=0.4741,top10E=0.69,eRank=26.2,q75/q25=96.10 mlp_w1:H=0.7137,top10E=0.34,eRank=132.5,q75/q25=9.92 mlp_w2:H=0.8389,top10E=0.17,eRank=275.4,q75/q25=12.40 vo_prod:H=0.3666,top10E=0.87,eRank=12.9,q75/q25=4201.64 train_time:163992ms step_avg:82.00ms +[2025-08-22 16:20:11] [Rank 0] step:2001/10000 train_time:164001ms step_avg:81.96ms +[2025-08-22 16:20:11] [Rank 0] step:2001/10000 train_time:164001ms step_avg:81.96ms +[2025-08-22 16:20:13] [Rank 0] step:2021/10000 train_time:165369ms step_avg:81.83ms +[2025-08-22 16:20:13] [Rank 0] step:2021/10000 train_time:165369ms step_avg:81.83ms +[2025-08-22 16:20:15] [Rank 0] step:2041/10000 train_time:167420ms step_avg:82.03ms +[2025-08-22 16:20:15] [Rank 0] step:2041/10000 train_time:167420ms step_avg:82.03ms +[2025-08-22 16:20:16] [Rank 0] step:2061/10000 train_time:169066ms step_avg:82.03ms +[2025-08-22 16:20:16] [Rank 0] step:2061/10000 train_time:169066ms step_avg:82.03ms +[2025-08-22 16:20:18] [Rank 0] step:2081/10000 train_time:170712ms step_avg:82.03ms +[2025-08-22 16:20:18] [Rank 0] step:2081/10000 train_time:170712ms step_avg:82.03ms +[2025-08-22 16:20:20] [Rank 0] step:2101/10000 train_time:172360ms step_avg:82.04ms +[2025-08-22 16:20:20] [Rank 0] step:2101/10000 train_time:172360ms step_avg:82.04ms +[2025-08-22 16:20:21] [Rank 0] step:2121/10000 train_time:174009ms step_avg:82.04ms +[2025-08-22 16:20:21] [Rank 0] step:2121/10000 train_time:174009ms step_avg:82.04ms +[2025-08-22 16:20:23] 
[Rank 0] step:2141/10000 train_time:175658ms step_avg:82.04ms +[2025-08-22 16:20:23] [Rank 0] step:2141/10000 train_time:175658ms step_avg:82.04ms +[2025-08-22 16:20:25] [Rank 0] step:2161/10000 train_time:177306ms step_avg:82.05ms +[2025-08-22 16:20:25] [Rank 0] step:2161/10000 train_time:177306ms step_avg:82.05ms +[2025-08-22 16:20:26] [Rank 0] step:2181/10000 train_time:178955ms step_avg:82.05ms +[2025-08-22 16:20:26] [Rank 0] step:2181/10000 train_time:178955ms step_avg:82.05ms +[2025-08-22 16:20:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:20:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:20:41] [Rank 0] PRINT: step:2200/10000 val_loss:4.5991 svd_entropy: attn_qk:H=0.6679,top10E=0.37,eRank=88.5,q75/q25=84.89 attn_vo:H=0.4857,top10E=0.66,eRank=28.6,q75/q25=97.96 mlp_w1:H=0.7202,top10E=0.33,eRank=138.9,q75/q25=10.29 mlp_w2:H=0.8455,top10E=0.17,eRank=288.0,q75/q25=12.08 vo_prod:H=0.3793,top10E=0.85,eRank=14.2,q75/q25=4851.32 train_time:180890ms step_avg:82.22ms +[2025-08-22 16:20:41] [Rank 0] PRINT: step:2200/10000 val_loss:4.5991 svd_entropy: attn_qk:H=0.6679,top10E=0.37,eRank=88.5,q75/q25=84.89 attn_vo:H=0.4857,top10E=0.66,eRank=28.6,q75/q25=97.96 mlp_w1:H=0.7202,top10E=0.33,eRank=138.9,q75/q25=10.29 mlp_w2:H=0.8455,top10E=0.17,eRank=288.0,q75/q25=12.08 vo_prod:H=0.3793,top10E=0.85,eRank=14.2,q75/q25=4851.32 train_time:180890ms step_avg:82.22ms +[2025-08-22 16:20:41] [Rank 0] step:2201/10000 train_time:180900ms step_avg:82.19ms +[2025-08-22 16:20:41] [Rank 0] step:2201/10000 train_time:180900ms step_avg:82.19ms +[2025-08-22 16:20:43] [Rank 0] step:2221/10000 train_time:182271ms step_avg:82.07ms +[2025-08-22 16:20:43] [Rank 0] step:2221/10000 train_time:182271ms step_avg:82.07ms +[2025-08-22 16:20:45] [Rank 0] step:2241/10000 train_time:183950ms step_avg:82.08ms 
+[2025-08-22 16:20:45] [Rank 0] step:2241/10000 train_time:183950ms step_avg:82.08ms +[2025-08-22 16:20:46] [Rank 0] step:2261/10000 train_time:185643ms step_avg:82.11ms +[2025-08-22 16:20:46] [Rank 0] step:2261/10000 train_time:185643ms step_avg:82.11ms +[2025-08-22 16:20:48] [Rank 0] step:2281/10000 train_time:187337ms step_avg:82.13ms +[2025-08-22 16:20:48] [Rank 0] step:2281/10000 train_time:187337ms step_avg:82.13ms +[2025-08-22 16:20:50] [Rank 0] step:2301/10000 train_time:189034ms step_avg:82.15ms +[2025-08-22 16:20:50] [Rank 0] step:2301/10000 train_time:189034ms step_avg:82.15ms +[2025-08-22 16:20:52] [Rank 0] step:2321/10000 train_time:190731ms step_avg:82.18ms +[2025-08-22 16:20:52] [Rank 0] step:2321/10000 train_time:190731ms step_avg:82.18ms +[2025-08-22 16:20:53] [Rank 0] step:2341/10000 train_time:192429ms step_avg:82.20ms +[2025-08-22 16:20:53] [Rank 0] step:2341/10000 train_time:192429ms step_avg:82.20ms +[2025-08-22 16:20:55] [Rank 0] step:2361/10000 train_time:194128ms step_avg:82.22ms +[2025-08-22 16:20:55] [Rank 0] step:2361/10000 train_time:194128ms step_avg:82.22ms +[2025-08-22 16:20:57] [Rank 0] step:2381/10000 train_time:195828ms step_avg:82.25ms +[2025-08-22 16:20:57] [Rank 0] step:2381/10000 train_time:195828ms step_avg:82.25ms +[2025-08-22 16:20:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:20:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:21:12] [Rank 0] PRINT: step:2400/10000 val_loss:4.5219 svd_entropy: attn_qk:H=0.6722,top10E=0.37,eRank=91.1,q75/q25=87.38 attn_vo:H=0.4958,top10E=0.64,eRank=30.8,q75/q25=100.51 mlp_w1:H=0.7260,top10E=0.32,eRank=145.0,q75/q25=10.62 mlp_w2:H=0.8513,top10E=0.16,eRank=299.2,q75/q25=11.63 vo_prod:H=0.3901,top10E=0.83,eRank=15.4,q75/q25=5844.67 train_time:197824ms step_avg:82.43ms +[2025-08-22 16:21:12] [Rank 0] PRINT: step:2400/10000 val_loss:4.5219 svd_entropy: attn_qk:H=0.6722,top10E=0.37,eRank=91.1,q75/q25=87.38 attn_vo:H=0.4958,top10E=0.64,eRank=30.8,q75/q25=100.51 mlp_w1:H=0.7260,top10E=0.32,eRank=145.0,q75/q25=10.62 mlp_w2:H=0.8513,top10E=0.16,eRank=299.2,q75/q25=11.63 vo_prod:H=0.3901,top10E=0.83,eRank=15.4,q75/q25=5844.67 train_time:197824ms step_avg:82.43ms +[2025-08-22 16:21:12] [Rank 0] step:2401/10000 train_time:197834ms step_avg:82.40ms +[2025-08-22 16:21:12] [Rank 0] step:2401/10000 train_time:197834ms step_avg:82.40ms +[2025-08-22 16:21:14] [Rank 0] step:2421/10000 train_time:199257ms step_avg:82.30ms +[2025-08-22 16:21:14] [Rank 0] step:2421/10000 train_time:199257ms step_avg:82.30ms +[2025-08-22 16:21:15] [Rank 0] step:2441/10000 train_time:200952ms step_avg:82.32ms +[2025-08-22 16:21:15] [Rank 0] step:2441/10000 train_time:200952ms step_avg:82.32ms +[2025-08-22 16:21:17] [Rank 0] step:2461/10000 train_time:202644ms step_avg:82.34ms +[2025-08-22 16:21:17] [Rank 0] step:2461/10000 train_time:202644ms step_avg:82.34ms +[2025-08-22 16:21:19] [Rank 0] step:2481/10000 train_time:204339ms step_avg:82.36ms +[2025-08-22 16:21:19] [Rank 0] step:2481/10000 train_time:204339ms step_avg:82.36ms +[2025-08-22 16:21:21] [Rank 0] step:2501/10000 train_time:206033ms step_avg:82.38ms +[2025-08-22 16:21:21] [Rank 0] step:2501/10000 train_time:206033ms step_avg:82.38ms +[2025-08-22 16:21:22] [Rank 0] step:2521/10000 train_time:207729ms step_avg:82.40ms +[2025-08-22 16:21:22] [Rank 0] step:2521/10000 train_time:207729ms step_avg:82.40ms +[2025-08-22 
16:21:24] [Rank 0] step:2541/10000 train_time:209424ms step_avg:82.42ms +[2025-08-22 16:21:24] [Rank 0] step:2541/10000 train_time:209424ms step_avg:82.42ms +[2025-08-22 16:21:26] [Rank 0] step:2561/10000 train_time:211121ms step_avg:82.44ms +[2025-08-22 16:21:26] [Rank 0] step:2561/10000 train_time:211121ms step_avg:82.44ms +[2025-08-22 16:21:27] [Rank 0] step:2581/10000 train_time:212818ms step_avg:82.46ms +[2025-08-22 16:21:27] [Rank 0] step:2581/10000 train_time:212818ms step_avg:82.46ms +[2025-08-22 16:21:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:21:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:21:42] [Rank 0] PRINT: step:2600/10000 val_loss:4.4575 svd_entropy: attn_qk:H=0.6763,top10E=0.36,eRank=93.7,q75/q25=89.07 attn_vo:H=0.5050,top10E=0.62,eRank=33.0,q75/q25=102.99 mlp_w1:H=0.7312,top10E=0.31,eRank=150.7,q75/q25=10.87 mlp_w2:H=0.8561,top10E=0.16,eRank=309.1,q75/q25=11.26 vo_prod:H=0.4002,top10E=0.81,eRank=16.6,q75/q25=6746.76 train_time:214809ms step_avg:82.62ms +[2025-08-22 16:21:42] [Rank 0] PRINT: step:2600/10000 val_loss:4.4575 svd_entropy: attn_qk:H=0.6763,top10E=0.36,eRank=93.7,q75/q25=89.07 attn_vo:H=0.5050,top10E=0.62,eRank=33.0,q75/q25=102.99 mlp_w1:H=0.7312,top10E=0.31,eRank=150.7,q75/q25=10.87 mlp_w2:H=0.8561,top10E=0.16,eRank=309.1,q75/q25=11.26 vo_prod:H=0.4002,top10E=0.81,eRank=16.6,q75/q25=6746.76 train_time:214809ms step_avg:82.62ms +[2025-08-22 16:21:43] [Rank 0] step:2601/10000 train_time:214817ms step_avg:82.59ms +[2025-08-22 16:21:43] [Rank 0] step:2601/10000 train_time:214817ms step_avg:82.59ms +[2025-08-22 16:21:44] [Rank 0] step:2621/10000 train_time:216224ms step_avg:82.50ms +[2025-08-22 16:21:44] [Rank 0] step:2621/10000 train_time:216224ms step_avg:82.50ms +[2025-08-22 16:21:46] [Rank 0] step:2641/10000 train_time:217915ms 
step_avg:82.51ms +[2025-08-22 16:21:46] [Rank 0] step:2641/10000 train_time:217915ms step_avg:82.51ms +[2025-08-22 16:21:48] [Rank 0] step:2661/10000 train_time:219608ms step_avg:82.53ms +[2025-08-22 16:21:48] [Rank 0] step:2661/10000 train_time:219608ms step_avg:82.53ms +[2025-08-22 16:21:49] [Rank 0] step:2681/10000 train_time:221302ms step_avg:82.54ms +[2025-08-22 16:21:49] [Rank 0] step:2681/10000 train_time:221302ms step_avg:82.54ms +[2025-08-22 16:21:51] [Rank 0] step:2701/10000 train_time:222994ms step_avg:82.56ms +[2025-08-22 16:21:51] [Rank 0] step:2701/10000 train_time:222994ms step_avg:82.56ms +[2025-08-22 16:21:53] [Rank 0] step:2721/10000 train_time:224689ms step_avg:82.58ms +[2025-08-22 16:21:53] [Rank 0] step:2721/10000 train_time:224689ms step_avg:82.58ms +[2025-08-22 16:21:54] [Rank 0] step:2741/10000 train_time:226383ms step_avg:82.59ms +[2025-08-22 16:21:54] [Rank 0] step:2741/10000 train_time:226383ms step_avg:82.59ms +[2025-08-22 16:21:56] [Rank 0] step:2761/10000 train_time:228080ms step_avg:82.61ms +[2025-08-22 16:21:56] [Rank 0] step:2761/10000 train_time:228080ms step_avg:82.61ms +[2025-08-22 16:21:58] [Rank 0] step:2781/10000 train_time:229778ms step_avg:82.62ms +[2025-08-22 16:21:58] [Rank 0] step:2781/10000 train_time:229778ms step_avg:82.62ms +[2025-08-22 16:21:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:21:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:22:13] [Rank 0] PRINT: step:2800/10000 val_loss:4.4262 svd_entropy: attn_qk:H=0.6803,top10E=0.35,eRank=96.4,q75/q25=90.54 attn_vo:H=0.5132,top10E=0.60,eRank=35.0,q75/q25=105.72 mlp_w1:H=0.7360,top10E=0.30,eRank=156.2,q75/q25=11.06 mlp_w2:H=0.8606,top10E=0.15,eRank=318.3,q75/q25=10.85 vo_prod:H=0.4085,top10E=0.79,eRank=17.7,q75/q25=7592.23 train_time:231769ms step_avg:82.77ms +[2025-08-22 16:22:13] [Rank 0] PRINT: step:2800/10000 val_loss:4.4262 svd_entropy: attn_qk:H=0.6803,top10E=0.35,eRank=96.4,q75/q25=90.54 attn_vo:H=0.5132,top10E=0.60,eRank=35.0,q75/q25=105.72 mlp_w1:H=0.7360,top10E=0.30,eRank=156.2,q75/q25=11.06 mlp_w2:H=0.8606,top10E=0.15,eRank=318.3,q75/q25=10.85 vo_prod:H=0.4085,top10E=0.79,eRank=17.7,q75/q25=7592.23 train_time:231769ms step_avg:82.77ms +[2025-08-22 16:22:13] [Rank 0] step:2801/10000 train_time:231779ms step_avg:82.75ms +[2025-08-22 16:22:13] [Rank 0] step:2801/10000 train_time:231779ms step_avg:82.75ms +[2025-08-22 16:22:15] [Rank 0] step:2821/10000 train_time:233208ms step_avg:82.67ms +[2025-08-22 16:22:15] [Rank 0] step:2821/10000 train_time:233208ms step_avg:82.67ms +[2025-08-22 16:22:17] [Rank 0] step:2841/10000 train_time:234906ms step_avg:82.68ms +[2025-08-22 16:22:17] [Rank 0] step:2841/10000 train_time:234906ms step_avg:82.68ms +[2025-08-22 16:22:18] [Rank 0] step:2861/10000 train_time:236601ms step_avg:82.70ms +[2025-08-22 16:22:18] [Rank 0] step:2861/10000 train_time:236601ms step_avg:82.70ms +[2025-08-22 16:22:20] [Rank 0] step:2881/10000 train_time:238299ms step_avg:82.71ms +[2025-08-22 16:22:20] [Rank 0] step:2881/10000 train_time:238299ms step_avg:82.71ms +[2025-08-22 16:22:22] [Rank 0] step:2901/10000 train_time:239997ms step_avg:82.73ms +[2025-08-22 16:22:22] [Rank 0] step:2901/10000 train_time:239997ms step_avg:82.73ms +[2025-08-22 16:22:23] [Rank 0] step:2921/10000 train_time:241696ms step_avg:82.74ms +[2025-08-22 16:22:23] [Rank 0] step:2921/10000 train_time:241696ms step_avg:82.74ms +[2025-08-22 
16:22:25] [Rank 0] step:2941/10000 train_time:243396ms step_avg:82.76ms +[2025-08-22 16:22:25] [Rank 0] step:2941/10000 train_time:243396ms step_avg:82.76ms +[2025-08-22 16:22:27] [Rank 0] step:2961/10000 train_time:245097ms step_avg:82.78ms +[2025-08-22 16:22:27] [Rank 0] step:2961/10000 train_time:245097ms step_avg:82.78ms +[2025-08-22 16:22:28] [Rank 0] step:2981/10000 train_time:246804ms step_avg:82.79ms +[2025-08-22 16:22:28] [Rank 0] step:2981/10000 train_time:246804ms step_avg:82.79ms +[2025-08-22 16:22:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:22:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:22:44] [Rank 0] PRINT: step:3000/10000 val_loss:4.3766 svd_entropy: attn_qk:H=0.6841,top10E=0.35,eRank=98.8,q75/q25=91.42 attn_vo:H=0.5206,top10E=0.59,eRank=37.0,q75/q25=106.99 mlp_w1:H=0.7404,top10E=0.30,eRank=161.3,q75/q25=11.21 mlp_w2:H=0.8643,top10E=0.15,eRank=326.3,q75/q25=10.46 vo_prod:H=0.4163,top10E=0.78,eRank=18.7,q75/q25=8308.83 train_time:248812ms step_avg:82.94ms +[2025-08-22 16:22:44] [Rank 0] PRINT: step:3000/10000 val_loss:4.3766 svd_entropy: attn_qk:H=0.6841,top10E=0.35,eRank=98.8,q75/q25=91.42 attn_vo:H=0.5206,top10E=0.59,eRank=37.0,q75/q25=106.99 mlp_w1:H=0.7404,top10E=0.30,eRank=161.3,q75/q25=11.21 mlp_w2:H=0.8643,top10E=0.15,eRank=326.3,q75/q25=10.46 vo_prod:H=0.4163,top10E=0.78,eRank=18.7,q75/q25=8308.83 train_time:248812ms step_avg:82.94ms +[2025-08-22 16:22:44] [Rank 0] step:3001/10000 train_time:248821ms step_avg:82.91ms +[2025-08-22 16:22:44] [Rank 0] step:3001/10000 train_time:248821ms step_avg:82.91ms +[2025-08-22 16:22:46] [Rank 0] step:3021/10000 train_time:250254ms step_avg:82.84ms +[2025-08-22 16:22:46] [Rank 0] step:3021/10000 train_time:250254ms step_avg:82.84ms +[2025-08-22 16:22:47] [Rank 0] step:3041/10000 train_time:251951ms 
step_avg:82.85ms +[2025-08-22 16:22:47] [Rank 0] step:3041/10000 train_time:251951ms step_avg:82.85ms +[2025-08-22 16:22:49] [Rank 0] step:3061/10000 train_time:253652ms step_avg:82.87ms +[2025-08-22 16:22:49] [Rank 0] step:3061/10000 train_time:253652ms step_avg:82.87ms +[2025-08-22 16:22:51] [Rank 0] step:3081/10000 train_time:255352ms step_avg:82.88ms +[2025-08-22 16:22:51] [Rank 0] step:3081/10000 train_time:255352ms step_avg:82.88ms +[2025-08-22 16:22:52] [Rank 0] step:3101/10000 train_time:257055ms step_avg:82.89ms +[2025-08-22 16:22:52] [Rank 0] step:3101/10000 train_time:257055ms step_avg:82.89ms +[2025-08-22 16:22:54] [Rank 0] step:3121/10000 train_time:258757ms step_avg:82.91ms +[2025-08-22 16:22:54] [Rank 0] step:3121/10000 train_time:258757ms step_avg:82.91ms +[2025-08-22 16:22:56] [Rank 0] step:3141/10000 train_time:260458ms step_avg:82.92ms +[2025-08-22 16:22:56] [Rank 0] step:3141/10000 train_time:260458ms step_avg:82.92ms +[2025-08-22 16:22:57] [Rank 0] step:3161/10000 train_time:262164ms step_avg:82.94ms +[2025-08-22 16:22:57] [Rank 0] step:3161/10000 train_time:262164ms step_avg:82.94ms +[2025-08-22 16:22:59] [Rank 0] step:3181/10000 train_time:263869ms step_avg:82.95ms +[2025-08-22 16:22:59] [Rank 0] step:3181/10000 train_time:263869ms step_avg:82.95ms +[2025-08-22 16:23:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:23:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:23:14] [Rank 0] PRINT: step:3200/10000 val_loss:4.3430 svd_entropy: attn_qk:H=0.6874,top10E=0.34,eRank=101.0,q75/q25=92.65 attn_vo:H=0.5277,top10E=0.57,eRank=39.0,q75/q25=109.55 mlp_w1:H=0.7444,top10E=0.29,eRank=166.1,q75/q25=11.31 mlp_w2:H=0.8679,top10E=0.15,eRank=333.8,q75/q25=10.11 vo_prod:H=0.4239,top10E=0.76,eRank=19.8,q75/q25=9064.05 train_time:265869ms step_avg:83.08ms +[2025-08-22 16:23:14] [Rank 0] PRINT: step:3200/10000 val_loss:4.3430 svd_entropy: attn_qk:H=0.6874,top10E=0.34,eRank=101.0,q75/q25=92.65 attn_vo:H=0.5277,top10E=0.57,eRank=39.0,q75/q25=109.55 mlp_w1:H=0.7444,top10E=0.29,eRank=166.1,q75/q25=11.31 mlp_w2:H=0.8679,top10E=0.15,eRank=333.8,q75/q25=10.11 vo_prod:H=0.4239,top10E=0.76,eRank=19.8,q75/q25=9064.05 train_time:265869ms step_avg:83.08ms +[2025-08-22 16:23:15] [Rank 0] step:3201/10000 train_time:265879ms step_avg:83.06ms +[2025-08-22 16:23:15] [Rank 0] step:3201/10000 train_time:265879ms step_avg:83.06ms +[2025-08-22 16:23:16] [Rank 0] step:3221/10000 train_time:267310ms step_avg:82.99ms +[2025-08-22 16:23:16] [Rank 0] step:3221/10000 train_time:267310ms step_avg:82.99ms +[2025-08-22 16:23:18] [Rank 0] step:3241/10000 train_time:269007ms step_avg:83.00ms +[2025-08-22 16:23:18] [Rank 0] step:3241/10000 train_time:269007ms step_avg:83.00ms +[2025-08-22 16:23:20] [Rank 0] step:3261/10000 train_time:270706ms step_avg:83.01ms +[2025-08-22 16:23:20] [Rank 0] step:3261/10000 train_time:270706ms step_avg:83.01ms +[2025-08-22 16:23:21] [Rank 0] step:3281/10000 train_time:272464ms step_avg:83.04ms +[2025-08-22 16:23:21] [Rank 0] step:3281/10000 train_time:272464ms step_avg:83.04ms +[2025-08-22 16:23:23] [Rank 0] step:3301/10000 train_time:274168ms step_avg:83.06ms +[2025-08-22 16:23:23] [Rank 0] step:3301/10000 train_time:274168ms step_avg:83.06ms +[2025-08-22 16:23:25] [Rank 0] step:3321/10000 train_time:275872ms step_avg:83.07ms +[2025-08-22 16:23:25] [Rank 0] step:3321/10000 train_time:275872ms step_avg:83.07ms +[2025-08-22 
16:23:26] [Rank 0] step:3341/10000 train_time:277577ms step_avg:83.08ms +[2025-08-22 16:23:26] [Rank 0] step:3341/10000 train_time:277577ms step_avg:83.08ms +[2025-08-22 16:23:28] [Rank 0] step:3361/10000 train_time:279282ms step_avg:83.09ms +[2025-08-22 16:23:28] [Rank 0] step:3361/10000 train_time:279282ms step_avg:83.09ms +[2025-08-22 16:23:30] [Rank 0] step:3381/10000 train_time:280986ms step_avg:83.11ms +[2025-08-22 16:23:30] [Rank 0] step:3381/10000 train_time:280986ms step_avg:83.11ms +[2025-08-22 16:23:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:23:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:23:45] [Rank 0] PRINT: step:3400/10000 val_loss:4.3163 svd_entropy: attn_qk:H=0.6910,top10E=0.34,eRank=103.5,q75/q25=93.27 attn_vo:H=0.5341,top10E=0.56,eRank=40.9,q75/q25=111.97 mlp_w1:H=0.7480,top10E=0.29,eRank=170.6,q75/q25=11.36 mlp_w2:H=0.8710,top10E=0.15,eRank=340.6,q75/q25=9.77 vo_prod:H=0.4302,top10E=0.75,eRank=20.8,q75/q25=9751.34 train_time:282989ms step_avg:83.23ms +[2025-08-22 16:23:45] [Rank 0] PRINT: step:3400/10000 val_loss:4.3163 svd_entropy: attn_qk:H=0.6910,top10E=0.34,eRank=103.5,q75/q25=93.27 attn_vo:H=0.5341,top10E=0.56,eRank=40.9,q75/q25=111.97 mlp_w1:H=0.7480,top10E=0.29,eRank=170.6,q75/q25=11.36 mlp_w2:H=0.8710,top10E=0.15,eRank=340.6,q75/q25=9.77 vo_prod:H=0.4302,top10E=0.75,eRank=20.8,q75/q25=9751.34 train_time:282989ms step_avg:83.23ms +[2025-08-22 16:23:45] [Rank 0] step:3401/10000 train_time:282999ms step_avg:83.21ms +[2025-08-22 16:23:45] [Rank 0] step:3401/10000 train_time:282999ms step_avg:83.21ms +[2025-08-22 16:23:47] [Rank 0] step:3421/10000 train_time:284424ms step_avg:83.14ms +[2025-08-22 16:23:47] [Rank 0] step:3421/10000 train_time:284424ms step_avg:83.14ms +[2025-08-22 16:23:49] [Rank 0] step:3441/10000 train_time:286126ms 
step_avg:83.15ms +[2025-08-22 16:23:49] [Rank 0] step:3441/10000 train_time:286126ms step_avg:83.15ms +[2025-08-22 16:23:50] [Rank 0] step:3461/10000 train_time:287831ms step_avg:83.16ms +[2025-08-22 16:23:50] [Rank 0] step:3461/10000 train_time:287831ms step_avg:83.16ms +[2025-08-22 16:23:52] [Rank 0] step:3481/10000 train_time:289537ms step_avg:83.18ms +[2025-08-22 16:23:52] [Rank 0] step:3481/10000 train_time:289537ms step_avg:83.18ms +[2025-08-22 16:23:54] [Rank 0] step:3501/10000 train_time:291246ms step_avg:83.19ms +[2025-08-22 16:23:54] [Rank 0] step:3501/10000 train_time:291246ms step_avg:83.19ms +[2025-08-22 16:23:55] [Rank 0] step:3521/10000 train_time:292954ms step_avg:83.20ms +[2025-08-22 16:23:55] [Rank 0] step:3521/10000 train_time:292954ms step_avg:83.20ms +[2025-08-22 16:23:57] [Rank 0] step:3541/10000 train_time:294664ms step_avg:83.21ms +[2025-08-22 16:23:57] [Rank 0] step:3541/10000 train_time:294664ms step_avg:83.21ms +[2025-08-22 16:23:59] [Rank 0] step:3561/10000 train_time:296372ms step_avg:83.23ms +[2025-08-22 16:23:59] [Rank 0] step:3561/10000 train_time:296372ms step_avg:83.23ms +[2025-08-22 16:24:01] [Rank 0] step:3581/10000 train_time:298083ms step_avg:83.24ms +[2025-08-22 16:24:01] [Rank 0] step:3581/10000 train_time:298083ms step_avg:83.24ms +[2025-08-22 16:24:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:24:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:24:16] [Rank 0] PRINT: step:3600/10000 val_loss:4.2979 svd_entropy: attn_qk:H=0.6940,top10E=0.33,eRank=105.7,q75/q25=94.70 attn_vo:H=0.5398,top10E=0.55,eRank=42.7,q75/q25=112.31 mlp_w1:H=0.7514,top10E=0.28,eRank=174.9,q75/q25=11.41 mlp_w2:H=0.8738,top10E=0.14,eRank=346.8,q75/q25=9.44 vo_prod:H=0.4362,top10E=0.74,eRank=21.7,q75/q25=10440.61 train_time:300092ms step_avg:83.36ms +[2025-08-22 16:24:16] [Rank 0] PRINT: step:3600/10000 val_loss:4.2979 svd_entropy: attn_qk:H=0.6940,top10E=0.33,eRank=105.7,q75/q25=94.70 attn_vo:H=0.5398,top10E=0.55,eRank=42.7,q75/q25=112.31 mlp_w1:H=0.7514,top10E=0.28,eRank=174.9,q75/q25=11.41 mlp_w2:H=0.8738,top10E=0.14,eRank=346.8,q75/q25=9.44 vo_prod:H=0.4362,top10E=0.74,eRank=21.7,q75/q25=10440.61 train_time:300092ms step_avg:83.36ms +[2025-08-22 16:24:16] [Rank 0] step:3601/10000 train_time:300100ms step_avg:83.34ms +[2025-08-22 16:24:16] [Rank 0] step:3601/10000 train_time:300100ms step_avg:83.34ms +[2025-08-22 16:24:18] [Rank 0] step:3621/10000 train_time:301525ms step_avg:83.27ms +[2025-08-22 16:24:18] [Rank 0] step:3621/10000 train_time:301525ms step_avg:83.27ms +[2025-08-22 16:24:19] [Rank 0] step:3641/10000 train_time:303226ms step_avg:83.28ms +[2025-08-22 16:24:19] [Rank 0] step:3641/10000 train_time:303226ms step_avg:83.28ms +[2025-08-22 16:24:21] [Rank 0] step:3661/10000 train_time:304930ms step_avg:83.29ms +[2025-08-22 16:24:21] [Rank 0] step:3661/10000 train_time:304930ms step_avg:83.29ms +[2025-08-22 16:24:23] [Rank 0] step:3681/10000 train_time:306632ms step_avg:83.30ms +[2025-08-22 16:24:23] [Rank 0] step:3681/10000 train_time:306632ms step_avg:83.30ms +[2025-08-22 16:24:24] [Rank 0] step:3701/10000 train_time:308395ms step_avg:83.33ms +[2025-08-22 16:24:24] [Rank 0] step:3701/10000 train_time:308395ms step_avg:83.33ms +[2025-08-22 16:24:26] [Rank 0] step:3721/10000 train_time:310123ms step_avg:83.34ms +[2025-08-22 16:24:26] [Rank 0] step:3721/10000 train_time:310123ms step_avg:83.34ms +[2025-08-22 
16:24:28] [Rank 0] step:3741/10000 train_time:311863ms step_avg:83.36ms +[2025-08-22 16:24:28] [Rank 0] step:3741/10000 train_time:311863ms step_avg:83.36ms +[2025-08-22 16:24:30] [Rank 0] step:3761/10000 train_time:313604ms step_avg:83.38ms +[2025-08-22 16:24:30] [Rank 0] step:3761/10000 train_time:313604ms step_avg:83.38ms +[2025-08-22 16:24:31] [Rank 0] step:3781/10000 train_time:315350ms step_avg:83.40ms +[2025-08-22 16:24:31] [Rank 0] step:3781/10000 train_time:315350ms step_avg:83.40ms +[2025-08-22 16:24:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:24:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:24:47] [Rank 0] PRINT: step:3800/10000 val_loss:4.2337 svd_entropy: attn_qk:H=0.6968,top10E=0.33,eRank=107.7,q75/q25=94.45 attn_vo:H=0.5454,top10E=0.54,eRank=44.6,q75/q25=113.04 mlp_w1:H=0.7546,top10E=0.28,eRank=178.9,q75/q25=11.43 mlp_w2:H=0.8763,top10E=0.14,eRank=352.4,q75/q25=9.18 vo_prod:H=0.4420,top10E=0.73,eRank=22.7,q75/q25=10952.71 train_time:317393ms step_avg:83.52ms +[2025-08-22 16:24:47] [Rank 0] PRINT: step:3800/10000 val_loss:4.2337 svd_entropy: attn_qk:H=0.6968,top10E=0.33,eRank=107.7,q75/q25=94.45 attn_vo:H=0.5454,top10E=0.54,eRank=44.6,q75/q25=113.04 mlp_w1:H=0.7546,top10E=0.28,eRank=178.9,q75/q25=11.43 mlp_w2:H=0.8763,top10E=0.14,eRank=352.4,q75/q25=9.18 vo_prod:H=0.4420,top10E=0.73,eRank=22.7,q75/q25=10952.71 train_time:317393ms step_avg:83.52ms +[2025-08-22 16:24:47] [Rank 0] step:3801/10000 train_time:317403ms step_avg:83.51ms +[2025-08-22 16:24:47] [Rank 0] step:3801/10000 train_time:317403ms step_avg:83.51ms +[2025-08-22 16:24:49] [Rank 0] step:3821/10000 train_time:318847ms step_avg:83.45ms +[2025-08-22 16:24:49] [Rank 0] step:3821/10000 train_time:318847ms step_avg:83.45ms +[2025-08-22 16:24:50] [Rank 0] step:3841/10000 train_time:320586ms 
step_avg:83.46ms +[2025-08-22 16:24:50] [Rank 0] step:3841/10000 train_time:320586ms step_avg:83.46ms +[2025-08-22 16:24:52] [Rank 0] step:3861/10000 train_time:322327ms step_avg:83.48ms +[2025-08-22 16:24:52] [Rank 0] step:3861/10000 train_time:322327ms step_avg:83.48ms +[2025-08-22 16:24:54] [Rank 0] step:3881/10000 train_time:324066ms step_avg:83.50ms +[2025-08-22 16:24:54] [Rank 0] step:3881/10000 train_time:324066ms step_avg:83.50ms +[2025-08-22 16:24:56] [Rank 0] step:3901/10000 train_time:325804ms step_avg:83.52ms +[2025-08-22 16:24:56] [Rank 0] step:3901/10000 train_time:325804ms step_avg:83.52ms +[2025-08-22 16:24:57] [Rank 0] step:3921/10000 train_time:327544ms step_avg:83.54ms +[2025-08-22 16:24:57] [Rank 0] step:3921/10000 train_time:327544ms step_avg:83.54ms +[2025-08-22 16:24:59] [Rank 0] step:3941/10000 train_time:329285ms step_avg:83.55ms +[2025-08-22 16:24:59] [Rank 0] step:3941/10000 train_time:329285ms step_avg:83.55ms +[2025-08-22 16:25:01] [Rank 0] step:3961/10000 train_time:331024ms step_avg:83.57ms +[2025-08-22 16:25:01] [Rank 0] step:3961/10000 train_time:331024ms step_avg:83.57ms +[2025-08-22 16:25:03] [Rank 0] step:3981/10000 train_time:332764ms step_avg:83.59ms +[2025-08-22 16:25:03] [Rank 0] step:3981/10000 train_time:332764ms step_avg:83.59ms +[2025-08-22 16:25:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:25:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:25:18] [Rank 0] PRINT: step:4000/10000 val_loss:4.2027 svd_entropy: attn_qk:H=0.6995,top10E=0.32,eRank=109.7,q75/q25=94.71 attn_vo:H=0.5503,top10E=0.53,eRank=46.2,q75/q25=112.06 mlp_w1:H=0.7576,top10E=0.28,eRank=183.0,q75/q25=11.41 mlp_w2:H=0.8786,top10E=0.14,eRank=357.6,q75/q25=8.93 vo_prod:H=0.4464,top10E=0.72,eRank=23.6,q75/q25=10615.01 train_time:334806ms step_avg:83.70ms +[2025-08-22 16:25:18] [Rank 0] PRINT: step:4000/10000 val_loss:4.2027 svd_entropy: attn_qk:H=0.6995,top10E=0.32,eRank=109.7,q75/q25=94.71 attn_vo:H=0.5503,top10E=0.53,eRank=46.2,q75/q25=112.06 mlp_w1:H=0.7576,top10E=0.28,eRank=183.0,q75/q25=11.41 mlp_w2:H=0.8786,top10E=0.14,eRank=357.6,q75/q25=8.93 vo_prod:H=0.4464,top10E=0.72,eRank=23.6,q75/q25=10615.01 train_time:334806ms step_avg:83.70ms +[2025-08-22 16:25:18] [Rank 0] step:4001/10000 train_time:334815ms step_avg:83.68ms +[2025-08-22 16:25:18] [Rank 0] step:4001/10000 train_time:334815ms step_avg:83.68ms +[2025-08-22 16:25:20] [Rank 0] step:4021/10000 train_time:336269ms step_avg:83.63ms +[2025-08-22 16:25:20] [Rank 0] step:4021/10000 train_time:336269ms step_avg:83.63ms +[2025-08-22 16:25:21] [Rank 0] step:4041/10000 train_time:338005ms step_avg:83.64ms +[2025-08-22 16:25:21] [Rank 0] step:4041/10000 train_time:338005ms step_avg:83.64ms +[2025-08-22 16:25:23] [Rank 0] step:4061/10000 train_time:339743ms step_avg:83.66ms +[2025-08-22 16:25:23] [Rank 0] step:4061/10000 train_time:339743ms step_avg:83.66ms +[2025-08-22 16:25:25] [Rank 0] step:4081/10000 train_time:341875ms step_avg:83.77ms +[2025-08-22 16:25:25] [Rank 0] step:4081/10000 train_time:341875ms step_avg:83.77ms +[2025-08-22 16:25:27] [Rank 0] step:4101/10000 train_time:343614ms step_avg:83.79ms +[2025-08-22 16:25:27] [Rank 0] step:4101/10000 train_time:343614ms step_avg:83.79ms +[2025-08-22 16:25:29] [Rank 0] step:4121/10000 train_time:345354ms step_avg:83.80ms +[2025-08-22 16:25:29] [Rank 0] step:4121/10000 train_time:345354ms step_avg:83.80ms +[2025-08-22 
16:25:30] [Rank 0] step:4141/10000 train_time:347094ms step_avg:83.82ms +[2025-08-22 16:25:30] [Rank 0] step:4141/10000 train_time:347094ms step_avg:83.82ms +[2025-08-22 16:25:32] [Rank 0] step:4161/10000 train_time:348833ms step_avg:83.83ms +[2025-08-22 16:25:32] [Rank 0] step:4161/10000 train_time:348833ms step_avg:83.83ms +[2025-08-22 16:25:34] [Rank 0] step:4181/10000 train_time:350576ms step_avg:83.85ms +[2025-08-22 16:25:34] [Rank 0] step:4181/10000 train_time:350576ms step_avg:83.85ms +[2025-08-22 16:25:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:25:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:25:49] [Rank 0] PRINT: step:4200/10000 val_loss:4.1866 svd_entropy: attn_qk:H=0.7019,top10E=0.32,eRank=111.5,q75/q25=94.82 attn_vo:H=0.5553,top10E=0.52,eRank=48.0,q75/q25=112.44 mlp_w1:H=0.7605,top10E=0.27,eRank=186.7,q75/q25=11.39 mlp_w2:H=0.8807,top10E=0.14,eRank=362.4,q75/q25=8.70 vo_prod:H=0.4520,top10E=0.71,eRank=24.5,q75/q25=11290.21 train_time:352617ms step_avg:83.96ms +[2025-08-22 16:25:49] [Rank 0] PRINT: step:4200/10000 val_loss:4.1866 svd_entropy: attn_qk:H=0.7019,top10E=0.32,eRank=111.5,q75/q25=94.82 attn_vo:H=0.5553,top10E=0.52,eRank=48.0,q75/q25=112.44 mlp_w1:H=0.7605,top10E=0.27,eRank=186.7,q75/q25=11.39 mlp_w2:H=0.8807,top10E=0.14,eRank=362.4,q75/q25=8.70 vo_prod:H=0.4520,top10E=0.71,eRank=24.5,q75/q25=11290.21 train_time:352617ms step_avg:83.96ms +[2025-08-22 16:25:49] [Rank 0] step:4201/10000 train_time:352627ms step_avg:83.94ms +[2025-08-22 16:25:49] [Rank 0] step:4201/10000 train_time:352627ms step_avg:83.94ms +[2025-08-22 16:25:51] [Rank 0] step:4221/10000 train_time:354085ms step_avg:83.89ms +[2025-08-22 16:25:51] [Rank 0] step:4221/10000 train_time:354085ms step_avg:83.89ms +[2025-08-22 16:25:53] [Rank 0] step:4241/10000 train_time:355824ms 
step_avg:83.90ms +[2025-08-22 16:25:53] [Rank 0] step:4241/10000 train_time:355824ms step_avg:83.90ms +[2025-08-22 16:25:54] [Rank 0] step:4261/10000 train_time:357562ms step_avg:83.92ms +[2025-08-22 16:25:54] [Rank 0] step:4261/10000 train_time:357562ms step_avg:83.92ms +[2025-08-22 16:25:56] [Rank 0] step:4281/10000 train_time:359303ms step_avg:83.93ms +[2025-08-22 16:25:56] [Rank 0] step:4281/10000 train_time:359303ms step_avg:83.93ms +[2025-08-22 16:25:58] [Rank 0] step:4301/10000 train_time:361045ms step_avg:83.94ms +[2025-08-22 16:25:58] [Rank 0] step:4301/10000 train_time:361045ms step_avg:83.94ms +[2025-08-22 16:26:00] [Rank 0] step:4321/10000 train_time:362789ms step_avg:83.96ms +[2025-08-22 16:26:00] [Rank 0] step:4321/10000 train_time:362789ms step_avg:83.96ms +[2025-08-22 16:26:01] [Rank 0] step:4341/10000 train_time:364530ms step_avg:83.97ms +[2025-08-22 16:26:01] [Rank 0] step:4341/10000 train_time:364530ms step_avg:83.97ms +[2025-08-22 16:26:03] [Rank 0] step:4361/10000 train_time:366274ms step_avg:83.99ms +[2025-08-22 16:26:03] [Rank 0] step:4361/10000 train_time:366274ms step_avg:83.99ms +[2025-08-22 16:26:05] [Rank 0] step:4381/10000 train_time:368021ms step_avg:84.00ms +[2025-08-22 16:26:05] [Rank 0] step:4381/10000 train_time:368021ms step_avg:84.00ms +[2025-08-22 16:26:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:26:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:26:20] [Rank 0] PRINT: step:4400/10000 val_loss:4.1657 svd_entropy: attn_qk:H=0.7043,top10E=0.32,eRank=113.3,q75/q25=94.95 attn_vo:H=0.5598,top10E=0.51,eRank=49.6,q75/q25=114.07 mlp_w1:H=0.7631,top10E=0.27,eRank=190.3,q75/q25=11.39 mlp_w2:H=0.8827,top10E=0.14,eRank=367.0,q75/q25=8.48 vo_prod:H=0.4564,top10E=0.70,eRank=25.4,q75/q25=11621.07 train_time:370068ms step_avg:84.11ms +[2025-08-22 16:26:20] [Rank 0] PRINT: step:4400/10000 val_loss:4.1657 svd_entropy: attn_qk:H=0.7043,top10E=0.32,eRank=113.3,q75/q25=94.95 attn_vo:H=0.5598,top10E=0.51,eRank=49.6,q75/q25=114.07 mlp_w1:H=0.7631,top10E=0.27,eRank=190.3,q75/q25=11.39 mlp_w2:H=0.8827,top10E=0.14,eRank=367.0,q75/q25=8.48 vo_prod:H=0.4564,top10E=0.70,eRank=25.4,q75/q25=11621.07 train_time:370068ms step_avg:84.11ms +[2025-08-22 16:26:20] [Rank 0] step:4401/10000 train_time:370078ms step_avg:84.09ms +[2025-08-22 16:26:20] [Rank 0] step:4401/10000 train_time:370078ms step_avg:84.09ms +[2025-08-22 16:26:22] [Rank 0] step:4421/10000 train_time:371522ms step_avg:84.04ms +[2025-08-22 16:26:22] [Rank 0] step:4421/10000 train_time:371522ms step_avg:84.04ms +[2025-08-22 16:26:24] [Rank 0] step:4441/10000 train_time:373261ms step_avg:84.05ms +[2025-08-22 16:26:24] [Rank 0] step:4441/10000 train_time:373261ms step_avg:84.05ms +[2025-08-22 16:26:25] [Rank 0] step:4461/10000 train_time:375005ms step_avg:84.06ms +[2025-08-22 16:26:25] [Rank 0] step:4461/10000 train_time:375005ms step_avg:84.06ms +[2025-08-22 16:26:27] [Rank 0] step:4481/10000 train_time:376750ms step_avg:84.08ms +[2025-08-22 16:26:27] [Rank 0] step:4481/10000 train_time:376750ms step_avg:84.08ms +[2025-08-22 16:26:29] [Rank 0] step:4501/10000 train_time:378497ms step_avg:84.09ms +[2025-08-22 16:26:29] [Rank 0] step:4501/10000 train_time:378497ms step_avg:84.09ms +[2025-08-22 16:26:31] [Rank 0] step:4521/10000 train_time:380242ms step_avg:84.11ms +[2025-08-22 16:26:31] [Rank 0] step:4521/10000 train_time:380242ms step_avg:84.11ms +[2025-08-22 
16:26:32] [Rank 0] step:4541/10000 train_time:381987ms step_avg:84.12ms +[2025-08-22 16:26:32] [Rank 0] step:4541/10000 train_time:381987ms step_avg:84.12ms +[2025-08-22 16:26:34] [Rank 0] step:4561/10000 train_time:383735ms step_avg:84.13ms +[2025-08-22 16:26:34] [Rank 0] step:4561/10000 train_time:383735ms step_avg:84.13ms +[2025-08-22 16:26:36] [Rank 0] step:4581/10000 train_time:385485ms step_avg:84.15ms +[2025-08-22 16:26:36] [Rank 0] step:4581/10000 train_time:385485ms step_avg:84.15ms +[2025-08-22 16:26:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:26:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:26:51] [Rank 0] PRINT: step:4600/10000 val_loss:4.1281 svd_entropy: attn_qk:H=0.7064,top10E=0.31,eRank=115.0,q75/q25=94.69 attn_vo:H=0.5643,top10E=0.50,eRank=51.3,q75/q25=115.31 mlp_w1:H=0.7656,top10E=0.27,eRank=193.8,q75/q25=11.39 mlp_w2:H=0.8845,top10E=0.14,eRank=371.4,q75/q25=8.29 vo_prod:H=0.4611,top10E=0.69,eRank=26.3,q75/q25=12210.36 train_time:387539ms step_avg:84.25ms +[2025-08-22 16:26:51] [Rank 0] PRINT: step:4600/10000 val_loss:4.1281 svd_entropy: attn_qk:H=0.7064,top10E=0.31,eRank=115.0,q75/q25=94.69 attn_vo:H=0.5643,top10E=0.50,eRank=51.3,q75/q25=115.31 mlp_w1:H=0.7656,top10E=0.27,eRank=193.8,q75/q25=11.39 mlp_w2:H=0.8845,top10E=0.14,eRank=371.4,q75/q25=8.29 vo_prod:H=0.4611,top10E=0.69,eRank=26.3,q75/q25=12210.36 train_time:387539ms step_avg:84.25ms +[2025-08-22 16:26:51] [Rank 0] step:4601/10000 train_time:387548ms step_avg:84.23ms +[2025-08-22 16:26:51] [Rank 0] step:4601/10000 train_time:387548ms step_avg:84.23ms +[2025-08-22 16:26:53] [Rank 0] step:4621/10000 train_time:389001ms step_avg:84.18ms +[2025-08-22 16:26:53] [Rank 0] step:4621/10000 train_time:389001ms step_avg:84.18ms +[2025-08-22 16:26:55] [Rank 0] step:4641/10000 train_time:390747ms 
step_avg:84.19ms +[2025-08-22 16:26:55] [Rank 0] step:4641/10000 train_time:390747ms step_avg:84.19ms +[2025-08-22 16:26:56] [Rank 0] step:4661/10000 train_time:392488ms step_avg:84.21ms +[2025-08-22 16:26:56] [Rank 0] step:4661/10000 train_time:392488ms step_avg:84.21ms +[2025-08-22 16:26:58] [Rank 0] step:4681/10000 train_time:394233ms step_avg:84.22ms +[2025-08-22 16:26:58] [Rank 0] step:4681/10000 train_time:394233ms step_avg:84.22ms +[2025-08-22 16:27:00] [Rank 0] step:4701/10000 train_time:395978ms step_avg:84.23ms +[2025-08-22 16:27:00] [Rank 0] step:4701/10000 train_time:395978ms step_avg:84.23ms +[2025-08-22 16:27:02] [Rank 0] step:4721/10000 train_time:397720ms step_avg:84.24ms +[2025-08-22 16:27:02] [Rank 0] step:4721/10000 train_time:397720ms step_avg:84.24ms +[2025-08-22 16:27:03] [Rank 0] step:4741/10000 train_time:399467ms step_avg:84.26ms +[2025-08-22 16:27:03] [Rank 0] step:4741/10000 train_time:399467ms step_avg:84.26ms +[2025-08-22 16:27:05] [Rank 0] step:4761/10000 train_time:401213ms step_avg:84.27ms +[2025-08-22 16:27:05] [Rank 0] step:4761/10000 train_time:401213ms step_avg:84.27ms +[2025-08-22 16:27:07] [Rank 0] step:4781/10000 train_time:402959ms step_avg:84.28ms +[2025-08-22 16:27:07] [Rank 0] step:4781/10000 train_time:402959ms step_avg:84.28ms +[2025-08-22 16:27:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:27:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:27:22] [Rank 0] PRINT: step:4800/10000 val_loss:4.1228 svd_entropy: attn_qk:H=0.7084,top10E=0.31,eRank=116.5,q75/q25=95.20 attn_vo:H=0.5685,top10E=0.49,eRank=52.9,q75/q25=116.24 mlp_w1:H=0.7681,top10E=0.26,eRank=197.2,q75/q25=11.36 mlp_w2:H=0.8863,top10E=0.13,eRank=375.4,q75/q25=8.11 vo_prod:H=0.4654,top10E=0.68,eRank=27.1,q75/q25=12570.13 train_time:405008ms step_avg:84.38ms +[2025-08-22 16:27:22] [Rank 0] PRINT: step:4800/10000 val_loss:4.1228 svd_entropy: attn_qk:H=0.7084,top10E=0.31,eRank=116.5,q75/q25=95.20 attn_vo:H=0.5685,top10E=0.49,eRank=52.9,q75/q25=116.24 mlp_w1:H=0.7681,top10E=0.26,eRank=197.2,q75/q25=11.36 mlp_w2:H=0.8863,top10E=0.13,eRank=375.4,q75/q25=8.11 vo_prod:H=0.4654,top10E=0.68,eRank=27.1,q75/q25=12570.13 train_time:405008ms step_avg:84.38ms +[2025-08-22 16:27:22] [Rank 0] step:4801/10000 train_time:405019ms step_avg:84.36ms +[2025-08-22 16:27:22] [Rank 0] step:4801/10000 train_time:405019ms step_avg:84.36ms +[2025-08-22 16:27:24] [Rank 0] step:4821/10000 train_time:406474ms step_avg:84.31ms +[2025-08-22 16:27:24] [Rank 0] step:4821/10000 train_time:406474ms step_avg:84.31ms +[2025-08-22 16:27:26] [Rank 0] step:4841/10000 train_time:408220ms step_avg:84.33ms +[2025-08-22 16:27:26] [Rank 0] step:4841/10000 train_time:408220ms step_avg:84.33ms +[2025-08-22 16:27:27] [Rank 0] step:4861/10000 train_time:409969ms step_avg:84.34ms +[2025-08-22 16:27:27] [Rank 0] step:4861/10000 train_time:409969ms step_avg:84.34ms +[2025-08-22 16:27:29] [Rank 0] step:4881/10000 train_time:411715ms step_avg:84.35ms +[2025-08-22 16:27:29] [Rank 0] step:4881/10000 train_time:411715ms step_avg:84.35ms +[2025-08-22 16:27:31] [Rank 0] step:4901/10000 train_time:413463ms step_avg:84.36ms +[2025-08-22 16:27:31] [Rank 0] step:4901/10000 train_time:413463ms step_avg:84.36ms +[2025-08-22 16:27:33] [Rank 0] step:4921/10000 train_time:415214ms step_avg:84.38ms +[2025-08-22 16:27:33] [Rank 0] step:4921/10000 train_time:415214ms step_avg:84.38ms +[2025-08-22 
16:27:34] [Rank 0] step:4941/10000 train_time:416967ms step_avg:84.39ms +[2025-08-22 16:27:34] [Rank 0] step:4941/10000 train_time:416967ms step_avg:84.39ms +[2025-08-22 16:27:36] [Rank 0] step:4961/10000 train_time:418751ms step_avg:84.41ms +[2025-08-22 16:27:36] [Rank 0] step:4961/10000 train_time:418751ms step_avg:84.41ms +[2025-08-22 16:27:38] [Rank 0] step:4981/10000 train_time:420505ms step_avg:84.42ms +[2025-08-22 16:27:38] [Rank 0] step:4981/10000 train_time:420505ms step_avg:84.42ms +[2025-08-22 16:27:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:27:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:27:53] [Rank 0] PRINT: step:5000/10000 val_loss:4.0984 svd_entropy: attn_qk:H=0.7105,top10E=0.31,eRank=118.2,q75/q25=94.89 attn_vo:H=0.5725,top10E=0.48,eRank=54.5,q75/q25=116.58 mlp_w1:H=0.7703,top10E=0.26,eRank=200.4,q75/q25=11.33 mlp_w2:H=0.8878,top10E=0.13,eRank=379.1,q75/q25=7.97 vo_prod:H=0.4697,top10E=0.67,eRank=28.0,q75/q25=12718.06 train_time:422562ms step_avg:84.51ms +[2025-08-22 16:27:53] [Rank 0] PRINT: step:5000/10000 val_loss:4.0984 svd_entropy: attn_qk:H=0.7105,top10E=0.31,eRank=118.2,q75/q25=94.89 attn_vo:H=0.5725,top10E=0.48,eRank=54.5,q75/q25=116.58 mlp_w1:H=0.7703,top10E=0.26,eRank=200.4,q75/q25=11.33 mlp_w2:H=0.8878,top10E=0.13,eRank=379.1,q75/q25=7.97 vo_prod:H=0.4697,top10E=0.67,eRank=28.0,q75/q25=12718.06 train_time:422562ms step_avg:84.51ms +[2025-08-22 16:27:53] [Rank 0] step:5001/10000 train_time:422570ms step_avg:84.50ms +[2025-08-22 16:27:53] [Rank 0] step:5001/10000 train_time:422570ms step_avg:84.50ms +[2025-08-22 16:27:55] [Rank 0] step:5021/10000 train_time:424034ms step_avg:84.45ms +[2025-08-22 16:27:55] [Rank 0] step:5021/10000 train_time:424034ms step_avg:84.45ms +[2025-08-22 16:27:57] [Rank 0] step:5041/10000 train_time:425781ms 
step_avg:84.46ms +[2025-08-22 16:27:57] [Rank 0] step:5041/10000 train_time:425781ms step_avg:84.46ms +[2025-08-22 16:27:59] [Rank 0] step:5061/10000 train_time:427523ms step_avg:84.47ms +[2025-08-22 16:27:59] [Rank 0] step:5061/10000 train_time:427523ms step_avg:84.47ms +[2025-08-22 16:28:00] [Rank 0] step:5081/10000 train_time:429268ms step_avg:84.48ms +[2025-08-22 16:28:00] [Rank 0] step:5081/10000 train_time:429268ms step_avg:84.48ms +[2025-08-22 16:28:02] [Rank 0] step:5101/10000 train_time:431014ms step_avg:84.50ms +[2025-08-22 16:28:02] [Rank 0] step:5101/10000 train_time:431014ms step_avg:84.50ms +[2025-08-22 16:28:04] [Rank 0] step:5121/10000 train_time:432760ms step_avg:84.51ms +[2025-08-22 16:28:04] [Rank 0] step:5121/10000 train_time:432760ms step_avg:84.51ms +[2025-08-22 16:28:06] [Rank 0] step:5141/10000 train_time:434511ms step_avg:84.52ms +[2025-08-22 16:28:06] [Rank 0] step:5141/10000 train_time:434511ms step_avg:84.52ms +[2025-08-22 16:28:07] [Rank 0] step:5161/10000 train_time:436262ms step_avg:84.53ms +[2025-08-22 16:28:07] [Rank 0] step:5161/10000 train_time:436262ms step_avg:84.53ms +[2025-08-22 16:28:09] [Rank 0] step:5181/10000 train_time:438014ms step_avg:84.54ms +[2025-08-22 16:28:09] [Rank 0] step:5181/10000 train_time:438014ms step_avg:84.54ms +[2025-08-22 16:28:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:28:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:28:25] [Rank 0] PRINT: step:5200/10000 val_loss:4.0778 svd_entropy: attn_qk:H=0.7124,top10E=0.31,eRank=119.7,q75/q25=95.01 attn_vo:H=0.5762,top10E=0.47,eRank=56.0,q75/q25=116.45 mlp_w1:H=0.7724,top10E=0.26,eRank=203.4,q75/q25=11.30 mlp_w2:H=0.8893,top10E=0.13,eRank=382.6,q75/q25=7.80 vo_prod:H=0.4738,top10E=0.67,eRank=28.9,q75/q25=12786.56 train_time:440088ms step_avg:84.63ms +[2025-08-22 16:28:25] [Rank 0] PRINT: step:5200/10000 val_loss:4.0778 svd_entropy: attn_qk:H=0.7124,top10E=0.31,eRank=119.7,q75/q25=95.01 attn_vo:H=0.5762,top10E=0.47,eRank=56.0,q75/q25=116.45 mlp_w1:H=0.7724,top10E=0.26,eRank=203.4,q75/q25=11.30 mlp_w2:H=0.8893,top10E=0.13,eRank=382.6,q75/q25=7.80 vo_prod:H=0.4738,top10E=0.67,eRank=28.9,q75/q25=12786.56 train_time:440088ms step_avg:84.63ms +[2025-08-22 16:28:25] [Rank 0] step:5201/10000 train_time:440098ms step_avg:84.62ms +[2025-08-22 16:28:25] [Rank 0] step:5201/10000 train_time:440098ms step_avg:84.62ms +[2025-08-22 16:28:26] [Rank 0] step:5221/10000 train_time:441567ms step_avg:84.58ms +[2025-08-22 16:28:26] [Rank 0] step:5221/10000 train_time:441567ms step_avg:84.58ms +[2025-08-22 16:28:28] [Rank 0] step:5241/10000 train_time:443341ms step_avg:84.59ms +[2025-08-22 16:28:28] [Rank 0] step:5241/10000 train_time:443341ms step_avg:84.59ms +[2025-08-22 16:28:30] [Rank 0] step:5261/10000 train_time:445115ms step_avg:84.61ms +[2025-08-22 16:28:30] [Rank 0] step:5261/10000 train_time:445115ms step_avg:84.61ms +[2025-08-22 16:28:32] [Rank 0] step:5281/10000 train_time:446892ms step_avg:84.62ms +[2025-08-22 16:28:32] [Rank 0] step:5281/10000 train_time:446892ms step_avg:84.62ms +[2025-08-22 16:28:34] [Rank 0] step:5301/10000 train_time:448679ms step_avg:84.64ms +[2025-08-22 16:28:34] [Rank 0] step:5301/10000 train_time:448679ms step_avg:84.64ms +[2025-08-22 16:28:35] [Rank 0] step:5321/10000 train_time:450456ms step_avg:84.66ms +[2025-08-22 16:28:35] [Rank 0] step:5321/10000 train_time:450456ms step_avg:84.66ms +[2025-08-22 
16:28:37] [Rank 0] step:5341/10000 train_time:452232ms step_avg:84.67ms +[2025-08-22 16:28:37] [Rank 0] step:5341/10000 train_time:452232ms step_avg:84.67ms +[2025-08-22 16:28:39] [Rank 0] step:5361/10000 train_time:454012ms step_avg:84.69ms +[2025-08-22 16:28:39] [Rank 0] step:5361/10000 train_time:454012ms step_avg:84.69ms +[2025-08-22 16:28:41] [Rank 0] step:5381/10000 train_time:455788ms step_avg:84.70ms +[2025-08-22 16:28:41] [Rank 0] step:5381/10000 train_time:455788ms step_avg:84.70ms +[2025-08-22 16:28:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:28:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:28:56] [Rank 0] PRINT: step:5400/10000 val_loss:4.0564 svd_entropy: attn_qk:H=0.7140,top10E=0.30,eRank=121.1,q75/q25=94.77 attn_vo:H=0.5798,top10E=0.47,eRank=57.5,q75/q25=116.00 mlp_w1:H=0.7744,top10E=0.25,eRank=206.3,q75/q25=11.27 mlp_w2:H=0.8907,top10E=0.13,eRank=386.0,q75/q25=7.68 vo_prod:H=0.4771,top10E=0.66,eRank=29.6,q75/q25=13141.58 train_time:457871ms step_avg:84.79ms +[2025-08-22 16:28:56] [Rank 0] PRINT: step:5400/10000 val_loss:4.0564 svd_entropy: attn_qk:H=0.7140,top10E=0.30,eRank=121.1,q75/q25=94.77 attn_vo:H=0.5798,top10E=0.47,eRank=57.5,q75/q25=116.00 mlp_w1:H=0.7744,top10E=0.25,eRank=206.3,q75/q25=11.27 mlp_w2:H=0.8907,top10E=0.13,eRank=386.0,q75/q25=7.68 vo_prod:H=0.4771,top10E=0.66,eRank=29.6,q75/q25=13141.58 train_time:457871ms step_avg:84.79ms +[2025-08-22 16:28:56] [Rank 0] step:5401/10000 train_time:457882ms step_avg:84.78ms +[2025-08-22 16:28:56] [Rank 0] step:5401/10000 train_time:457882ms step_avg:84.78ms +[2025-08-22 16:28:58] [Rank 0] step:5421/10000 train_time:459354ms step_avg:84.74ms +[2025-08-22 16:28:58] [Rank 0] step:5421/10000 train_time:459354ms step_avg:84.74ms +[2025-08-22 16:28:59] [Rank 0] step:5441/10000 train_time:461130ms 
step_avg:84.75ms +[2025-08-22 16:28:59] [Rank 0] step:5441/10000 train_time:461130ms step_avg:84.75ms +[2025-08-22 16:29:01] [Rank 0] step:5461/10000 train_time:462906ms step_avg:84.77ms +[2025-08-22 16:29:01] [Rank 0] step:5461/10000 train_time:462906ms step_avg:84.77ms +[2025-08-22 16:29:03] [Rank 0] step:5481/10000 train_time:464686ms step_avg:84.78ms +[2025-08-22 16:29:03] [Rank 0] step:5481/10000 train_time:464686ms step_avg:84.78ms +[2025-08-22 16:29:05] [Rank 0] step:5501/10000 train_time:466466ms step_avg:84.80ms +[2025-08-22 16:29:05] [Rank 0] step:5501/10000 train_time:466466ms step_avg:84.80ms +[2025-08-22 16:29:07] [Rank 0] step:5521/10000 train_time:468250ms step_avg:84.81ms +[2025-08-22 16:29:07] [Rank 0] step:5521/10000 train_time:468250ms step_avg:84.81ms +[2025-08-22 16:29:08] [Rank 0] step:5541/10000 train_time:470027ms step_avg:84.83ms +[2025-08-22 16:29:08] [Rank 0] step:5541/10000 train_time:470027ms step_avg:84.83ms +[2025-08-22 16:29:10] [Rank 0] step:5561/10000 train_time:471807ms step_avg:84.84ms +[2025-08-22 16:29:10] [Rank 0] step:5561/10000 train_time:471807ms step_avg:84.84ms +[2025-08-22 16:29:12] [Rank 0] step:5581/10000 train_time:473586ms step_avg:84.86ms +[2025-08-22 16:29:12] [Rank 0] step:5581/10000 train_time:473586ms step_avg:84.86ms +[2025-08-22 16:29:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:29:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:29:27] [Rank 0] PRINT: step:5600/10000 val_loss:4.0445 svd_entropy: attn_qk:H=0.7158,top10E=0.30,eRank=122.5,q75/q25=94.76 attn_vo:H=0.5832,top10E=0.46,eRank=58.9,q75/q25=116.89 mlp_w1:H=0.7763,top10E=0.25,eRank=209.2,q75/q25=11.22 mlp_w2:H=0.8920,top10E=0.13,eRank=389.2,q75/q25=7.54 vo_prod:H=0.4809,top10E=0.65,eRank=30.4,q75/q25=13175.85 train_time:475674ms step_avg:84.94ms +[2025-08-22 16:29:27] [Rank 0] PRINT: step:5600/10000 val_loss:4.0445 svd_entropy: attn_qk:H=0.7158,top10E=0.30,eRank=122.5,q75/q25=94.76 attn_vo:H=0.5832,top10E=0.46,eRank=58.9,q75/q25=116.89 mlp_w1:H=0.7763,top10E=0.25,eRank=209.2,q75/q25=11.22 mlp_w2:H=0.8920,top10E=0.13,eRank=389.2,q75/q25=7.54 vo_prod:H=0.4809,top10E=0.65,eRank=30.4,q75/q25=13175.85 train_time:475674ms step_avg:84.94ms +[2025-08-22 16:29:27] [Rank 0] step:5601/10000 train_time:475683ms step_avg:84.93ms +[2025-08-22 16:29:27] [Rank 0] step:5601/10000 train_time:475683ms step_avg:84.93ms +[2025-08-22 16:29:29] [Rank 0] step:5621/10000 train_time:477167ms step_avg:84.89ms +[2025-08-22 16:29:29] [Rank 0] step:5621/10000 train_time:477167ms step_avg:84.89ms +[2025-08-22 16:29:31] [Rank 0] step:5641/10000 train_time:478944ms step_avg:84.90ms +[2025-08-22 16:29:31] [Rank 0] step:5641/10000 train_time:478944ms step_avg:84.90ms +[2025-08-22 16:29:33] [Rank 0] step:5661/10000 train_time:480714ms step_avg:84.92ms +[2025-08-22 16:29:33] [Rank 0] step:5661/10000 train_time:480714ms step_avg:84.92ms +[2025-08-22 16:29:34] [Rank 0] step:5681/10000 train_time:482493ms step_avg:84.93ms +[2025-08-22 16:29:34] [Rank 0] step:5681/10000 train_time:482493ms step_avg:84.93ms +[2025-08-22 16:29:36] [Rank 0] step:5701/10000 train_time:484268ms step_avg:84.94ms +[2025-08-22 16:29:36] [Rank 0] step:5701/10000 train_time:484268ms step_avg:84.94ms +[2025-08-22 16:29:38] [Rank 0] step:5721/10000 train_time:486048ms step_avg:84.96ms +[2025-08-22 16:29:38] [Rank 0] step:5721/10000 train_time:486048ms step_avg:84.96ms +[2025-08-22 
16:29:40] [Rank 0] step:5741/10000 train_time:487832ms step_avg:84.97ms +[2025-08-22 16:29:40] [Rank 0] step:5741/10000 train_time:487832ms step_avg:84.97ms +[2025-08-22 16:29:42] [Rank 0] step:5761/10000 train_time:489614ms step_avg:84.99ms +[2025-08-22 16:29:42] [Rank 0] step:5761/10000 train_time:489614ms step_avg:84.99ms +[2025-08-22 16:29:43] [Rank 0] step:5781/10000 train_time:491394ms step_avg:85.00ms +[2025-08-22 16:29:43] [Rank 0] step:5781/10000 train_time:491394ms step_avg:85.00ms +[2025-08-22 16:29:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:29:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:29:59] [Rank 0] PRINT: step:5800/10000 val_loss:4.0344 svd_entropy: attn_qk:H=0.7173,top10E=0.30,eRank=123.8,q75/q25=94.37 attn_vo:H=0.5865,top10E=0.45,eRank=60.3,q75/q25=116.57 mlp_w1:H=0.7782,top10E=0.25,eRank=211.9,q75/q25=11.18 mlp_w2:H=0.8932,top10E=0.13,eRank=392.0,q75/q25=7.41 vo_prod:H=0.4842,top10E=0.65,eRank=31.1,q75/q25=13153.89 train_time:493485ms step_avg:85.08ms +[2025-08-22 16:29:59] [Rank 0] PRINT: step:5800/10000 val_loss:4.0344 svd_entropy: attn_qk:H=0.7173,top10E=0.30,eRank=123.8,q75/q25=94.37 attn_vo:H=0.5865,top10E=0.45,eRank=60.3,q75/q25=116.57 mlp_w1:H=0.7782,top10E=0.25,eRank=211.9,q75/q25=11.18 mlp_w2:H=0.8932,top10E=0.13,eRank=392.0,q75/q25=7.41 vo_prod:H=0.4842,top10E=0.65,eRank=31.1,q75/q25=13153.89 train_time:493485ms step_avg:85.08ms +[2025-08-22 16:29:59] [Rank 0] step:5801/10000 train_time:493496ms step_avg:85.07ms +[2025-08-22 16:29:59] [Rank 0] step:5801/10000 train_time:493496ms step_avg:85.07ms +[2025-08-22 16:30:01] [Rank 0] step:5821/10000 train_time:494986ms step_avg:85.03ms +[2025-08-22 16:30:01] [Rank 0] step:5821/10000 train_time:494986ms step_avg:85.03ms +[2025-08-22 16:30:02] [Rank 0] step:5841/10000 train_time:496760ms 
step_avg:85.05ms +[2025-08-22 16:30:02] [Rank 0] step:5841/10000 train_time:496760ms step_avg:85.05ms +[2025-08-22 16:30:04] [Rank 0] step:5861/10000 train_time:498541ms step_avg:85.06ms +[2025-08-22 16:30:04] [Rank 0] step:5861/10000 train_time:498541ms step_avg:85.06ms +[2025-08-22 16:30:06] [Rank 0] step:5881/10000 train_time:500319ms step_avg:85.07ms +[2025-08-22 16:30:06] [Rank 0] step:5881/10000 train_time:500319ms step_avg:85.07ms +[2025-08-22 16:30:08] [Rank 0] step:5901/10000 train_time:502096ms step_avg:85.09ms +[2025-08-22 16:30:08] [Rank 0] step:5901/10000 train_time:502096ms step_avg:85.09ms +[2025-08-22 16:30:09] [Rank 0] step:5921/10000 train_time:503876ms step_avg:85.10ms +[2025-08-22 16:30:09] [Rank 0] step:5921/10000 train_time:503876ms step_avg:85.10ms +[2025-08-22 16:30:11] [Rank 0] step:5941/10000 train_time:505657ms step_avg:85.11ms +[2025-08-22 16:30:11] [Rank 0] step:5941/10000 train_time:505657ms step_avg:85.11ms +[2025-08-22 16:30:13] [Rank 0] step:5961/10000 train_time:507441ms step_avg:85.13ms +[2025-08-22 16:30:13] [Rank 0] step:5961/10000 train_time:507441ms step_avg:85.13ms +[2025-08-22 16:30:15] [Rank 0] step:5981/10000 train_time:509220ms step_avg:85.14ms +[2025-08-22 16:30:15] [Rank 0] step:5981/10000 train_time:509220ms step_avg:85.14ms +[2025-08-22 16:30:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:30:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:30:30] [Rank 0] PRINT: step:6000/10000 val_loss:4.0098 svd_entropy: attn_qk:H=0.7189,top10E=0.30,eRank=125.1,q75/q25=94.44 attn_vo:H=0.5896,top10E=0.45,eRank=61.7,q75/q25=116.62 mlp_w1:H=0.7800,top10E=0.25,eRank=214.6,q75/q25=11.13 mlp_w2:H=0.8944,top10E=0.13,eRank=394.9,q75/q25=7.31 vo_prod:H=0.4872,top10E=0.64,eRank=31.9,q75/q25=13113.13 train_time:511307ms step_avg:85.22ms +[2025-08-22 16:30:30] [Rank 0] PRINT: step:6000/10000 val_loss:4.0098 svd_entropy: attn_qk:H=0.7189,top10E=0.30,eRank=125.1,q75/q25=94.44 attn_vo:H=0.5896,top10E=0.45,eRank=61.7,q75/q25=116.62 mlp_w1:H=0.7800,top10E=0.25,eRank=214.6,q75/q25=11.13 mlp_w2:H=0.8944,top10E=0.13,eRank=394.9,q75/q25=7.31 vo_prod:H=0.4872,top10E=0.64,eRank=31.9,q75/q25=13113.13 train_time:511307ms step_avg:85.22ms +[2025-08-22 16:30:30] [Rank 0] step:6001/10000 train_time:511316ms step_avg:85.21ms +[2025-08-22 16:30:30] [Rank 0] step:6001/10000 train_time:511316ms step_avg:85.21ms +[2025-08-22 16:30:32] [Rank 0] step:6021/10000 train_time:512799ms step_avg:85.17ms +[2025-08-22 16:30:32] [Rank 0] step:6021/10000 train_time:512799ms step_avg:85.17ms +[2025-08-22 16:30:34] [Rank 0] step:6041/10000 train_time:514577ms step_avg:85.18ms +[2025-08-22 16:30:34] [Rank 0] step:6041/10000 train_time:514577ms step_avg:85.18ms +[2025-08-22 16:30:36] [Rank 0] step:6061/10000 train_time:516362ms step_avg:85.19ms +[2025-08-22 16:30:36] [Rank 0] step:6061/10000 train_time:516362ms step_avg:85.19ms +[2025-08-22 16:30:37] [Rank 0] step:6081/10000 train_time:518142ms step_avg:85.21ms +[2025-08-22 16:30:37] [Rank 0] step:6081/10000 train_time:518142ms step_avg:85.21ms +[2025-08-22 16:30:39] [Rank 0] step:6101/10000 train_time:519930ms step_avg:85.22ms +[2025-08-22 16:30:39] [Rank 0] step:6101/10000 train_time:519930ms step_avg:85.22ms +[2025-08-22 16:30:41] [Rank 0] step:6121/10000 train_time:521975ms step_avg:85.28ms +[2025-08-22 16:30:41] [Rank 0] step:6121/10000 train_time:521975ms step_avg:85.28ms +[2025-08-22 
16:30:43] [Rank 0] step:6141/10000 train_time:523771ms step_avg:85.29ms +[2025-08-22 16:30:43] [Rank 0] step:6141/10000 train_time:523771ms step_avg:85.29ms +[2025-08-22 16:30:45] [Rank 0] step:6161/10000 train_time:525556ms step_avg:85.30ms +[2025-08-22 16:30:45] [Rank 0] step:6161/10000 train_time:525556ms step_avg:85.30ms +[2025-08-22 16:30:47] [Rank 0] step:6181/10000 train_time:527338ms step_avg:85.32ms +[2025-08-22 16:30:47] [Rank 0] step:6181/10000 train_time:527338ms step_avg:85.32ms +[2025-08-22 16:30:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:30:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:31:02] [Rank 0] PRINT: step:6200/10000 val_loss:3.9924 svd_entropy: attn_qk:H=0.7204,top10E=0.29,eRank=126.4,q75/q25=94.17 attn_vo:H=0.5926,top10E=0.44,eRank=63.1,q75/q25=116.86 mlp_w1:H=0.7817,top10E=0.25,eRank=217.1,q75/q25=11.09 mlp_w2:H=0.8955,top10E=0.13,eRank=397.7,q75/q25=7.24 vo_prod:H=0.4903,top10E=0.64,eRank=32.6,q75/q25=13125.30 train_time:529443ms step_avg:85.39ms +[2025-08-22 16:31:02] [Rank 0] PRINT: step:6200/10000 val_loss:3.9924 svd_entropy: attn_qk:H=0.7204,top10E=0.29,eRank=126.4,q75/q25=94.17 attn_vo:H=0.5926,top10E=0.44,eRank=63.1,q75/q25=116.86 mlp_w1:H=0.7817,top10E=0.25,eRank=217.1,q75/q25=11.09 mlp_w2:H=0.8955,top10E=0.13,eRank=397.7,q75/q25=7.24 vo_prod:H=0.4903,top10E=0.64,eRank=32.6,q75/q25=13125.30 train_time:529443ms step_avg:85.39ms +[2025-08-22 16:31:02] [Rank 0] step:6201/10000 train_time:529453ms step_avg:85.38ms +[2025-08-22 16:31:02] [Rank 0] step:6201/10000 train_time:529453ms step_avg:85.38ms +[2025-08-22 16:31:04] [Rank 0] step:6221/10000 train_time:530953ms step_avg:85.35ms +[2025-08-22 16:31:04] [Rank 0] step:6221/10000 train_time:530953ms step_avg:85.35ms +[2025-08-22 16:31:06] [Rank 0] step:6241/10000 train_time:532731ms 
step_avg:85.36ms +[2025-08-22 16:31:06] [Rank 0] step:6241/10000 train_time:532731ms step_avg:85.36ms +[2025-08-22 16:31:07] [Rank 0] step:6261/10000 train_time:534515ms step_avg:85.37ms +[2025-08-22 16:31:07] [Rank 0] step:6261/10000 train_time:534515ms step_avg:85.37ms +[2025-08-22 16:31:09] [Rank 0] step:6281/10000 train_time:536301ms step_avg:85.38ms +[2025-08-22 16:31:09] [Rank 0] step:6281/10000 train_time:536301ms step_avg:85.38ms +[2025-08-22 16:31:11] [Rank 0] step:6301/10000 train_time:538087ms step_avg:85.40ms +[2025-08-22 16:31:11] [Rank 0] step:6301/10000 train_time:538087ms step_avg:85.40ms +[2025-08-22 16:31:13] [Rank 0] step:6321/10000 train_time:539875ms step_avg:85.41ms +[2025-08-22 16:31:13] [Rank 0] step:6321/10000 train_time:539875ms step_avg:85.41ms +[2025-08-22 16:31:14] [Rank 0] step:6341/10000 train_time:541662ms step_avg:85.42ms +[2025-08-22 16:31:14] [Rank 0] step:6341/10000 train_time:541662ms step_avg:85.42ms +[2025-08-22 16:31:16] [Rank 0] step:6361/10000 train_time:543451ms step_avg:85.43ms +[2025-08-22 16:31:16] [Rank 0] step:6361/10000 train_time:543451ms step_avg:85.43ms +[2025-08-22 16:31:18] [Rank 0] step:6381/10000 train_time:545247ms step_avg:85.45ms +[2025-08-22 16:31:18] [Rank 0] step:6381/10000 train_time:545247ms step_avg:85.45ms +[2025-08-22 16:31:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:31:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:31:33] [Rank 0] PRINT: step:6400/10000 val_loss:3.9749 svd_entropy: attn_qk:H=0.7218,top10E=0.29,eRank=127.6,q75/q25=94.52 attn_vo:H=0.5953,top10E=0.44,eRank=64.3,q75/q25=117.42 mlp_w1:H=0.7832,top10E=0.24,eRank=219.4,q75/q25=11.05 mlp_w2:H=0.8965,top10E=0.12,eRank=400.1,q75/q25=7.11 vo_prod:H=0.4928,top10E=0.63,eRank=33.3,q75/q25=13323.02 train_time:547340ms step_avg:85.52ms +[2025-08-22 16:31:33] [Rank 0] PRINT: step:6400/10000 val_loss:3.9749 svd_entropy: attn_qk:H=0.7218,top10E=0.29,eRank=127.6,q75/q25=94.52 attn_vo:H=0.5953,top10E=0.44,eRank=64.3,q75/q25=117.42 mlp_w1:H=0.7832,top10E=0.24,eRank=219.4,q75/q25=11.05 mlp_w2:H=0.8965,top10E=0.12,eRank=400.1,q75/q25=7.11 vo_prod:H=0.4928,top10E=0.63,eRank=33.3,q75/q25=13323.02 train_time:547340ms step_avg:85.52ms +[2025-08-22 16:31:33] [Rank 0] step:6401/10000 train_time:547350ms step_avg:85.51ms +[2025-08-22 16:31:33] [Rank 0] step:6401/10000 train_time:547350ms step_avg:85.51ms +[2025-08-22 16:31:35] [Rank 0] step:6421/10000 train_time:548838ms step_avg:85.48ms +[2025-08-22 16:31:35] [Rank 0] step:6421/10000 train_time:548838ms step_avg:85.48ms +[2025-08-22 16:31:37] [Rank 0] step:6441/10000 train_time:550618ms step_avg:85.49ms +[2025-08-22 16:31:37] [Rank 0] step:6441/10000 train_time:550618ms step_avg:85.49ms +[2025-08-22 16:31:39] [Rank 0] step:6461/10000 train_time:552402ms step_avg:85.50ms +[2025-08-22 16:31:39] [Rank 0] step:6461/10000 train_time:552402ms step_avg:85.50ms +[2025-08-22 16:31:41] [Rank 0] step:6481/10000 train_time:554188ms step_avg:85.51ms +[2025-08-22 16:31:41] [Rank 0] step:6481/10000 train_time:554188ms step_avg:85.51ms +[2025-08-22 16:31:42] [Rank 0] step:6501/10000 train_time:555966ms step_avg:85.52ms +[2025-08-22 16:31:42] [Rank 0] step:6501/10000 train_time:555966ms step_avg:85.52ms +[2025-08-22 16:31:44] [Rank 0] step:6521/10000 train_time:557746ms step_avg:85.53ms +[2025-08-22 16:31:44] [Rank 0] step:6521/10000 train_time:557746ms step_avg:85.53ms +[2025-08-22 
16:31:46] [Rank 0] step:6541/10000 train_time:559531ms step_avg:85.54ms +[2025-08-22 16:31:46] [Rank 0] step:6541/10000 train_time:559531ms step_avg:85.54ms +[2025-08-22 16:31:48] [Rank 0] step:6561/10000 train_time:561315ms step_avg:85.55ms +[2025-08-22 16:31:48] [Rank 0] step:6561/10000 train_time:561315ms step_avg:85.55ms +[2025-08-22 16:31:49] [Rank 0] step:6581/10000 train_time:563095ms step_avg:85.56ms +[2025-08-22 16:31:49] [Rank 0] step:6581/10000 train_time:563095ms step_avg:85.56ms +[2025-08-22 16:31:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:31:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:32:05] [Rank 0] PRINT: step:6600/10000 val_loss:3.9648 svd_entropy: attn_qk:H=0.7231,top10E=0.29,eRank=128.7,q75/q25=94.27 attn_vo:H=0.5979,top10E=0.43,eRank=65.5,q75/q25=117.55 mlp_w1:H=0.7846,top10E=0.24,eRank=221.5,q75/q25=11.01 mlp_w2:H=0.8974,top10E=0.12,eRank=402.3,q75/q25=7.02 vo_prod:H=0.4955,top10E=0.63,eRank=33.9,q75/q25=13587.72 train_time:565188ms step_avg:85.63ms +[2025-08-22 16:32:05] [Rank 0] PRINT: step:6600/10000 val_loss:3.9648 svd_entropy: attn_qk:H=0.7231,top10E=0.29,eRank=128.7,q75/q25=94.27 attn_vo:H=0.5979,top10E=0.43,eRank=65.5,q75/q25=117.55 mlp_w1:H=0.7846,top10E=0.24,eRank=221.5,q75/q25=11.01 mlp_w2:H=0.8974,top10E=0.12,eRank=402.3,q75/q25=7.02 vo_prod:H=0.4955,top10E=0.63,eRank=33.9,q75/q25=13587.72 train_time:565188ms step_avg:85.63ms +[2025-08-22 16:32:05] [Rank 0] step:6601/10000 train_time:565197ms step_avg:85.62ms +[2025-08-22 16:32:05] [Rank 0] step:6601/10000 train_time:565197ms step_avg:85.62ms +[2025-08-22 16:32:07] [Rank 0] step:6621/10000 train_time:566692ms step_avg:85.59ms +[2025-08-22 16:32:07] [Rank 0] step:6621/10000 train_time:566692ms step_avg:85.59ms +[2025-08-22 16:32:08] [Rank 0] step:6641/10000 train_time:568476ms 
step_avg:85.60ms +[2025-08-22 16:32:08] [Rank 0] step:6641/10000 train_time:568476ms step_avg:85.60ms +[2025-08-22 16:32:10] [Rank 0] step:6661/10000 train_time:570258ms step_avg:85.61ms +[2025-08-22 16:32:10] [Rank 0] step:6661/10000 train_time:570258ms step_avg:85.61ms +[2025-08-22 16:32:12] [Rank 0] step:6681/10000 train_time:572051ms step_avg:85.62ms +[2025-08-22 16:32:12] [Rank 0] step:6681/10000 train_time:572051ms step_avg:85.62ms +[2025-08-22 16:32:14] [Rank 0] step:6701/10000 train_time:573863ms step_avg:85.64ms +[2025-08-22 16:32:14] [Rank 0] step:6701/10000 train_time:573863ms step_avg:85.64ms +[2025-08-22 16:32:16] [Rank 0] step:6721/10000 train_time:575681ms step_avg:85.65ms +[2025-08-22 16:32:16] [Rank 0] step:6721/10000 train_time:575681ms step_avg:85.65ms +[2025-08-22 16:32:17] [Rank 0] step:6741/10000 train_time:577487ms step_avg:85.67ms +[2025-08-22 16:32:17] [Rank 0] step:6741/10000 train_time:577487ms step_avg:85.67ms +[2025-08-22 16:32:19] [Rank 0] step:6761/10000 train_time:579295ms step_avg:85.68ms +[2025-08-22 16:32:19] [Rank 0] step:6761/10000 train_time:579295ms step_avg:85.68ms +[2025-08-22 16:32:21] [Rank 0] step:6781/10000 train_time:581112ms step_avg:85.70ms +[2025-08-22 16:32:21] [Rank 0] step:6781/10000 train_time:581112ms step_avg:85.70ms +[2025-08-22 16:32:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:32:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:32:36] [Rank 0] PRINT: step:6800/10000 val_loss:3.9451 svd_entropy: attn_qk:H=0.7243,top10E=0.29,eRank=129.7,q75/q25=94.43 attn_vo:H=0.6002,top10E=0.43,eRank=66.6,q75/q25=117.99 mlp_w1:H=0.7858,top10E=0.24,eRank=223.5,q75/q25=10.99 mlp_w2:H=0.8983,top10E=0.12,eRank=404.4,q75/q25=6.96 vo_prod:H=0.4979,top10E=0.62,eRank=34.5,q75/q25=13770.41 train_time:583242ms step_avg:85.77ms +[2025-08-22 16:32:36] [Rank 0] PRINT: step:6800/10000 val_loss:3.9451 svd_entropy: attn_qk:H=0.7243,top10E=0.29,eRank=129.7,q75/q25=94.43 attn_vo:H=0.6002,top10E=0.43,eRank=66.6,q75/q25=117.99 mlp_w1:H=0.7858,top10E=0.24,eRank=223.5,q75/q25=10.99 mlp_w2:H=0.8983,top10E=0.12,eRank=404.4,q75/q25=6.96 vo_prod:H=0.4979,top10E=0.62,eRank=34.5,q75/q25=13770.41 train_time:583242ms step_avg:85.77ms +[2025-08-22 16:32:37] [Rank 0] step:6801/10000 train_time:583252ms step_avg:85.76ms +[2025-08-22 16:32:37] [Rank 0] step:6801/10000 train_time:583252ms step_avg:85.76ms +[2025-08-22 16:32:38] [Rank 0] step:6821/10000 train_time:584772ms step_avg:85.73ms +[2025-08-22 16:32:38] [Rank 0] step:6821/10000 train_time:584772ms step_avg:85.73ms +[2025-08-22 16:32:40] [Rank 0] step:6841/10000 train_time:586573ms step_avg:85.74ms +[2025-08-22 16:32:40] [Rank 0] step:6841/10000 train_time:586573ms step_avg:85.74ms +[2025-08-22 16:32:42] [Rank 0] step:6861/10000 train_time:588389ms step_avg:85.76ms +[2025-08-22 16:32:42] [Rank 0] step:6861/10000 train_time:588389ms step_avg:85.76ms +[2025-08-22 16:32:44] [Rank 0] step:6881/10000 train_time:590198ms step_avg:85.77ms +[2025-08-22 16:32:44] [Rank 0] step:6881/10000 train_time:590198ms step_avg:85.77ms +[2025-08-22 16:32:46] [Rank 0] step:6901/10000 train_time:592008ms step_avg:85.79ms +[2025-08-22 16:32:46] [Rank 0] step:6901/10000 train_time:592008ms step_avg:85.79ms +[2025-08-22 16:32:47] [Rank 0] step:6921/10000 train_time:593809ms step_avg:85.80ms +[2025-08-22 16:32:47] [Rank 0] step:6921/10000 train_time:593809ms step_avg:85.80ms +[2025-08-22 
16:32:49] [Rank 0] step:6941/10000 train_time:595642ms step_avg:85.81ms +[2025-08-22 16:32:49] [Rank 0] step:6941/10000 train_time:595642ms step_avg:85.81ms +[2025-08-22 16:32:51] [Rank 0] step:6961/10000 train_time:597456ms step_avg:85.83ms +[2025-08-22 16:32:51] [Rank 0] step:6961/10000 train_time:597456ms step_avg:85.83ms +[2025-08-22 16:32:53] [Rank 0] step:6981/10000 train_time:599286ms step_avg:85.85ms +[2025-08-22 16:32:53] [Rank 0] step:6981/10000 train_time:599286ms step_avg:85.85ms +[2025-08-22 16:32:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:32:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:33:08] [Rank 0] PRINT: step:7000/10000 val_loss:3.9246 svd_entropy: attn_qk:H=0.7253,top10E=0.29,eRank=130.7,q75/q25=94.08 attn_vo:H=0.6024,top10E=0.43,eRank=67.6,q75/q25=118.33 mlp_w1:H=0.7870,top10E=0.24,eRank=225.3,q75/q25=10.96 mlp_w2:H=0.8991,top10E=0.12,eRank=406.4,q75/q25=6.90 vo_prod:H=0.5001,top10E=0.62,eRank=35.1,q75/q25=13950.43 train_time:601416ms step_avg:85.92ms +[2025-08-22 16:33:08] [Rank 0] PRINT: step:7000/10000 val_loss:3.9246 svd_entropy: attn_qk:H=0.7253,top10E=0.29,eRank=130.7,q75/q25=94.08 attn_vo:H=0.6024,top10E=0.43,eRank=67.6,q75/q25=118.33 mlp_w1:H=0.7870,top10E=0.24,eRank=225.3,q75/q25=10.96 mlp_w2:H=0.8991,top10E=0.12,eRank=406.4,q75/q25=6.90 vo_prod:H=0.5001,top10E=0.62,eRank=35.1,q75/q25=13950.43 train_time:601416ms step_avg:85.92ms +[2025-08-22 16:33:08] [Rank 0] step:7001/10000 train_time:601426ms step_avg:85.91ms +[2025-08-22 16:33:08] [Rank 0] step:7001/10000 train_time:601426ms step_avg:85.91ms +[2025-08-22 16:33:10] [Rank 0] step:7021/10000 train_time:602937ms step_avg:85.88ms +[2025-08-22 16:33:10] [Rank 0] step:7021/10000 train_time:602937ms step_avg:85.88ms +[2025-08-22 16:33:12] [Rank 0] step:7041/10000 train_time:604752ms 
step_avg:85.89ms +[2025-08-22 16:33:12] [Rank 0] step:7041/10000 train_time:604752ms step_avg:85.89ms +[2025-08-22 16:33:14] [Rank 0] step:7061/10000 train_time:606560ms step_avg:85.90ms +[2025-08-22 16:33:14] [Rank 0] step:7061/10000 train_time:606560ms step_avg:85.90ms +[2025-08-22 16:33:16] [Rank 0] step:7081/10000 train_time:608379ms step_avg:85.92ms +[2025-08-22 16:33:16] [Rank 0] step:7081/10000 train_time:608379ms step_avg:85.92ms +[2025-08-22 16:33:17] [Rank 0] step:7101/10000 train_time:610195ms step_avg:85.93ms +[2025-08-22 16:33:17] [Rank 0] step:7101/10000 train_time:610195ms step_avg:85.93ms +[2025-08-22 16:33:19] [Rank 0] step:7121/10000 train_time:612014ms step_avg:85.94ms +[2025-08-22 16:33:19] [Rank 0] step:7121/10000 train_time:612014ms step_avg:85.94ms +[2025-08-22 16:33:21] [Rank 0] step:7141/10000 train_time:613830ms step_avg:85.96ms +[2025-08-22 16:33:21] [Rank 0] step:7141/10000 train_time:613830ms step_avg:85.96ms +[2025-08-22 16:33:23] [Rank 0] step:7161/10000 train_time:615651ms step_avg:85.97ms +[2025-08-22 16:33:23] [Rank 0] step:7161/10000 train_time:615651ms step_avg:85.97ms +[2025-08-22 16:33:25] [Rank 0] step:7181/10000 train_time:617468ms step_avg:85.99ms +[2025-08-22 16:33:25] [Rank 0] step:7181/10000 train_time:617468ms step_avg:85.99ms +[2025-08-22 16:33:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:33:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:33:40] [Rank 0] PRINT: step:7200/10000 val_loss:3.9126 svd_entropy: attn_qk:H=0.7264,top10E=0.29,eRank=131.6,q75/q25=94.48 attn_vo:H=0.6044,top10E=0.42,eRank=68.6,q75/q25=118.94 mlp_w1:H=0.7881,top10E=0.24,eRank=227.0,q75/q25=10.93 mlp_w2:H=0.8998,top10E=0.12,eRank=408.3,q75/q25=6.84 vo_prod:H=0.5023,top10E=0.61,eRank=35.6,q75/q25=13973.97 train_time:619608ms step_avg:86.06ms +[2025-08-22 16:33:40] [Rank 0] PRINT: step:7200/10000 val_loss:3.9126 svd_entropy: attn_qk:H=0.7264,top10E=0.29,eRank=131.6,q75/q25=94.48 attn_vo:H=0.6044,top10E=0.42,eRank=68.6,q75/q25=118.94 mlp_w1:H=0.7881,top10E=0.24,eRank=227.0,q75/q25=10.93 mlp_w2:H=0.8998,top10E=0.12,eRank=408.3,q75/q25=6.84 vo_prod:H=0.5023,top10E=0.61,eRank=35.6,q75/q25=13973.97 train_time:619608ms step_avg:86.06ms +[2025-08-22 16:33:40] [Rank 0] step:7201/10000 train_time:619617ms step_avg:86.05ms +[2025-08-22 16:33:40] [Rank 0] step:7201/10000 train_time:619617ms step_avg:86.05ms +[2025-08-22 16:33:42] [Rank 0] step:7221/10000 train_time:621140ms step_avg:86.02ms +[2025-08-22 16:33:42] [Rank 0] step:7221/10000 train_time:621140ms step_avg:86.02ms +[2025-08-22 16:33:44] [Rank 0] step:7241/10000 train_time:622948ms step_avg:86.03ms +[2025-08-22 16:33:44] [Rank 0] step:7241/10000 train_time:622948ms step_avg:86.03ms +[2025-08-22 16:33:46] [Rank 0] step:7261/10000 train_time:624755ms step_avg:86.04ms +[2025-08-22 16:33:46] [Rank 0] step:7261/10000 train_time:624755ms step_avg:86.04ms +[2025-08-22 16:33:47] [Rank 0] step:7281/10000 train_time:626614ms step_avg:86.06ms +[2025-08-22 16:33:47] [Rank 0] step:7281/10000 train_time:626614ms step_avg:86.06ms +[2025-08-22 16:33:49] [Rank 0] step:7301/10000 train_time:628426ms step_avg:86.07ms +[2025-08-22 16:33:49] [Rank 0] step:7301/10000 train_time:628426ms step_avg:86.07ms +[2025-08-22 16:33:51] [Rank 0] step:7321/10000 train_time:630245ms step_avg:86.09ms +[2025-08-22 16:33:51] [Rank 0] step:7321/10000 train_time:630245ms step_avg:86.09ms +[2025-08-22 
16:33:53] [Rank 0] step:7341/10000 train_time:632059ms step_avg:86.10ms +[2025-08-22 16:33:53] [Rank 0] step:7341/10000 train_time:632059ms step_avg:86.10ms +[2025-08-22 16:33:55] [Rank 0] step:7361/10000 train_time:633880ms step_avg:86.11ms +[2025-08-22 16:33:55] [Rank 0] step:7361/10000 train_time:633880ms step_avg:86.11ms +[2025-08-22 16:33:57] [Rank 0] step:7381/10000 train_time:635701ms step_avg:86.13ms +[2025-08-22 16:33:57] [Rank 0] step:7381/10000 train_time:635701ms step_avg:86.13ms +[2025-08-22 16:33:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:33:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:34:12] [Rank 0] PRINT: step:7400/10000 val_loss:3.8846 svd_entropy: attn_qk:H=0.7273,top10E=0.29,eRank=132.4,q75/q25=93.70 attn_vo:H=0.6061,top10E=0.42,eRank=69.4,q75/q25=118.45 mlp_w1:H=0.7891,top10E=0.24,eRank=228.5,q75/q25=10.88 mlp_w2:H=0.9005,top10E=0.12,eRank=409.9,q75/q25=6.79 vo_prod:H=0.5039,top10E=0.61,eRank=36.1,q75/q25=13592.27 train_time:637816ms step_avg:86.19ms +[2025-08-22 16:34:12] [Rank 0] PRINT: step:7400/10000 val_loss:3.8846 svd_entropy: attn_qk:H=0.7273,top10E=0.29,eRank=132.4,q75/q25=93.70 attn_vo:H=0.6061,top10E=0.42,eRank=69.4,q75/q25=118.45 mlp_w1:H=0.7891,top10E=0.24,eRank=228.5,q75/q25=10.88 mlp_w2:H=0.9005,top10E=0.12,eRank=409.9,q75/q25=6.79 vo_prod:H=0.5039,top10E=0.61,eRank=36.1,q75/q25=13592.27 train_time:637816ms step_avg:86.19ms +[2025-08-22 16:34:12] [Rank 0] step:7401/10000 train_time:637825ms step_avg:86.18ms +[2025-08-22 16:34:12] [Rank 0] step:7401/10000 train_time:637825ms step_avg:86.18ms +[2025-08-22 16:34:14] [Rank 0] step:7421/10000 train_time:639351ms step_avg:86.15ms +[2025-08-22 16:34:14] [Rank 0] step:7421/10000 train_time:639351ms step_avg:86.15ms +[2025-08-22 16:34:16] [Rank 0] step:7441/10000 train_time:641157ms 
step_avg:86.17ms +[2025-08-22 16:34:16] [Rank 0] step:7441/10000 train_time:641157ms step_avg:86.17ms +[2025-08-22 16:34:17] [Rank 0] step:7461/10000 train_time:642971ms step_avg:86.18ms +[2025-08-22 16:34:17] [Rank 0] step:7461/10000 train_time:642971ms step_avg:86.18ms +[2025-08-22 16:34:19] [Rank 0] step:7481/10000 train_time:644788ms step_avg:86.19ms +[2025-08-22 16:34:19] [Rank 0] step:7481/10000 train_time:644788ms step_avg:86.19ms +[2025-08-22 16:34:21] [Rank 0] step:7501/10000 train_time:646605ms step_avg:86.20ms +[2025-08-22 16:34:21] [Rank 0] step:7501/10000 train_time:646605ms step_avg:86.20ms +[2025-08-22 16:34:23] [Rank 0] step:7521/10000 train_time:648421ms step_avg:86.21ms +[2025-08-22 16:34:23] [Rank 0] step:7521/10000 train_time:648421ms step_avg:86.21ms +[2025-08-22 16:34:25] [Rank 0] step:7541/10000 train_time:650250ms step_avg:86.23ms +[2025-08-22 16:34:25] [Rank 0] step:7541/10000 train_time:650250ms step_avg:86.23ms +[2025-08-22 16:34:27] [Rank 0] step:7561/10000 train_time:652059ms step_avg:86.24ms +[2025-08-22 16:34:27] [Rank 0] step:7561/10000 train_time:652059ms step_avg:86.24ms +[2025-08-22 16:34:28] [Rank 0] step:7581/10000 train_time:653884ms step_avg:86.25ms +[2025-08-22 16:34:28] [Rank 0] step:7581/10000 train_time:653884ms step_avg:86.25ms +[2025-08-22 16:34:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:34:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:34:44] [Rank 0] PRINT: step:7600/10000 val_loss:3.8817 svd_entropy: attn_qk:H=0.7281,top10E=0.28,eRank=133.1,q75/q25=93.47 attn_vo:H=0.6077,top10E=0.42,eRank=70.2,q75/q25=116.36 mlp_w1:H=0.7899,top10E=0.24,eRank=229.9,q75/q25=10.82 mlp_w2:H=0.9011,top10E=0.12,eRank=411.4,q75/q25=6.74 vo_prod:H=0.5056,top10E=0.61,eRank=36.5,q75/q25=13082.63 train_time:656029ms step_avg:86.32ms +[2025-08-22 16:34:44] [Rank 0] PRINT: step:7600/10000 val_loss:3.8817 svd_entropy: attn_qk:H=0.7281,top10E=0.28,eRank=133.1,q75/q25=93.47 attn_vo:H=0.6077,top10E=0.42,eRank=70.2,q75/q25=116.36 mlp_w1:H=0.7899,top10E=0.24,eRank=229.9,q75/q25=10.82 mlp_w2:H=0.9011,top10E=0.12,eRank=411.4,q75/q25=6.74 vo_prod:H=0.5056,top10E=0.61,eRank=36.5,q75/q25=13082.63 train_time:656029ms step_avg:86.32ms +[2025-08-22 16:34:44] [Rank 0] step:7601/10000 train_time:656040ms step_avg:86.31ms +[2025-08-22 16:34:44] [Rank 0] step:7601/10000 train_time:656040ms step_avg:86.31ms +[2025-08-22 16:34:46] [Rank 0] step:7621/10000 train_time:657574ms step_avg:86.28ms +[2025-08-22 16:34:46] [Rank 0] step:7621/10000 train_time:657574ms step_avg:86.28ms +[2025-08-22 16:34:48] [Rank 0] step:7641/10000 train_time:659386ms step_avg:86.30ms +[2025-08-22 16:34:48] [Rank 0] step:7641/10000 train_time:659386ms step_avg:86.30ms +[2025-08-22 16:34:49] [Rank 0] step:7661/10000 train_time:661209ms step_avg:86.31ms +[2025-08-22 16:34:49] [Rank 0] step:7661/10000 train_time:661209ms step_avg:86.31ms +[2025-08-22 16:34:51] [Rank 0] step:7681/10000 train_time:663029ms step_avg:86.32ms +[2025-08-22 16:34:51] [Rank 0] step:7681/10000 train_time:663029ms step_avg:86.32ms +[2025-08-22 16:34:53] [Rank 0] step:7701/10000 train_time:664845ms step_avg:86.33ms +[2025-08-22 16:34:53] [Rank 0] step:7701/10000 train_time:664845ms step_avg:86.33ms +[2025-08-22 16:34:55] [Rank 0] step:7721/10000 train_time:666679ms step_avg:86.35ms +[2025-08-22 16:34:55] [Rank 0] step:7721/10000 train_time:666679ms step_avg:86.35ms +[2025-08-22 
16:34:57] [Rank 0] step:7741/10000 train_time:668501ms step_avg:86.36ms +[2025-08-22 16:34:57] [Rank 0] step:7741/10000 train_time:668501ms step_avg:86.36ms +[2025-08-22 16:34:59] [Rank 0] step:7761/10000 train_time:670326ms step_avg:86.37ms +[2025-08-22 16:34:59] [Rank 0] step:7761/10000 train_time:670326ms step_avg:86.37ms +[2025-08-22 16:35:00] [Rank 0] step:7781/10000 train_time:672157ms step_avg:86.38ms +[2025-08-22 16:35:00] [Rank 0] step:7781/10000 train_time:672157ms step_avg:86.38ms +[2025-08-22 16:35:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:35:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:35:16] [Rank 0] PRINT: step:7800/10000 val_loss:3.8625 svd_entropy: attn_qk:H=0.7289,top10E=0.28,eRank=133.8,q75/q25=93.42 attn_vo:H=0.6092,top10E=0.41,eRank=71.0,q75/q25=116.26 mlp_w1:H=0.7907,top10E=0.23,eRank=231.2,q75/q25=10.78 mlp_w2:H=0.9017,top10E=0.12,eRank=412.9,q75/q25=6.70 vo_prod:H=0.5072,top10E=0.60,eRank=36.9,q75/q25=13179.20 train_time:674304ms step_avg:86.45ms +[2025-08-22 16:35:16] [Rank 0] PRINT: step:7800/10000 val_loss:3.8625 svd_entropy: attn_qk:H=0.7289,top10E=0.28,eRank=133.8,q75/q25=93.42 attn_vo:H=0.6092,top10E=0.41,eRank=71.0,q75/q25=116.26 mlp_w1:H=0.7907,top10E=0.23,eRank=231.2,q75/q25=10.78 mlp_w2:H=0.9017,top10E=0.12,eRank=412.9,q75/q25=6.70 vo_prod:H=0.5072,top10E=0.60,eRank=36.9,q75/q25=13179.20 train_time:674304ms step_avg:86.45ms +[2025-08-22 16:35:16] [Rank 0] step:7801/10000 train_time:674315ms step_avg:86.44ms +[2025-08-22 16:35:16] [Rank 0] step:7801/10000 train_time:674315ms step_avg:86.44ms +[2025-08-22 16:35:17] [Rank 0] step:7821/10000 train_time:675829ms step_avg:86.41ms +[2025-08-22 16:35:17] [Rank 0] step:7821/10000 train_time:675829ms step_avg:86.41ms +[2025-08-22 16:35:19] [Rank 0] step:7841/10000 train_time:677641ms 
step_avg:86.42ms +[2025-08-22 16:35:19] [Rank 0] step:7841/10000 train_time:677641ms step_avg:86.42ms +[2025-08-22 16:35:21] [Rank 0] step:7861/10000 train_time:679455ms step_avg:86.43ms +[2025-08-22 16:35:21] [Rank 0] step:7861/10000 train_time:679455ms step_avg:86.43ms +[2025-08-22 16:35:23] [Rank 0] step:7881/10000 train_time:681277ms step_avg:86.45ms +[2025-08-22 16:35:23] [Rank 0] step:7881/10000 train_time:681277ms step_avg:86.45ms +[2025-08-22 16:35:25] [Rank 0] step:7901/10000 train_time:683087ms step_avg:86.46ms +[2025-08-22 16:35:25] [Rank 0] step:7901/10000 train_time:683087ms step_avg:86.46ms +[2025-08-22 16:35:27] [Rank 0] step:7921/10000 train_time:684907ms step_avg:86.47ms +[2025-08-22 16:35:27] [Rank 0] step:7921/10000 train_time:684907ms step_avg:86.47ms +[2025-08-22 16:35:28] [Rank 0] step:7941/10000 train_time:686732ms step_avg:86.48ms +[2025-08-22 16:35:28] [Rank 0] step:7941/10000 train_time:686732ms step_avg:86.48ms +[2025-08-22 16:35:30] [Rank 0] step:7961/10000 train_time:688555ms step_avg:86.49ms +[2025-08-22 16:35:30] [Rank 0] step:7961/10000 train_time:688555ms step_avg:86.49ms +[2025-08-22 16:35:32] [Rank 0] step:7981/10000 train_time:690368ms step_avg:86.50ms +[2025-08-22 16:35:32] [Rank 0] step:7981/10000 train_time:690368ms step_avg:86.50ms +[2025-08-22 16:35:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:35:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:35:47] [Rank 0] PRINT: step:8000/10000 val_loss:3.8417 svd_entropy: attn_qk:H=0.7296,top10E=0.28,eRank=134.5,q75/q25=93.29 attn_vo:H=0.6106,top10E=0.41,eRank=71.7,q75/q25=116.61 mlp_w1:H=0.7915,top10E=0.23,eRank=232.4,q75/q25=10.73 mlp_w2:H=0.9022,top10E=0.12,eRank=414.2,q75/q25=6.66 vo_prod:H=0.5087,top10E=0.60,eRank=37.3,q75/q25=13066.74 train_time:692501ms step_avg:86.56ms +[2025-08-22 16:35:47] [Rank 0] PRINT: step:8000/10000 val_loss:3.8417 svd_entropy: attn_qk:H=0.7296,top10E=0.28,eRank=134.5,q75/q25=93.29 attn_vo:H=0.6106,top10E=0.41,eRank=71.7,q75/q25=116.61 mlp_w1:H=0.7915,top10E=0.23,eRank=232.4,q75/q25=10.73 mlp_w2:H=0.9022,top10E=0.12,eRank=414.2,q75/q25=6.66 vo_prod:H=0.5087,top10E=0.60,eRank=37.3,q75/q25=13066.74 train_time:692501ms step_avg:86.56ms +[2025-08-22 16:35:47] [Rank 0] step:8001/10000 train_time:692511ms step_avg:86.55ms +[2025-08-22 16:35:47] [Rank 0] step:8001/10000 train_time:692511ms step_avg:86.55ms +[2025-08-22 16:35:49] [Rank 0] step:8021/10000 train_time:694022ms step_avg:86.53ms +[2025-08-22 16:35:49] [Rank 0] step:8021/10000 train_time:694022ms step_avg:86.53ms +[2025-08-22 16:35:51] [Rank 0] step:8041/10000 train_time:695847ms step_avg:86.54ms +[2025-08-22 16:35:51] [Rank 0] step:8041/10000 train_time:695847ms step_avg:86.54ms +[2025-08-22 16:35:53] [Rank 0] step:8061/10000 train_time:697660ms step_avg:86.55ms +[2025-08-22 16:35:53] [Rank 0] step:8061/10000 train_time:697660ms step_avg:86.55ms +[2025-08-22 16:35:55] [Rank 0] step:8081/10000 train_time:699472ms step_avg:86.56ms +[2025-08-22 16:35:55] [Rank 0] step:8081/10000 train_time:699472ms step_avg:86.56ms +[2025-08-22 16:35:57] [Rank 0] step:8101/10000 train_time:701293ms step_avg:86.57ms +[2025-08-22 16:35:57] [Rank 0] step:8101/10000 train_time:701293ms step_avg:86.57ms +[2025-08-22 16:35:58] [Rank 0] step:8121/10000 train_time:703106ms step_avg:86.58ms +[2025-08-22 16:35:58] [Rank 0] step:8121/10000 train_time:703106ms step_avg:86.58ms +[2025-08-22 
16:36:01] [Rank 0] step:8141/10000 train_time:704925ms step_avg:86.59ms +[2025-08-22 16:36:01] [Rank 0] step:8141/10000 train_time:704925ms step_avg:86.59ms +[2025-08-22 16:36:02] [Rank 0] step:8161/10000 train_time:707089ms step_avg:86.64ms +[2025-08-22 16:36:02] [Rank 0] step:8161/10000 train_time:707089ms step_avg:86.64ms +[2025-08-22 16:36:04] [Rank 0] step:8181/10000 train_time:708941ms step_avg:86.66ms +[2025-08-22 16:36:04] [Rank 0] step:8181/10000 train_time:708941ms step_avg:86.66ms +[2025-08-22 16:36:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:36:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:36:20] [Rank 0] PRINT: step:8200/10000 val_loss:3.8314 svd_entropy: attn_qk:H=0.7303,top10E=0.28,eRank=135.1,q75/q25=93.05 attn_vo:H=0.6118,top10E=0.41,eRank=72.3,q75/q25=116.73 mlp_w1:H=0.7921,top10E=0.23,eRank=233.5,q75/q25=10.69 mlp_w2:H=0.9027,top10E=0.12,eRank=415.5,q75/q25=6.63 vo_prod:H=0.5101,top10E=0.60,eRank=37.6,q75/q25=13024.43 train_time:711129ms step_avg:86.72ms +[2025-08-22 16:36:20] [Rank 0] PRINT: step:8200/10000 val_loss:3.8314 svd_entropy: attn_qk:H=0.7303,top10E=0.28,eRank=135.1,q75/q25=93.05 attn_vo:H=0.6118,top10E=0.41,eRank=72.3,q75/q25=116.73 mlp_w1:H=0.7921,top10E=0.23,eRank=233.5,q75/q25=10.69 mlp_w2:H=0.9027,top10E=0.12,eRank=415.5,q75/q25=6.63 vo_prod:H=0.5101,top10E=0.60,eRank=37.6,q75/q25=13024.43 train_time:711129ms step_avg:86.72ms +[2025-08-22 16:36:20] [Rank 0] step:8201/10000 train_time:711138ms step_avg:86.71ms +[2025-08-22 16:36:20] [Rank 0] step:8201/10000 train_time:711138ms step_avg:86.71ms +[2025-08-22 16:36:22] [Rank 0] step:8221/10000 train_time:712676ms step_avg:86.69ms +[2025-08-22 16:36:22] [Rank 0] step:8221/10000 train_time:712676ms step_avg:86.69ms +[2025-08-22 16:36:23] [Rank 0] step:8241/10000 train_time:714521ms 
step_avg:86.70ms +[2025-08-22 16:36:23] [Rank 0] step:8241/10000 train_time:714521ms step_avg:86.70ms +[2025-08-22 16:36:25] [Rank 0] step:8261/10000 train_time:716363ms step_avg:86.72ms +[2025-08-22 16:36:25] [Rank 0] step:8261/10000 train_time:716363ms step_avg:86.72ms +[2025-08-22 16:36:27] [Rank 0] step:8281/10000 train_time:718213ms step_avg:86.73ms +[2025-08-22 16:36:27] [Rank 0] step:8281/10000 train_time:718213ms step_avg:86.73ms +[2025-08-22 16:36:29] [Rank 0] step:8301/10000 train_time:720050ms step_avg:86.74ms +[2025-08-22 16:36:29] [Rank 0] step:8301/10000 train_time:720050ms step_avg:86.74ms +[2025-08-22 16:36:31] [Rank 0] step:8321/10000 train_time:721890ms step_avg:86.76ms +[2025-08-22 16:36:31] [Rank 0] step:8321/10000 train_time:721890ms step_avg:86.76ms +[2025-08-22 16:36:33] [Rank 0] step:8341/10000 train_time:723736ms step_avg:86.77ms +[2025-08-22 16:36:33] [Rank 0] step:8341/10000 train_time:723736ms step_avg:86.77ms +[2025-08-22 16:36:34] [Rank 0] step:8361/10000 train_time:725580ms step_avg:86.78ms +[2025-08-22 16:36:34] [Rank 0] step:8361/10000 train_time:725580ms step_avg:86.78ms +[2025-08-22 16:36:36] [Rank 0] step:8381/10000 train_time:727425ms step_avg:86.79ms +[2025-08-22 16:36:36] [Rank 0] step:8381/10000 train_time:727425ms step_avg:86.79ms +[2025-08-22 16:36:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:36:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:36:52] [Rank 0] PRINT: step:8400/10000 val_loss:3.8172 svd_entropy: attn_qk:H=0.7309,top10E=0.28,eRank=135.6,q75/q25=93.17 attn_vo:H=0.6129,top10E=0.41,eRank=72.9,q75/q25=117.44 mlp_w1:H=0.7927,top10E=0.23,eRank=234.5,q75/q25=10.66 mlp_w2:H=0.9031,top10E=0.12,eRank=416.6,q75/q25=6.59 vo_prod:H=0.5111,top10E=0.59,eRank=37.9,q75/q25=13013.75 train_time:729586ms step_avg:86.86ms +[2025-08-22 16:36:52] [Rank 0] PRINT: step:8400/10000 val_loss:3.8172 svd_entropy: attn_qk:H=0.7309,top10E=0.28,eRank=135.6,q75/q25=93.17 attn_vo:H=0.6129,top10E=0.41,eRank=72.9,q75/q25=117.44 mlp_w1:H=0.7927,top10E=0.23,eRank=234.5,q75/q25=10.66 mlp_w2:H=0.9031,top10E=0.12,eRank=416.6,q75/q25=6.59 vo_prod:H=0.5111,top10E=0.59,eRank=37.9,q75/q25=13013.75 train_time:729586ms step_avg:86.86ms +[2025-08-22 16:36:52] [Rank 0] step:8401/10000 train_time:729596ms step_avg:86.85ms +[2025-08-22 16:36:52] [Rank 0] step:8401/10000 train_time:729596ms step_avg:86.85ms +[2025-08-22 16:36:54] [Rank 0] step:8421/10000 train_time:731143ms step_avg:86.82ms +[2025-08-22 16:36:54] [Rank 0] step:8421/10000 train_time:731143ms step_avg:86.82ms +[2025-08-22 16:36:56] [Rank 0] step:8441/10000 train_time:732980ms step_avg:86.84ms +[2025-08-22 16:36:56] [Rank 0] step:8441/10000 train_time:732980ms step_avg:86.84ms +[2025-08-22 16:36:58] [Rank 0] step:8461/10000 train_time:734818ms step_avg:86.85ms +[2025-08-22 16:36:58] [Rank 0] step:8461/10000 train_time:734818ms step_avg:86.85ms +[2025-08-22 16:36:59] [Rank 0] step:8481/10000 train_time:736672ms step_avg:86.86ms +[2025-08-22 16:36:59] [Rank 0] step:8481/10000 train_time:736672ms step_avg:86.86ms +[2025-08-22 16:37:01] [Rank 0] step:8501/10000 train_time:738535ms step_avg:86.88ms +[2025-08-22 16:37:01] [Rank 0] step:8501/10000 train_time:738535ms step_avg:86.88ms +[2025-08-22 16:37:03] [Rank 0] step:8521/10000 train_time:740395ms step_avg:86.89ms +[2025-08-22 16:37:03] [Rank 0] step:8521/10000 train_time:740395ms step_avg:86.89ms +[2025-08-22 
16:37:05] [Rank 0] step:8541/10000 train_time:742252ms step_avg:86.90ms +[2025-08-22 16:37:05] [Rank 0] step:8541/10000 train_time:742252ms step_avg:86.90ms +[2025-08-22 16:37:07] [Rank 0] step:8561/10000 train_time:744114ms step_avg:86.92ms +[2025-08-22 16:37:07] [Rank 0] step:8561/10000 train_time:744114ms step_avg:86.92ms +[2025-08-22 16:37:09] [Rank 0] step:8581/10000 train_time:745968ms step_avg:86.93ms +[2025-08-22 16:37:09] [Rank 0] step:8581/10000 train_time:745968ms step_avg:86.93ms +[2025-08-22 16:37:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:37:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:37:24] [Rank 0] PRINT: step:8600/10000 val_loss:3.8055 svd_entropy: attn_qk:H=0.7314,top10E=0.28,eRank=136.1,q75/q25=93.00 attn_vo:H=0.6139,top10E=0.40,eRank=73.3,q75/q25=116.76 mlp_w1:H=0.7932,top10E=0.23,eRank=235.3,q75/q25=10.63 mlp_w2:H=0.9035,top10E=0.12,eRank=417.6,q75/q25=6.57 vo_prod:H=0.5121,top10E=0.59,eRank=38.2,q75/q25=12601.71 train_time:748132ms step_avg:86.99ms +[2025-08-22 16:37:24] [Rank 0] PRINT: step:8600/10000 val_loss:3.8055 svd_entropy: attn_qk:H=0.7314,top10E=0.28,eRank=136.1,q75/q25=93.00 attn_vo:H=0.6139,top10E=0.40,eRank=73.3,q75/q25=116.76 mlp_w1:H=0.7932,top10E=0.23,eRank=235.3,q75/q25=10.63 mlp_w2:H=0.9035,top10E=0.12,eRank=417.6,q75/q25=6.57 vo_prod:H=0.5121,top10E=0.59,eRank=38.2,q75/q25=12601.71 train_time:748132ms step_avg:86.99ms +[2025-08-22 16:37:25] [Rank 0] step:8601/10000 train_time:748141ms step_avg:86.98ms +[2025-08-22 16:37:25] [Rank 0] step:8601/10000 train_time:748141ms step_avg:86.98ms +[2025-08-22 16:37:26] [Rank 0] step:8621/10000 train_time:749702ms step_avg:86.96ms +[2025-08-22 16:37:26] [Rank 0] step:8621/10000 train_time:749702ms step_avg:86.96ms +[2025-08-22 16:37:28] [Rank 0] step:8641/10000 train_time:751541ms 
step_avg:86.97ms +[2025-08-22 16:37:28] [Rank 0] step:8641/10000 train_time:751541ms step_avg:86.97ms +[2025-08-22 16:37:30] [Rank 0] step:8661/10000 train_time:753387ms step_avg:86.99ms +[2025-08-22 16:37:30] [Rank 0] step:8661/10000 train_time:753387ms step_avg:86.99ms +[2025-08-22 16:37:32] [Rank 0] step:8681/10000 train_time:755227ms step_avg:87.00ms +[2025-08-22 16:37:32] [Rank 0] step:8681/10000 train_time:755227ms step_avg:87.00ms +[2025-08-22 16:37:34] [Rank 0] step:8701/10000 train_time:757073ms step_avg:87.01ms +[2025-08-22 16:37:34] [Rank 0] step:8701/10000 train_time:757073ms step_avg:87.01ms +[2025-08-22 16:37:36] [Rank 0] step:8721/10000 train_time:758912ms step_avg:87.02ms +[2025-08-22 16:37:36] [Rank 0] step:8721/10000 train_time:758912ms step_avg:87.02ms +[2025-08-22 16:37:37] [Rank 0] step:8741/10000 train_time:760752ms step_avg:87.03ms +[2025-08-22 16:37:37] [Rank 0] step:8741/10000 train_time:760752ms step_avg:87.03ms +[2025-08-22 16:37:39] [Rank 0] step:8761/10000 train_time:762593ms step_avg:87.04ms +[2025-08-22 16:37:39] [Rank 0] step:8761/10000 train_time:762593ms step_avg:87.04ms +[2025-08-22 16:37:41] [Rank 0] step:8781/10000 train_time:764447ms step_avg:87.06ms +[2025-08-22 16:37:41] [Rank 0] step:8781/10000 train_time:764447ms step_avg:87.06ms +[2025-08-22 16:37:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:37:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:37:57] [Rank 0] PRINT: step:8800/10000 val_loss:3.7911 svd_entropy: attn_qk:H=0.7318,top10E=0.28,eRank=136.5,q75/q25=92.82 attn_vo:H=0.6148,top10E=0.40,eRank=73.8,q75/q25=117.01 mlp_w1:H=0.7936,top10E=0.23,eRank=236.0,q75/q25=10.61 mlp_w2:H=0.9039,top10E=0.12,eRank=418.5,q75/q25=6.54 vo_prod:H=0.5129,top10E=0.59,eRank=38.4,q75/q25=12923.57 train_time:766608ms step_avg:87.11ms +[2025-08-22 16:37:57] [Rank 0] PRINT: step:8800/10000 val_loss:3.7911 svd_entropy: attn_qk:H=0.7318,top10E=0.28,eRank=136.5,q75/q25=92.82 attn_vo:H=0.6148,top10E=0.40,eRank=73.8,q75/q25=117.01 mlp_w1:H=0.7936,top10E=0.23,eRank=236.0,q75/q25=10.61 mlp_w2:H=0.9039,top10E=0.12,eRank=418.5,q75/q25=6.54 vo_prod:H=0.5129,top10E=0.59,eRank=38.4,q75/q25=12923.57 train_time:766608ms step_avg:87.11ms +[2025-08-22 16:37:57] [Rank 0] step:8801/10000 train_time:766616ms step_avg:87.11ms +[2025-08-22 16:37:57] [Rank 0] step:8801/10000 train_time:766616ms step_avg:87.11ms +[2025-08-22 16:37:59] [Rank 0] step:8821/10000 train_time:768149ms step_avg:87.08ms +[2025-08-22 16:37:59] [Rank 0] step:8821/10000 train_time:768149ms step_avg:87.08ms +[2025-08-22 16:38:01] [Rank 0] step:8841/10000 train_time:770005ms step_avg:87.09ms +[2025-08-22 16:38:01] [Rank 0] step:8841/10000 train_time:770005ms step_avg:87.09ms +[2025-08-22 16:38:02] [Rank 0] step:8861/10000 train_time:771850ms step_avg:87.11ms +[2025-08-22 16:38:02] [Rank 0] step:8861/10000 train_time:771850ms step_avg:87.11ms +[2025-08-22 16:38:04] [Rank 0] step:8881/10000 train_time:773686ms step_avg:87.12ms +[2025-08-22 16:38:04] [Rank 0] step:8881/10000 train_time:773686ms step_avg:87.12ms +[2025-08-22 16:38:06] [Rank 0] step:8901/10000 train_time:775536ms step_avg:87.13ms +[2025-08-22 16:38:06] [Rank 0] step:8901/10000 train_time:775536ms step_avg:87.13ms +[2025-08-22 16:38:08] [Rank 0] step:8921/10000 train_time:777383ms step_avg:87.14ms +[2025-08-22 16:38:08] [Rank 0] step:8921/10000 train_time:777383ms step_avg:87.14ms +[2025-08-22 
16:38:10] [Rank 0] step:8941/10000 train_time:779243ms step_avg:87.15ms +[2025-08-22 16:38:10] [Rank 0] step:8941/10000 train_time:779243ms step_avg:87.15ms +[2025-08-22 16:38:12] [Rank 0] step:8961/10000 train_time:781083ms step_avg:87.16ms +[2025-08-22 16:38:12] [Rank 0] step:8961/10000 train_time:781083ms step_avg:87.16ms +[2025-08-22 16:38:14] [Rank 0] step:8981/10000 train_time:782922ms step_avg:87.18ms +[2025-08-22 16:38:14] [Rank 0] step:8981/10000 train_time:782922ms step_avg:87.18ms +[2025-08-22 16:38:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:38:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:38:29] [Rank 0] PRINT: step:9000/10000 val_loss:3.7789 svd_entropy: attn_qk:H=0.7322,top10E=0.28,eRank=136.8,q75/q25=92.62 attn_vo:H=0.6156,top10E=0.40,eRank=74.2,q75/q25=117.18 mlp_w1:H=0.7940,top10E=0.23,eRank=236.7,q75/q25=10.59 mlp_w2:H=0.9042,top10E=0.12,eRank=419.3,q75/q25=6.51 vo_prod:H=0.5137,top10E=0.59,eRank=38.6,q75/q25=13104.54 train_time:785083ms step_avg:87.23ms +[2025-08-22 16:38:29] [Rank 0] PRINT: step:9000/10000 val_loss:3.7789 svd_entropy: attn_qk:H=0.7322,top10E=0.28,eRank=136.8,q75/q25=92.62 attn_vo:H=0.6156,top10E=0.40,eRank=74.2,q75/q25=117.18 mlp_w1:H=0.7940,top10E=0.23,eRank=236.7,q75/q25=10.59 mlp_w2:H=0.9042,top10E=0.12,eRank=419.3,q75/q25=6.51 vo_prod:H=0.5137,top10E=0.59,eRank=38.6,q75/q25=13104.54 train_time:785083ms step_avg:87.23ms +[2025-08-22 16:38:29] [Rank 0] step:9001/10000 train_time:785094ms step_avg:87.22ms +[2025-08-22 16:38:29] [Rank 0] step:9001/10000 train_time:785094ms step_avg:87.22ms +[2025-08-22 16:38:31] [Rank 0] step:9021/10000 train_time:786641ms step_avg:87.20ms +[2025-08-22 16:38:31] [Rank 0] step:9021/10000 train_time:786641ms step_avg:87.20ms +[2025-08-22 16:38:33] [Rank 0] step:9041/10000 train_time:788479ms 
step_avg:87.21ms +[2025-08-22 16:38:33] [Rank 0] step:9041/10000 train_time:788479ms step_avg:87.21ms +[2025-08-22 16:38:35] [Rank 0] step:9061/10000 train_time:790333ms step_avg:87.22ms +[2025-08-22 16:38:35] [Rank 0] step:9061/10000 train_time:790333ms step_avg:87.22ms +[2025-08-22 16:38:37] [Rank 0] step:9081/10000 train_time:792185ms step_avg:87.24ms +[2025-08-22 16:38:37] [Rank 0] step:9081/10000 train_time:792185ms step_avg:87.24ms +[2025-08-22 16:38:39] [Rank 0] step:9101/10000 train_time:794048ms step_avg:87.25ms +[2025-08-22 16:38:39] [Rank 0] step:9101/10000 train_time:794048ms step_avg:87.25ms +[2025-08-22 16:38:40] [Rank 0] step:9121/10000 train_time:795895ms step_avg:87.26ms +[2025-08-22 16:38:40] [Rank 0] step:9121/10000 train_time:795895ms step_avg:87.26ms +[2025-08-22 16:38:42] [Rank 0] step:9141/10000 train_time:797736ms step_avg:87.27ms +[2025-08-22 16:38:42] [Rank 0] step:9141/10000 train_time:797736ms step_avg:87.27ms +[2025-08-22 16:38:44] [Rank 0] step:9161/10000 train_time:799572ms step_avg:87.28ms +[2025-08-22 16:38:44] [Rank 0] step:9161/10000 train_time:799572ms step_avg:87.28ms +[2025-08-22 16:38:46] [Rank 0] step:9181/10000 train_time:801445ms step_avg:87.29ms +[2025-08-22 16:38:46] [Rank 0] step:9181/10000 train_time:801445ms step_avg:87.29ms +[2025-08-22 16:38:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:38:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:39:02] [Rank 0] PRINT: step:9200/10000 val_loss:3.7690 svd_entropy: attn_qk:H=0.7326,top10E=0.28,eRank=137.1,q75/q25=92.64 attn_vo:H=0.6163,top10E=0.40,eRank=74.5,q75/q25=117.21 mlp_w1:H=0.7944,top10E=0.23,eRank=237.3,q75/q25=10.57 mlp_w2:H=0.9044,top10E=0.12,eRank=420.1,q75/q25=6.49 vo_prod:H=0.5142,top10E=0.59,eRank=38.8,q75/q25=13065.97 train_time:803617ms step_avg:87.35ms +[2025-08-22 16:39:02] [Rank 0] PRINT: step:9200/10000 val_loss:3.7690 svd_entropy: attn_qk:H=0.7326,top10E=0.28,eRank=137.1,q75/q25=92.64 attn_vo:H=0.6163,top10E=0.40,eRank=74.5,q75/q25=117.21 mlp_w1:H=0.7944,top10E=0.23,eRank=237.3,q75/q25=10.57 mlp_w2:H=0.9044,top10E=0.12,eRank=420.1,q75/q25=6.49 vo_prod:H=0.5142,top10E=0.59,eRank=38.8,q75/q25=13065.97 train_time:803617ms step_avg:87.35ms +[2025-08-22 16:39:02] [Rank 0] step:9201/10000 train_time:803626ms step_avg:87.34ms +[2025-08-22 16:39:02] [Rank 0] step:9201/10000 train_time:803626ms step_avg:87.34ms +[2025-08-22 16:39:04] [Rank 0] step:9221/10000 train_time:805175ms step_avg:87.32ms +[2025-08-22 16:39:04] [Rank 0] step:9221/10000 train_time:805175ms step_avg:87.32ms +[2025-08-22 16:39:05] [Rank 0] step:9241/10000 train_time:807028ms step_avg:87.33ms +[2025-08-22 16:39:05] [Rank 0] step:9241/10000 train_time:807028ms step_avg:87.33ms +[2025-08-22 16:39:07] [Rank 0] step:9261/10000 train_time:808888ms step_avg:87.34ms +[2025-08-22 16:39:07] [Rank 0] step:9261/10000 train_time:808888ms step_avg:87.34ms +[2025-08-22 16:39:09] [Rank 0] step:9281/10000 train_time:810730ms step_avg:87.35ms +[2025-08-22 16:39:09] [Rank 0] step:9281/10000 train_time:810730ms step_avg:87.35ms +[2025-08-22 16:39:11] [Rank 0] step:9301/10000 train_time:812572ms step_avg:87.36ms +[2025-08-22 16:39:11] [Rank 0] step:9301/10000 train_time:812572ms step_avg:87.36ms +[2025-08-22 16:39:13] [Rank 0] step:9321/10000 train_time:814428ms step_avg:87.38ms +[2025-08-22 16:39:13] [Rank 0] step:9321/10000 train_time:814428ms step_avg:87.38ms +[2025-08-22 
16:39:15] [Rank 0] step:9341/10000 train_time:816281ms step_avg:87.39ms +[2025-08-22 16:39:15] [Rank 0] step:9341/10000 train_time:816281ms step_avg:87.39ms +[2025-08-22 16:39:17] [Rank 0] step:9361/10000 train_time:818135ms step_avg:87.40ms +[2025-08-22 16:39:17] [Rank 0] step:9361/10000 train_time:818135ms step_avg:87.40ms +[2025-08-22 16:39:18] [Rank 0] step:9381/10000 train_time:820004ms step_avg:87.41ms +[2025-08-22 16:39:18] [Rank 0] step:9381/10000 train_time:820004ms step_avg:87.41ms +[2025-08-22 16:39:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:39:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:39:34] [Rank 0] PRINT: step:9400/10000 val_loss:3.7594 svd_entropy: attn_qk:H=0.7328,top10E=0.28,eRank=137.4,q75/q25=92.27 attn_vo:H=0.6169,top10E=0.40,eRank=74.8,q75/q25=117.05 mlp_w1:H=0.7946,top10E=0.23,eRank=237.8,q75/q25=10.55 mlp_w2:H=0.9047,top10E=0.12,eRank=420.6,q75/q25=6.47 vo_prod:H=0.5149,top10E=0.59,eRank=39.0,q75/q25=13203.88 train_time:822189ms step_avg:87.47ms +[2025-08-22 16:39:34] [Rank 0] PRINT: step:9400/10000 val_loss:3.7594 svd_entropy: attn_qk:H=0.7328,top10E=0.28,eRank=137.4,q75/q25=92.27 attn_vo:H=0.6169,top10E=0.40,eRank=74.8,q75/q25=117.05 mlp_w1:H=0.7946,top10E=0.23,eRank=237.8,q75/q25=10.55 mlp_w2:H=0.9047,top10E=0.12,eRank=420.6,q75/q25=6.47 vo_prod:H=0.5149,top10E=0.59,eRank=39.0,q75/q25=13203.88 train_time:822189ms step_avg:87.47ms +[2025-08-22 16:39:34] [Rank 0] step:9401/10000 train_time:822199ms step_avg:87.46ms +[2025-08-22 16:39:34] [Rank 0] step:9401/10000 train_time:822199ms step_avg:87.46ms +[2025-08-22 16:39:36] [Rank 0] step:9421/10000 train_time:823741ms step_avg:87.44ms +[2025-08-22 16:39:36] [Rank 0] step:9421/10000 train_time:823741ms step_avg:87.44ms +[2025-08-22 16:39:38] [Rank 0] step:9441/10000 train_time:825587ms 
step_avg:87.45ms +[2025-08-22 16:39:38] [Rank 0] step:9441/10000 train_time:825587ms step_avg:87.45ms +[2025-08-22 16:39:40] [Rank 0] step:9461/10000 train_time:827440ms step_avg:87.46ms +[2025-08-22 16:39:40] [Rank 0] step:9461/10000 train_time:827440ms step_avg:87.46ms +[2025-08-22 16:39:42] [Rank 0] step:9481/10000 train_time:829291ms step_avg:87.47ms +[2025-08-22 16:39:42] [Rank 0] step:9481/10000 train_time:829291ms step_avg:87.47ms +[2025-08-22 16:39:44] [Rank 0] step:9501/10000 train_time:831146ms step_avg:87.48ms +[2025-08-22 16:39:44] [Rank 0] step:9501/10000 train_time:831146ms step_avg:87.48ms +[2025-08-22 16:39:45] [Rank 0] step:9521/10000 train_time:832988ms step_avg:87.49ms +[2025-08-22 16:39:45] [Rank 0] step:9521/10000 train_time:832988ms step_avg:87.49ms +[2025-08-22 16:39:47] [Rank 0] step:9541/10000 train_time:834873ms step_avg:87.50ms +[2025-08-22 16:39:47] [Rank 0] step:9541/10000 train_time:834873ms step_avg:87.50ms +[2025-08-22 16:39:49] [Rank 0] step:9561/10000 train_time:836719ms step_avg:87.51ms +[2025-08-22 16:39:49] [Rank 0] step:9561/10000 train_time:836719ms step_avg:87.51ms +[2025-08-22 16:39:51] [Rank 0] step:9581/10000 train_time:838568ms step_avg:87.52ms +[2025-08-22 16:39:51] [Rank 0] step:9581/10000 train_time:838568ms step_avg:87.52ms +[2025-08-22 16:39:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:39:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:40:07] [Rank 0] PRINT: step:9600/10000 val_loss:3.7506 svd_entropy: attn_qk:H=0.7331,top10E=0.28,eRank=137.6,q75/q25=92.52 attn_vo:H=0.6173,top10E=0.40,eRank=75.1,q75/q25=117.20 mlp_w1:H=0.7949,top10E=0.23,eRank=238.2,q75/q25=10.53 mlp_w2:H=0.9049,top10E=0.12,eRank=421.2,q75/q25=6.45 vo_prod:H=0.5153,top10E=0.59,eRank=39.1,q75/q25=13131.32 train_time:840747ms step_avg:87.58ms +[2025-08-22 16:40:07] [Rank 0] PRINT: step:9600/10000 val_loss:3.7506 svd_entropy: attn_qk:H=0.7331,top10E=0.28,eRank=137.6,q75/q25=92.52 attn_vo:H=0.6173,top10E=0.40,eRank=75.1,q75/q25=117.20 mlp_w1:H=0.7949,top10E=0.23,eRank=238.2,q75/q25=10.53 mlp_w2:H=0.9049,top10E=0.12,eRank=421.2,q75/q25=6.45 vo_prod:H=0.5153,top10E=0.59,eRank=39.1,q75/q25=13131.32 train_time:840747ms step_avg:87.58ms +[2025-08-22 16:40:07] [Rank 0] step:9601/10000 train_time:840757ms step_avg:87.57ms +[2025-08-22 16:40:07] [Rank 0] step:9601/10000 train_time:840757ms step_avg:87.57ms +[2025-08-22 16:40:09] [Rank 0] step:9621/10000 train_time:842289ms step_avg:87.55ms +[2025-08-22 16:40:09] [Rank 0] step:9621/10000 train_time:842289ms step_avg:87.55ms +[2025-08-22 16:40:10] [Rank 0] step:9641/10000 train_time:844136ms step_avg:87.56ms +[2025-08-22 16:40:10] [Rank 0] step:9641/10000 train_time:844136ms step_avg:87.56ms +[2025-08-22 16:40:12] [Rank 0] step:9661/10000 train_time:846009ms step_avg:87.57ms +[2025-08-22 16:40:12] [Rank 0] step:9661/10000 train_time:846009ms step_avg:87.57ms +[2025-08-22 16:40:14] [Rank 0] step:9681/10000 train_time:847880ms step_avg:87.58ms +[2025-08-22 16:40:14] [Rank 0] step:9681/10000 train_time:847880ms step_avg:87.58ms +[2025-08-22 16:40:16] [Rank 0] step:9701/10000 train_time:849762ms step_avg:87.60ms +[2025-08-22 16:40:16] [Rank 0] step:9701/10000 train_time:849762ms step_avg:87.60ms +[2025-08-22 16:40:18] [Rank 0] step:9721/10000 train_time:851634ms step_avg:87.61ms +[2025-08-22 16:40:18] [Rank 0] step:9721/10000 train_time:851634ms step_avg:87.61ms +[2025-08-22 
16:40:20] [Rank 0] step:9741/10000 train_time:853536ms step_avg:87.62ms +[2025-08-22 16:40:20] [Rank 0] step:9741/10000 train_time:853536ms step_avg:87.62ms +[2025-08-22 16:40:22] [Rank 0] step:9761/10000 train_time:855417ms step_avg:87.64ms +[2025-08-22 16:40:22] [Rank 0] step:9761/10000 train_time:855417ms step_avg:87.64ms +[2025-08-22 16:40:24] [Rank 0] step:9781/10000 train_time:857313ms step_avg:87.65ms +[2025-08-22 16:40:24] [Rank 0] step:9781/10000 train_time:857313ms step_avg:87.65ms +[2025-08-22 16:40:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:40:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:40:39] [Rank 0] PRINT: step:9800/10000 val_loss:3.7420 svd_entropy: attn_qk:H=0.7333,top10E=0.28,eRank=137.8,q75/q25=92.54 attn_vo:H=0.6177,top10E=0.40,eRank=75.3,q75/q25=117.80 mlp_w1:H=0.7950,top10E=0.23,eRank=238.4,q75/q25=10.52 mlp_w2:H=0.9050,top10E=0.12,eRank=421.6,q75/q25=6.45 vo_prod:H=0.5156,top10E=0.59,eRank=39.2,q75/q25=13165.26 train_time:859530ms step_avg:87.71ms +[2025-08-22 16:40:39] [Rank 0] PRINT: step:9800/10000 val_loss:3.7420 svd_entropy: attn_qk:H=0.7333,top10E=0.28,eRank=137.8,q75/q25=92.54 attn_vo:H=0.6177,top10E=0.40,eRank=75.3,q75/q25=117.80 mlp_w1:H=0.7950,top10E=0.23,eRank=238.4,q75/q25=10.52 mlp_w2:H=0.9050,top10E=0.12,eRank=421.6,q75/q25=6.45 vo_prod:H=0.5156,top10E=0.59,eRank=39.2,q75/q25=13165.26 train_time:859530ms step_avg:87.71ms +[2025-08-22 16:40:39] [Rank 0] step:9801/10000 train_time:859539ms step_avg:87.70ms +[2025-08-22 16:40:39] [Rank 0] step:9801/10000 train_time:859539ms step_avg:87.70ms +[2025-08-22 16:40:41] [Rank 0] step:9821/10000 train_time:861099ms step_avg:87.68ms +[2025-08-22 16:40:41] [Rank 0] step:9821/10000 train_time:861099ms step_avg:87.68ms +[2025-08-22 16:40:43] [Rank 0] step:9841/10000 train_time:862981ms 
step_avg:87.69ms +[2025-08-22 16:40:43] [Rank 0] step:9841/10000 train_time:862981ms step_avg:87.69ms +[2025-08-22 16:40:45] [Rank 0] step:9861/10000 train_time:864845ms step_avg:87.70ms +[2025-08-22 16:40:45] [Rank 0] step:9861/10000 train_time:864845ms step_avg:87.70ms +[2025-08-22 16:40:47] [Rank 0] step:9881/10000 train_time:866713ms step_avg:87.72ms +[2025-08-22 16:40:47] [Rank 0] step:9881/10000 train_time:866713ms step_avg:87.72ms +[2025-08-22 16:40:49] [Rank 0] step:9901/10000 train_time:868595ms step_avg:87.73ms +[2025-08-22 16:40:49] [Rank 0] step:9901/10000 train_time:868595ms step_avg:87.73ms +[2025-08-22 16:40:51] [Rank 0] step:9921/10000 train_time:870468ms step_avg:87.74ms +[2025-08-22 16:40:51] [Rank 0] step:9921/10000 train_time:870468ms step_avg:87.74ms +[2025-08-22 16:40:53] [Rank 0] step:9941/10000 train_time:872348ms step_avg:87.75ms +[2025-08-22 16:40:53] [Rank 0] step:9941/10000 train_time:872348ms step_avg:87.75ms +[2025-08-22 16:40:54] [Rank 0] step:9961/10000 train_time:874218ms step_avg:87.76ms +[2025-08-22 16:40:54] [Rank 0] step:9961/10000 train_time:874218ms step_avg:87.76ms +[2025-08-22 16:40:56] [Rank 0] step:9981/10000 train_time:876095ms step_avg:87.78ms +[2025-08-22 16:40:56] [Rank 0] step:9981/10000 train_time:876095ms step_avg:87.78ms +[2025-08-22 16:40:58] [Rank 0] step:10000/10000 train_time:877880ms step_avg:87.79ms +[2025-08-22 16:40:58] [Rank 0] step:10000/10000 train_time:877880ms step_avg:87.79ms +[2025-08-22 16:40:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:40:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:41:12] [Rank 0] PRINT: step:10000/10000 val_loss:3.7328 svd_entropy: attn_qk:H=0.7334,top10E=0.28,eRank=137.9,q75/q25=92.47 attn_vo:H=0.6180,top10E=0.40,eRank=75.4,q75/q25=117.75 mlp_w1:H=0.7952,top10E=0.23,eRank=238.7,q75/q25=10.52 mlp_w2:H=0.9051,top10E=0.12,eRank=421.9,q75/q25=6.44 vo_prod:H=0.5159,top10E=0.59,eRank=39.3,q75/q25=13268.91 train_time:878312ms step_avg:87.83ms +[2025-08-22 16:41:12] [Rank 0] PRINT: step:10000/10000 val_loss:3.7328 svd_entropy: attn_qk:H=0.7334,top10E=0.28,eRank=137.9,q75/q25=92.47 attn_vo:H=0.6180,top10E=0.40,eRank=75.4,q75/q25=117.75 mlp_w1:H=0.7952,top10E=0.23,eRank=238.7,q75/q25=10.52 mlp_w2:H=0.9051,top10E=0.12,eRank=421.9,q75/q25=6.44 vo_prod:H=0.5159,top10E=0.59,eRank=39.3,q75/q25=13268.91 train_time:878312ms step_avg:87.83ms +[2025-08-22 16:41:12] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 16:41:12 2025 --- +[2025-08-22 16:41:12] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 16:41:12 2025 --- +[2025-08-22 16:41:12] [Rank 0] PRINT: Peak memory allocated: 11559 MiB reserved: 11616 MiB +[2025-08-22 16:41:12] [Rank 0] PRINT: Peak memory allocated: 11559 MiB reserved: 11616 MiB diff --git a/logs_svd_gated/mode_5_param_gated_seed_43/config.json b/logs_svd_gated/mode_5_param_gated_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..ac9c22ea902a491b744fb66b5328a20440c41935 --- /dev/null +++ b/logs_svd_gated/mode_5_param_gated_seed_43/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 5, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "4c083d61-755d-4ebf-8822-d7e1c40d5260", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_5_param_gated_seed_43/training_log_4c083d61-755d-4ebf-8822-d7e1c40d5260.txt b/logs_svd_gated/mode_5_param_gated_seed_43/training_log_4c083d61-755d-4ebf-8822-d7e1c40d5260.txt new file mode 100644 index 0000000000000000000000000000000000000000..87d37701582735ef4f985e268232c312a9cd3e71 --- /dev/null +++ b/logs_svd_gated/mode_5_param_gated_seed_43/training_log_4c083d61-755d-4ebf-8822-d7e1c40d5260.txt @@ -0,0 +1,2926 @@ +[2025-08-22 21:27:46] [Rank 0] PRINT: --- Script Start: Fri Aug 22 21:27:46 2025 --- +[2025-08-22 21:27:46] [Rank 0] PRINT: --- Script Start: Fri Aug 22 21:27:46 2025 --- +[2025-08-22 21:27:46] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=5, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 21:27:46] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=5, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 21:27:46] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 21:27:46] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 21:27:46] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 21:27:46] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 21:27:46] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_5_param_gated_seed_43 +[2025-08-22 21:27:46] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_5_param_gated_seed_43 +[2025-08-22 21:27:46] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 21:27:46] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 21:27:46] [Rank 0] PRINT: Constructing model... +[2025-08-22 21:27:46] [Rank 0] PRINT: Constructing model... +[2025-08-22 21:27:49] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 21:27:49] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 21:27:49] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 21:27:49] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 21:27:49] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 21:27:49] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 21:27:49] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 5 +[2025-08-22 21:27:49] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 5 +[2025-08-22 21:27:49] [Rank 0] PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: 0.05). +[2025-08-22 21:27:49] [Rank 0] PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: 0.05). +[2025-08-22 21:27:49] [Rank 0] PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices). +[2025-08-22 21:27:49] [Rank 0] PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices). +[2025-08-22 21:27:49] [Rank 0] PRINT: Optimizers configured. Total optimizers: 1 +[2025-08-22 21:27:49] [Rank 0] PRINT: Optimizers configured. Total optimizers: 1 +[2025-08-22 21:27:49] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 21:27:49] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 21:27:49] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 21:27:49] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 21:27:49] [Rank 0] PRINT: Starting warmup... +[2025-08-22 21:27:49] [Rank 0] PRINT: Starting warmup... +[2025-08-22 21:28:31] [Rank 0] PRINT: Warmup complete. +[2025-08-22 21:28:31] [Rank 0] PRINT: Warmup complete. 
+[2025-08-22 21:28:32] [Rank 0] PRINT: Starting training... +[2025-08-22 21:28:32] [Rank 0] PRINT: Starting training... +[2025-08-22 21:28:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:28:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:28:49] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 21:28:49] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 21:28:51] [Rank 0] step:21/10000 train_time:1401ms step_avg:66.71ms +[2025-08-22 21:28:51] [Rank 0] step:21/10000 train_time:1401ms step_avg:66.71ms +[2025-08-22 21:28:52] [Rank 0] step:41/10000 train_time:3012ms step_avg:73.46ms +[2025-08-22 21:28:52] [Rank 0] step:41/10000 train_time:3012ms step_avg:73.46ms +[2025-08-22 21:28:54] [Rank 0] step:61/10000 train_time:4627ms step_avg:75.85ms +[2025-08-22 21:28:54] [Rank 0] step:61/10000 train_time:4627ms step_avg:75.85ms +[2025-08-22 21:28:56] [Rank 0] step:81/10000 train_time:6245ms step_avg:77.10ms +[2025-08-22 21:28:56] [Rank 0] step:81/10000 train_time:6245ms step_avg:77.10ms +[2025-08-22 21:28:57] [Rank 0] step:101/10000 train_time:7867ms step_avg:77.89ms +[2025-08-22 21:28:57] [Rank 0] step:101/10000 train_time:7867ms step_avg:77.89ms +[2025-08-22 
21:28:59] [Rank 0] step:121/10000 train_time:9492ms step_avg:78.44ms +[2025-08-22 21:28:59] [Rank 0] step:121/10000 train_time:9492ms step_avg:78.44ms +[2025-08-22 21:29:01] [Rank 0] step:141/10000 train_time:11118ms step_avg:78.85ms +[2025-08-22 21:29:01] [Rank 0] step:141/10000 train_time:11118ms step_avg:78.85ms +[2025-08-22 21:29:02] [Rank 0] step:161/10000 train_time:12821ms step_avg:79.63ms +[2025-08-22 21:29:02] [Rank 0] step:161/10000 train_time:12821ms step_avg:79.63ms +[2025-08-22 21:29:04] [Rank 0] step:181/10000 train_time:14477ms step_avg:79.98ms +[2025-08-22 21:29:04] [Rank 0] step:181/10000 train_time:14477ms step_avg:79.98ms +[2025-08-22 21:29:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:29:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:29:19] [Rank 0] PRINT: step:200/10000 val_loss:6.7305 svd_entropy: attn_qk:H=0.3525,top10E=0.88,eRank=15.0,q75/q25=19.30 attn_vo:H=0.1299,top10E=0.98,eRank=2.7,q75/q25=1545.61 mlp_w1:H=0.3159,top10E=0.91,eRank=8.8,q75/q25=4.81 mlp_w2:H=0.3739,top10E=0.84,eRank=13.6,q75/q25=6.30 vo_prod:H=0.0255,top10E=1.00,eRank=1.3,q75/q25=9823.54 train_time:16385ms step_avg:81.93ms +[2025-08-22 21:29:19] [Rank 0] PRINT: step:200/10000 val_loss:6.7305 svd_entropy: attn_qk:H=0.3525,top10E=0.88,eRank=15.0,q75/q25=19.30 attn_vo:H=0.1299,top10E=0.98,eRank=2.7,q75/q25=1545.61 mlp_w1:H=0.3159,top10E=0.91,eRank=8.8,q75/q25=4.81 mlp_w2:H=0.3739,top10E=0.84,eRank=13.6,q75/q25=6.30 vo_prod:H=0.0255,top10E=1.00,eRank=1.3,q75/q25=9823.54 train_time:16385ms step_avg:81.93ms +[2025-08-22 21:29:19] [Rank 0] step:201/10000 train_time:16398ms step_avg:81.58ms +[2025-08-22 21:29:19] [Rank 0] step:201/10000 train_time:16398ms step_avg:81.58ms +[2025-08-22 21:29:21] [Rank 0] step:221/10000 train_time:17749ms step_avg:80.31ms +[2025-08-22 21:29:21] [Rank 
0] step:221/10000 train_time:17749ms step_avg:80.31ms +[2025-08-22 21:29:22] [Rank 0] step:241/10000 train_time:19369ms step_avg:80.37ms +[2025-08-22 21:29:22] [Rank 0] step:241/10000 train_time:19369ms step_avg:80.37ms +[2025-08-22 21:29:24] [Rank 0] step:261/10000 train_time:20992ms step_avg:80.43ms +[2025-08-22 21:29:24] [Rank 0] step:261/10000 train_time:20992ms step_avg:80.43ms +[2025-08-22 21:29:25] [Rank 0] step:281/10000 train_time:22616ms step_avg:80.48ms +[2025-08-22 21:29:25] [Rank 0] step:281/10000 train_time:22616ms step_avg:80.48ms +[2025-08-22 21:29:27] [Rank 0] step:301/10000 train_time:24239ms step_avg:80.53ms +[2025-08-22 21:29:27] [Rank 0] step:301/10000 train_time:24239ms step_avg:80.53ms +[2025-08-22 21:29:29] [Rank 0] step:321/10000 train_time:25861ms step_avg:80.56ms +[2025-08-22 21:29:29] [Rank 0] step:321/10000 train_time:25861ms step_avg:80.56ms +[2025-08-22 21:29:30] [Rank 0] step:341/10000 train_time:27485ms step_avg:80.60ms +[2025-08-22 21:29:30] [Rank 0] step:341/10000 train_time:27485ms step_avg:80.60ms +[2025-08-22 21:29:32] [Rank 0] step:361/10000 train_time:29108ms step_avg:80.63ms +[2025-08-22 21:29:32] [Rank 0] step:361/10000 train_time:29108ms step_avg:80.63ms +[2025-08-22 21:29:34] [Rank 0] step:381/10000 train_time:30729ms step_avg:80.65ms +[2025-08-22 21:29:34] [Rank 0] step:381/10000 train_time:30729ms step_avg:80.65ms +[2025-08-22 21:29:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:29:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:29:49] [Rank 0] PRINT: step:400/10000 val_loss:6.0982 svd_entropy: attn_qk:H=0.4964,top10E=0.69,eRank=31.8,q75/q25=65.66 attn_vo:H=0.2735,top10E=0.97,eRank=6.6,q75/q25=99.64 mlp_w1:H=0.5015,top10E=0.70,eRank=31.4,q75/q25=8.20 mlp_w2:H=0.5668,top10E=0.57,eRank=49.9,q75/q25=7.56 vo_prod:H=0.1464,top10E=1.00,eRank=2.9,q75/q25=815.11 train_time:32632ms step_avg:81.58ms +[2025-08-22 21:29:49] [Rank 0] PRINT: step:400/10000 val_loss:6.0982 svd_entropy: attn_qk:H=0.4964,top10E=0.69,eRank=31.8,q75/q25=65.66 attn_vo:H=0.2735,top10E=0.97,eRank=6.6,q75/q25=99.64 mlp_w1:H=0.5015,top10E=0.70,eRank=31.4,q75/q25=8.20 mlp_w2:H=0.5668,top10E=0.57,eRank=49.9,q75/q25=7.56 vo_prod:H=0.1464,top10E=1.00,eRank=2.9,q75/q25=815.11 train_time:32632ms step_avg:81.58ms +[2025-08-22 21:29:49] [Rank 0] step:401/10000 train_time:32642ms step_avg:81.40ms +[2025-08-22 21:29:49] [Rank 0] step:401/10000 train_time:32642ms step_avg:81.40ms +[2025-08-22 21:29:50] [Rank 0] step:421/10000 train_time:33992ms step_avg:80.74ms +[2025-08-22 21:29:50] [Rank 0] step:421/10000 train_time:33992ms step_avg:80.74ms +[2025-08-22 21:29:52] [Rank 0] step:441/10000 train_time:35607ms step_avg:80.74ms +[2025-08-22 21:29:52] [Rank 0] step:441/10000 train_time:35607ms step_avg:80.74ms +[2025-08-22 21:29:54] [Rank 0] step:461/10000 train_time:37227ms step_avg:80.75ms +[2025-08-22 21:29:54] [Rank 0] step:461/10000 train_time:37227ms step_avg:80.75ms +[2025-08-22 21:29:55] [Rank 0] step:481/10000 train_time:38845ms step_avg:80.76ms +[2025-08-22 21:29:55] [Rank 0] step:481/10000 train_time:38845ms step_avg:80.76ms +[2025-08-22 21:29:57] [Rank 0] step:501/10000 train_time:40460ms step_avg:80.76ms +[2025-08-22 21:29:57] [Rank 0] step:501/10000 train_time:40460ms step_avg:80.76ms +[2025-08-22 21:29:59] [Rank 0] step:521/10000 train_time:42078ms step_avg:80.76ms +[2025-08-22 21:29:59] [Rank 0] step:521/10000 train_time:42078ms step_avg:80.76ms +[2025-08-22 21:30:00] [Rank 0] step:541/10000 train_time:43699ms 
step_avg:80.77ms +[2025-08-22 21:30:00] [Rank 0] step:541/10000 train_time:43699ms step_avg:80.77ms +[2025-08-22 21:30:02] [Rank 0] step:561/10000 train_time:45321ms step_avg:80.79ms +[2025-08-22 21:30:02] [Rank 0] step:561/10000 train_time:45321ms step_avg:80.79ms +[2025-08-22 21:30:03] [Rank 0] step:581/10000 train_time:46945ms step_avg:80.80ms +[2025-08-22 21:30:03] [Rank 0] step:581/10000 train_time:46945ms step_avg:80.80ms +[2025-08-22 21:30:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:30:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:30:18] [Rank 0] PRINT: step:600/10000 val_loss:5.7207 svd_entropy: attn_qk:H=0.5381,top10E=0.60,eRank=40.3,q75/q25=73.23 attn_vo:H=0.3221,top10E=0.94,eRank=9.1,q75/q25=81.73 mlp_w1:H=0.5649,top10E=0.59,eRank=48.8,q75/q25=7.22 mlp_w2:H=0.6509,top10E=0.44,eRank=83.2,q75/q25=7.95 vo_prod:H=0.2087,top10E=1.00,eRank=4.3,q75/q25=769.55 train_time:48879ms step_avg:81.47ms +[2025-08-22 21:30:18] [Rank 0] PRINT: step:600/10000 val_loss:5.7207 svd_entropy: attn_qk:H=0.5381,top10E=0.60,eRank=40.3,q75/q25=73.23 attn_vo:H=0.3221,top10E=0.94,eRank=9.1,q75/q25=81.73 mlp_w1:H=0.5649,top10E=0.59,eRank=48.8,q75/q25=7.22 mlp_w2:H=0.6509,top10E=0.44,eRank=83.2,q75/q25=7.95 vo_prod:H=0.2087,top10E=1.00,eRank=4.3,q75/q25=769.55 train_time:48879ms step_avg:81.47ms +[2025-08-22 21:30:18] [Rank 0] step:601/10000 train_time:48889ms step_avg:81.35ms +[2025-08-22 21:30:18] [Rank 0] step:601/10000 train_time:48889ms step_avg:81.35ms +[2025-08-22 21:30:20] [Rank 0] step:621/10000 train_time:50243ms step_avg:80.91ms +[2025-08-22 21:30:20] [Rank 0] step:621/10000 train_time:50243ms step_avg:80.91ms +[2025-08-22 21:30:22] [Rank 0] step:641/10000 train_time:51860ms step_avg:80.91ms +[2025-08-22 21:30:22] [Rank 0] step:641/10000 train_time:51860ms step_avg:80.91ms 
+[2025-08-22 21:30:23] [Rank 0] step:661/10000 train_time:53480ms step_avg:80.91ms +[2025-08-22 21:30:23] [Rank 0] step:661/10000 train_time:53480ms step_avg:80.91ms +[2025-08-22 21:30:25] [Rank 0] step:681/10000 train_time:55099ms step_avg:80.91ms +[2025-08-22 21:30:25] [Rank 0] step:681/10000 train_time:55099ms step_avg:80.91ms +[2025-08-22 21:30:27] [Rank 0] step:701/10000 train_time:56720ms step_avg:80.91ms +[2025-08-22 21:30:27] [Rank 0] step:701/10000 train_time:56720ms step_avg:80.91ms +[2025-08-22 21:30:28] [Rank 0] step:721/10000 train_time:58345ms step_avg:80.92ms +[2025-08-22 21:30:28] [Rank 0] step:721/10000 train_time:58345ms step_avg:80.92ms +[2025-08-22 21:30:30] [Rank 0] step:741/10000 train_time:59972ms step_avg:80.93ms +[2025-08-22 21:30:30] [Rank 0] step:741/10000 train_time:59972ms step_avg:80.93ms +[2025-08-22 21:30:31] [Rank 0] step:761/10000 train_time:61609ms step_avg:80.96ms +[2025-08-22 21:30:31] [Rank 0] step:761/10000 train_time:61609ms step_avg:80.96ms +[2025-08-22 21:30:33] [Rank 0] step:781/10000 train_time:63249ms step_avg:80.98ms +[2025-08-22 21:30:33] [Rank 0] step:781/10000 train_time:63249ms step_avg:80.98ms +[2025-08-22 21:30:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:30:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:30:48] [Rank 0] PRINT: step:800/10000 val_loss:5.4822 svd_entropy: attn_qk:H=0.5623,top10E=0.55,eRank=46.9,q75/q25=71.01 attn_vo:H=0.3522,top10E=0.91,eRank=11.2,q75/q25=74.21 mlp_w1:H=0.5975,top10E=0.53,eRank=61.0,q75/q25=7.17 mlp_w2:H=0.6961,top10E=0.37,eRank=110.7,q75/q25=9.18 vo_prod:H=0.2465,top10E=0.99,eRank=5.6,q75/q25=792.74 train_time:65172ms step_avg:81.47ms +[2025-08-22 21:30:48] [Rank 0] PRINT: step:800/10000 val_loss:5.4822 svd_entropy: attn_qk:H=0.5623,top10E=0.55,eRank=46.9,q75/q25=71.01 attn_vo:H=0.3522,top10E=0.91,eRank=11.2,q75/q25=74.21 mlp_w1:H=0.5975,top10E=0.53,eRank=61.0,q75/q25=7.17 mlp_w2:H=0.6961,top10E=0.37,eRank=110.7,q75/q25=9.18 vo_prod:H=0.2465,top10E=0.99,eRank=5.6,q75/q25=792.74 train_time:65172ms step_avg:81.47ms +[2025-08-22 21:30:48] [Rank 0] step:801/10000 train_time:65182ms step_avg:81.38ms +[2025-08-22 21:30:48] [Rank 0] step:801/10000 train_time:65182ms step_avg:81.38ms +[2025-08-22 21:30:50] [Rank 0] step:821/10000 train_time:66546ms step_avg:81.05ms +[2025-08-22 21:30:50] [Rank 0] step:821/10000 train_time:66546ms step_avg:81.05ms +[2025-08-22 21:30:52] [Rank 0] step:841/10000 train_time:68179ms step_avg:81.07ms +[2025-08-22 21:30:52] [Rank 0] step:841/10000 train_time:68179ms step_avg:81.07ms +[2025-08-22 21:30:53] [Rank 0] step:861/10000 train_time:69815ms step_avg:81.09ms +[2025-08-22 21:30:53] [Rank 0] step:861/10000 train_time:69815ms step_avg:81.09ms +[2025-08-22 21:30:55] [Rank 0] step:881/10000 train_time:71448ms step_avg:81.10ms +[2025-08-22 21:30:55] [Rank 0] step:881/10000 train_time:71448ms step_avg:81.10ms +[2025-08-22 21:30:56] [Rank 0] step:901/10000 train_time:73082ms step_avg:81.11ms +[2025-08-22 21:30:56] [Rank 0] step:901/10000 train_time:73082ms step_avg:81.11ms +[2025-08-22 21:30:58] [Rank 0] step:921/10000 train_time:74720ms step_avg:81.13ms +[2025-08-22 21:30:58] [Rank 0] step:921/10000 train_time:74720ms step_avg:81.13ms +[2025-08-22 21:31:00] [Rank 0] step:941/10000 train_time:76358ms 
step_avg:81.15ms +[2025-08-22 21:31:00] [Rank 0] step:941/10000 train_time:76358ms step_avg:81.15ms +[2025-08-22 21:31:01] [Rank 0] step:961/10000 train_time:77996ms step_avg:81.16ms +[2025-08-22 21:31:01] [Rank 0] step:961/10000 train_time:77996ms step_avg:81.16ms +[2025-08-22 21:31:03] [Rank 0] step:981/10000 train_time:79632ms step_avg:81.17ms +[2025-08-22 21:31:03] [Rank 0] step:981/10000 train_time:79632ms step_avg:81.17ms +[2025-08-22 21:31:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:31:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:31:18] [Rank 0] PRINT: step:1000/10000 val_loss:5.3070 svd_entropy: attn_qk:H=0.5802,top10E=0.52,eRank=52.6,q75/q25=69.33 attn_vo:H=0.3767,top10E=0.87,eRank=13.4,q75/q25=71.91 mlp_w1:H=0.6195,top10E=0.49,eRank=70.9,q75/q25=7.43 mlp_w2:H=0.7275,top10E=0.32,eRank=134.9,q75/q25=10.77 vo_prod:H=0.2735,top10E=0.97,eRank=6.8,q75/q25=894.60 train_time:81551ms step_avg:81.55ms +[2025-08-22 21:31:18] [Rank 0] PRINT: step:1000/10000 val_loss:5.3070 svd_entropy: attn_qk:H=0.5802,top10E=0.52,eRank=52.6,q75/q25=69.33 attn_vo:H=0.3767,top10E=0.87,eRank=13.4,q75/q25=71.91 mlp_w1:H=0.6195,top10E=0.49,eRank=70.9,q75/q25=7.43 mlp_w2:H=0.7275,top10E=0.32,eRank=134.9,q75/q25=10.77 vo_prod:H=0.2735,top10E=0.97,eRank=6.8,q75/q25=894.60 train_time:81551ms step_avg:81.55ms +[2025-08-22 21:31:18] [Rank 0] step:1001/10000 train_time:81560ms step_avg:81.48ms +[2025-08-22 21:31:18] [Rank 0] step:1001/10000 train_time:81560ms step_avg:81.48ms +[2025-08-22 21:31:20] [Rank 0] step:1021/10000 train_time:82939ms step_avg:81.23ms +[2025-08-22 21:31:20] [Rank 0] step:1021/10000 train_time:82939ms step_avg:81.23ms +[2025-08-22 21:31:22] [Rank 0] step:1041/10000 train_time:84569ms step_avg:81.24ms +[2025-08-22 21:31:22] [Rank 0] step:1041/10000 train_time:84569ms 
step_avg:81.24ms +[2025-08-22 21:31:23] [Rank 0] step:1061/10000 train_time:86203ms step_avg:81.25ms +[2025-08-22 21:31:23] [Rank 0] step:1061/10000 train_time:86203ms step_avg:81.25ms +[2025-08-22 21:31:25] [Rank 0] step:1081/10000 train_time:87836ms step_avg:81.25ms +[2025-08-22 21:31:25] [Rank 0] step:1081/10000 train_time:87836ms step_avg:81.25ms +[2025-08-22 21:31:26] [Rank 0] step:1101/10000 train_time:89472ms step_avg:81.26ms +[2025-08-22 21:31:26] [Rank 0] step:1101/10000 train_time:89472ms step_avg:81.26ms +[2025-08-22 21:31:28] [Rank 0] step:1121/10000 train_time:91105ms step_avg:81.27ms +[2025-08-22 21:31:28] [Rank 0] step:1121/10000 train_time:91105ms step_avg:81.27ms +[2025-08-22 21:31:30] [Rank 0] step:1141/10000 train_time:92741ms step_avg:81.28ms +[2025-08-22 21:31:30] [Rank 0] step:1141/10000 train_time:92741ms step_avg:81.28ms +[2025-08-22 21:31:31] [Rank 0] step:1161/10000 train_time:94376ms step_avg:81.29ms +[2025-08-22 21:31:31] [Rank 0] step:1161/10000 train_time:94376ms step_avg:81.29ms +[2025-08-22 21:31:33] [Rank 0] step:1181/10000 train_time:96013ms step_avg:81.30ms +[2025-08-22 21:31:33] [Rank 0] step:1181/10000 train_time:96013ms step_avg:81.30ms +[2025-08-22 21:31:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:31:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:31:48] [Rank 0] PRINT: step:1200/10000 val_loss:5.1680 svd_entropy: attn_qk:H=0.5932,top10E=0.49,eRank=57.4,q75/q25=70.73 attn_vo:H=0.3981,top10E=0.84,eRank=15.8,q75/q25=72.03 mlp_w1:H=0.6350,top10E=0.46,eRank=79.1,q75/q25=7.97 mlp_w2:H=0.7485,top10E=0.29,eRank=154.9,q75/q25=12.49 vo_prod:H=0.2945,top10E=0.95,eRank=8.0,q75/q25=1030.60 train_time:97932ms step_avg:81.61ms +[2025-08-22 21:31:48] [Rank 0] PRINT: step:1200/10000 val_loss:5.1680 svd_entropy: attn_qk:H=0.5932,top10E=0.49,eRank=57.4,q75/q25=70.73 attn_vo:H=0.3981,top10E=0.84,eRank=15.8,q75/q25=72.03 mlp_w1:H=0.6350,top10E=0.46,eRank=79.1,q75/q25=7.97 mlp_w2:H=0.7485,top10E=0.29,eRank=154.9,q75/q25=12.49 vo_prod:H=0.2945,top10E=0.95,eRank=8.0,q75/q25=1030.60 train_time:97932ms step_avg:81.61ms +[2025-08-22 21:31:48] [Rank 0] step:1201/10000 train_time:97943ms step_avg:81.55ms +[2025-08-22 21:31:48] [Rank 0] step:1201/10000 train_time:97943ms step_avg:81.55ms +[2025-08-22 21:31:50] [Rank 0] step:1221/10000 train_time:99307ms step_avg:81.33ms +[2025-08-22 21:31:50] [Rank 0] step:1221/10000 train_time:99307ms step_avg:81.33ms +[2025-08-22 21:31:52] [Rank 0] step:1241/10000 train_time:100942ms step_avg:81.34ms +[2025-08-22 21:31:52] [Rank 0] step:1241/10000 train_time:100942ms step_avg:81.34ms +[2025-08-22 21:31:53] [Rank 0] step:1261/10000 train_time:102578ms step_avg:81.35ms +[2025-08-22 21:31:53] [Rank 0] step:1261/10000 train_time:102578ms step_avg:81.35ms +[2025-08-22 21:31:55] [Rank 0] step:1281/10000 train_time:104213ms step_avg:81.35ms +[2025-08-22 21:31:55] [Rank 0] step:1281/10000 train_time:104213ms step_avg:81.35ms +[2025-08-22 21:31:56] [Rank 0] step:1301/10000 train_time:105850ms step_avg:81.36ms +[2025-08-22 21:31:56] [Rank 0] step:1301/10000 train_time:105850ms step_avg:81.36ms +[2025-08-22 21:31:58] [Rank 0] step:1321/10000 train_time:107489ms step_avg:81.37ms +[2025-08-22 21:31:58] [Rank 0] step:1321/10000 train_time:107489ms step_avg:81.37ms +[2025-08-22 21:32:00] [Rank 0] 
step:1341/10000 train_time:109128ms step_avg:81.38ms +[2025-08-22 21:32:00] [Rank 0] step:1341/10000 train_time:109128ms step_avg:81.38ms +[2025-08-22 21:32:01] [Rank 0] step:1361/10000 train_time:110769ms step_avg:81.39ms +[2025-08-22 21:32:01] [Rank 0] step:1361/10000 train_time:110769ms step_avg:81.39ms +[2025-08-22 21:32:03] [Rank 0] step:1381/10000 train_time:112410ms step_avg:81.40ms +[2025-08-22 21:32:03] [Rank 0] step:1381/10000 train_time:112410ms step_avg:81.40ms +[2025-08-22 21:32:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:32:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:32:18] [Rank 0] PRINT: step:1400/10000 val_loss:5.0663 svd_entropy: attn_qk:H=0.6037,top10E=0.47,eRank=61.7,q75/q25=72.91 attn_vo:H=0.4168,top10E=0.80,eRank=18.3,q75/q25=72.91 mlp_w1:H=0.6476,top10E=0.44,eRank=86.5,q75/q25=8.64 mlp_w2:H=0.7643,top10E=0.27,eRank=172.1,q75/q25=13.84 vo_prod:H=0.3135,top10E=0.93,eRank=9.3,q75/q25=1226.14 train_time:114335ms step_avg:81.67ms +[2025-08-22 21:32:18] [Rank 0] PRINT: step:1400/10000 val_loss:5.0663 svd_entropy: attn_qk:H=0.6037,top10E=0.47,eRank=61.7,q75/q25=72.91 attn_vo:H=0.4168,top10E=0.80,eRank=18.3,q75/q25=72.91 mlp_w1:H=0.6476,top10E=0.44,eRank=86.5,q75/q25=8.64 mlp_w2:H=0.7643,top10E=0.27,eRank=172.1,q75/q25=13.84 vo_prod:H=0.3135,top10E=0.93,eRank=9.3,q75/q25=1226.14 train_time:114335ms step_avg:81.67ms +[2025-08-22 21:32:18] [Rank 0] step:1401/10000 train_time:114345ms step_avg:81.62ms +[2025-08-22 21:32:18] [Rank 0] step:1401/10000 train_time:114345ms step_avg:81.62ms +[2025-08-22 21:32:20] [Rank 0] step:1421/10000 train_time:115718ms step_avg:81.43ms +[2025-08-22 21:32:20] [Rank 0] step:1421/10000 train_time:115718ms step_avg:81.43ms +[2025-08-22 21:32:22] [Rank 0] step:1441/10000 train_time:117353ms step_avg:81.44ms +[2025-08-22 
21:32:22] [Rank 0] step:1441/10000 train_time:117353ms step_avg:81.44ms +[2025-08-22 21:32:23] [Rank 0] step:1461/10000 train_time:118988ms step_avg:81.44ms +[2025-08-22 21:32:23] [Rank 0] step:1461/10000 train_time:118988ms step_avg:81.44ms +[2025-08-22 21:32:25] [Rank 0] step:1481/10000 train_time:120625ms step_avg:81.45ms +[2025-08-22 21:32:25] [Rank 0] step:1481/10000 train_time:120625ms step_avg:81.45ms +[2025-08-22 21:32:26] [Rank 0] step:1501/10000 train_time:122269ms step_avg:81.46ms +[2025-08-22 21:32:26] [Rank 0] step:1501/10000 train_time:122269ms step_avg:81.46ms +[2025-08-22 21:32:28] [Rank 0] step:1521/10000 train_time:123916ms step_avg:81.47ms +[2025-08-22 21:32:28] [Rank 0] step:1521/10000 train_time:123916ms step_avg:81.47ms +[2025-08-22 21:32:30] [Rank 0] step:1541/10000 train_time:125566ms step_avg:81.48ms +[2025-08-22 21:32:30] [Rank 0] step:1541/10000 train_time:125566ms step_avg:81.48ms +[2025-08-22 21:32:31] [Rank 0] step:1561/10000 train_time:127215ms step_avg:81.50ms +[2025-08-22 21:32:31] [Rank 0] step:1561/10000 train_time:127215ms step_avg:81.50ms +[2025-08-22 21:32:33] [Rank 0] step:1581/10000 train_time:128863ms step_avg:81.51ms +[2025-08-22 21:32:33] [Rank 0] step:1581/10000 train_time:128863ms step_avg:81.51ms +[2025-08-22 21:32:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:32:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:32:48] [Rank 0] PRINT: step:1600/10000 val_loss:4.9177 svd_entropy: attn_qk:H=0.6129,top10E=0.46,eRank=65.7,q75/q25=75.14 attn_vo:H=0.4336,top10E=0.77,eRank=20.9,q75/q25=74.86 mlp_w1:H=0.6584,top10E=0.42,eRank=93.4,q75/q25=9.33 mlp_w2:H=0.7768,top10E=0.25,eRank=187.2,q75/q25=15.04 vo_prod:H=0.3311,top10E=0.91,eRank=10.6,q75/q25=1572.27 train_time:130796ms step_avg:81.75ms +[2025-08-22 21:32:48] [Rank 0] PRINT: step:1600/10000 val_loss:4.9177 svd_entropy: attn_qk:H=0.6129,top10E=0.46,eRank=65.7,q75/q25=75.14 attn_vo:H=0.4336,top10E=0.77,eRank=20.9,q75/q25=74.86 mlp_w1:H=0.6584,top10E=0.42,eRank=93.4,q75/q25=9.33 mlp_w2:H=0.7768,top10E=0.25,eRank=187.2,q75/q25=15.04 vo_prod:H=0.3311,top10E=0.91,eRank=10.6,q75/q25=1572.27 train_time:130796ms step_avg:81.75ms +[2025-08-22 21:32:48] [Rank 0] step:1601/10000 train_time:130805ms step_avg:81.70ms +[2025-08-22 21:32:48] [Rank 0] step:1601/10000 train_time:130805ms step_avg:81.70ms +[2025-08-22 21:32:50] [Rank 0] step:1621/10000 train_time:132181ms step_avg:81.54ms +[2025-08-22 21:32:50] [Rank 0] step:1621/10000 train_time:132181ms step_avg:81.54ms +[2025-08-22 21:32:52] [Rank 0] step:1641/10000 train_time:133825ms step_avg:81.55ms +[2025-08-22 21:32:52] [Rank 0] step:1641/10000 train_time:133825ms step_avg:81.55ms +[2025-08-22 21:32:53] [Rank 0] step:1661/10000 train_time:135474ms step_avg:81.56ms +[2025-08-22 21:32:53] [Rank 0] step:1661/10000 train_time:135474ms step_avg:81.56ms +[2025-08-22 21:32:55] [Rank 0] step:1681/10000 train_time:137116ms step_avg:81.57ms +[2025-08-22 21:32:55] [Rank 0] step:1681/10000 train_time:137116ms step_avg:81.57ms +[2025-08-22 21:32:57] [Rank 0] step:1701/10000 train_time:138760ms step_avg:81.58ms +[2025-08-22 21:32:57] [Rank 0] step:1701/10000 train_time:138760ms step_avg:81.58ms +[2025-08-22 21:32:58] [Rank 0] step:1721/10000 train_time:140406ms step_avg:81.58ms +[2025-08-22 21:32:58] [Rank 0] step:1721/10000 train_time:140406ms step_avg:81.58ms +[2025-08-22 21:33:00] 
[Rank 0] step:1741/10000 train_time:142054ms step_avg:81.59ms +[2025-08-22 21:33:00] [Rank 0] step:1741/10000 train_time:142054ms step_avg:81.59ms +[2025-08-22 21:33:02] [Rank 0] step:1761/10000 train_time:143702ms step_avg:81.60ms +[2025-08-22 21:33:02] [Rank 0] step:1761/10000 train_time:143702ms step_avg:81.60ms +[2025-08-22 21:33:03] [Rank 0] step:1781/10000 train_time:145354ms step_avg:81.61ms +[2025-08-22 21:33:03] [Rank 0] step:1781/10000 train_time:145354ms step_avg:81.61ms +[2025-08-22 21:33:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:33:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:33:18] [Rank 0] PRINT: step:1800/10000 val_loss:4.7904 svd_entropy: attn_qk:H=0.6206,top10E=0.44,eRank=69.2,q75/q25=76.96 attn_vo:H=0.4479,top10E=0.74,eRank=23.4,q75/q25=78.29 mlp_w1:H=0.6674,top10E=0.41,eRank=99.6,q75/q25=10.01 mlp_w2:H=0.7870,top10E=0.24,eRank=200.4,q75/q25=15.74 vo_prod:H=0.3464,top10E=0.89,eRank=12.0,q75/q25=2019.39 train_time:147289ms step_avg:81.83ms +[2025-08-22 21:33:18] [Rank 0] PRINT: step:1800/10000 val_loss:4.7904 svd_entropy: attn_qk:H=0.6206,top10E=0.44,eRank=69.2,q75/q25=76.96 attn_vo:H=0.4479,top10E=0.74,eRank=23.4,q75/q25=78.29 mlp_w1:H=0.6674,top10E=0.41,eRank=99.6,q75/q25=10.01 mlp_w2:H=0.7870,top10E=0.24,eRank=200.4,q75/q25=15.74 vo_prod:H=0.3464,top10E=0.89,eRank=12.0,q75/q25=2019.39 train_time:147289ms step_avg:81.83ms +[2025-08-22 21:33:18] [Rank 0] step:1801/10000 train_time:147298ms step_avg:81.79ms +[2025-08-22 21:33:18] [Rank 0] step:1801/10000 train_time:147298ms step_avg:81.79ms +[2025-08-22 21:33:20] [Rank 0] step:1821/10000 train_time:148666ms step_avg:81.64ms +[2025-08-22 21:33:20] [Rank 0] step:1821/10000 train_time:148666ms step_avg:81.64ms +[2025-08-22 21:33:22] [Rank 0] step:1841/10000 train_time:150313ms step_avg:81.65ms 
+[2025-08-22 21:33:22] [Rank 0] step:1841/10000 train_time:150313ms step_avg:81.65ms +[2025-08-22 21:33:23] [Rank 0] step:1861/10000 train_time:151961ms step_avg:81.66ms +[2025-08-22 21:33:23] [Rank 0] step:1861/10000 train_time:151961ms step_avg:81.66ms +[2025-08-22 21:33:25] [Rank 0] step:1881/10000 train_time:153609ms step_avg:81.66ms +[2025-08-22 21:33:25] [Rank 0] step:1881/10000 train_time:153609ms step_avg:81.66ms +[2025-08-22 21:33:27] [Rank 0] step:1901/10000 train_time:155257ms step_avg:81.67ms +[2025-08-22 21:33:27] [Rank 0] step:1901/10000 train_time:155257ms step_avg:81.67ms +[2025-08-22 21:33:28] [Rank 0] step:1921/10000 train_time:156908ms step_avg:81.68ms +[2025-08-22 21:33:28] [Rank 0] step:1921/10000 train_time:156908ms step_avg:81.68ms +[2025-08-22 21:33:30] [Rank 0] step:1941/10000 train_time:158559ms step_avg:81.69ms +[2025-08-22 21:33:30] [Rank 0] step:1941/10000 train_time:158559ms step_avg:81.69ms +[2025-08-22 21:33:32] [Rank 0] step:1961/10000 train_time:160212ms step_avg:81.70ms +[2025-08-22 21:33:32] [Rank 0] step:1961/10000 train_time:160212ms step_avg:81.70ms +[2025-08-22 21:33:33] [Rank 0] step:1981/10000 train_time:161863ms step_avg:81.71ms +[2025-08-22 21:33:33] [Rank 0] step:1981/10000 train_time:161863ms step_avg:81.71ms +[2025-08-22 21:33:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:33:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:33:48] [Rank 0] PRINT: step:2000/10000 val_loss:4.7323 svd_entropy: attn_qk:H=0.6277,top10E=0.43,eRank=72.5,q75/q25=79.62 attn_vo:H=0.4604,top10E=0.72,eRank=25.8,q75/q25=81.74 mlp_w1:H=0.6752,top10E=0.39,eRank=105.3,q75/q25=10.66 mlp_w2:H=0.7953,top10E=0.23,eRank=211.8,q75/q25=16.15 vo_prod:H=0.3597,top10E=0.87,eRank=13.4,q75/q25=2528.53 train_time:163802ms step_avg:81.90ms +[2025-08-22 21:33:48] [Rank 0] PRINT: step:2000/10000 val_loss:4.7323 svd_entropy: attn_qk:H=0.6277,top10E=0.43,eRank=72.5,q75/q25=79.62 attn_vo:H=0.4604,top10E=0.72,eRank=25.8,q75/q25=81.74 mlp_w1:H=0.6752,top10E=0.39,eRank=105.3,q75/q25=10.66 mlp_w2:H=0.7953,top10E=0.23,eRank=211.8,q75/q25=16.15 vo_prod:H=0.3597,top10E=0.87,eRank=13.4,q75/q25=2528.53 train_time:163802ms step_avg:81.90ms +[2025-08-22 21:33:49] [Rank 0] step:2001/10000 train_time:163812ms step_avg:81.87ms +[2025-08-22 21:33:49] [Rank 0] step:2001/10000 train_time:163812ms step_avg:81.87ms +[2025-08-22 21:33:50] [Rank 0] step:2021/10000 train_time:165185ms step_avg:81.73ms +[2025-08-22 21:33:50] [Rank 0] step:2021/10000 train_time:165185ms step_avg:81.73ms +[2025-08-22 21:33:52] [Rank 0] step:2041/10000 train_time:167226ms step_avg:81.93ms +[2025-08-22 21:33:52] [Rank 0] step:2041/10000 train_time:167226ms step_avg:81.93ms +[2025-08-22 21:33:54] [Rank 0] step:2061/10000 train_time:168873ms step_avg:81.94ms +[2025-08-22 21:33:54] [Rank 0] step:2061/10000 train_time:168873ms step_avg:81.94ms +[2025-08-22 21:33:56] [Rank 0] step:2081/10000 train_time:170518ms step_avg:81.94ms +[2025-08-22 21:33:56] [Rank 0] step:2081/10000 train_time:170518ms step_avg:81.94ms +[2025-08-22 21:33:57] [Rank 0] step:2101/10000 train_time:172164ms step_avg:81.94ms +[2025-08-22 21:33:57] [Rank 0] step:2101/10000 train_time:172164ms step_avg:81.94ms +[2025-08-22 21:33:59] [Rank 0] step:2121/10000 train_time:173813ms step_avg:81.95ms +[2025-08-22 21:33:59] [Rank 0] step:2121/10000 train_time:173813ms step_avg:81.95ms +[2025-08-22 21:34:00] 
[Rank 0] step:2141/10000 train_time:175460ms step_avg:81.95ms +[2025-08-22 21:34:00] [Rank 0] step:2141/10000 train_time:175460ms step_avg:81.95ms +[2025-08-22 21:34:02] [Rank 0] step:2161/10000 train_time:177109ms step_avg:81.96ms +[2025-08-22 21:34:02] [Rank 0] step:2161/10000 train_time:177109ms step_avg:81.96ms +[2025-08-22 21:34:04] [Rank 0] step:2181/10000 train_time:178758ms step_avg:81.96ms +[2025-08-22 21:34:04] [Rank 0] step:2181/10000 train_time:178758ms step_avg:81.96ms +[2025-08-22 21:34:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:34:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:34:19] [Rank 0] PRINT: step:2200/10000 val_loss:4.6332 svd_entropy: attn_qk:H=0.6333,top10E=0.42,eRank=75.4,q75/q25=80.77 attn_vo:H=0.4705,top10E=0.70,eRank=27.9,q75/q25=84.60 mlp_w1:H=0.6817,top10E=0.38,eRank=110.4,q75/q25=11.23 mlp_w2:H=0.8020,top10E=0.23,eRank=221.5,q75/q25=16.38 vo_prod:H=0.3706,top10E=0.85,eRank=14.6,q75/q25=2957.59 train_time:180691ms step_avg:82.13ms +[2025-08-22 21:34:19] [Rank 0] PRINT: step:2200/10000 val_loss:4.6332 svd_entropy: attn_qk:H=0.6333,top10E=0.42,eRank=75.4,q75/q25=80.77 attn_vo:H=0.4705,top10E=0.70,eRank=27.9,q75/q25=84.60 mlp_w1:H=0.6817,top10E=0.38,eRank=110.4,q75/q25=11.23 mlp_w2:H=0.8020,top10E=0.23,eRank=221.5,q75/q25=16.38 vo_prod:H=0.3706,top10E=0.85,eRank=14.6,q75/q25=2957.59 train_time:180691ms step_avg:82.13ms +[2025-08-22 21:34:19] [Rank 0] step:2201/10000 train_time:180702ms step_avg:82.10ms +[2025-08-22 21:34:19] [Rank 0] step:2201/10000 train_time:180702ms step_avg:82.10ms +[2025-08-22 21:34:21] [Rank 0] step:2221/10000 train_time:182075ms step_avg:81.98ms +[2025-08-22 21:34:21] [Rank 0] step:2221/10000 train_time:182075ms step_avg:81.98ms +[2025-08-22 21:34:22] [Rank 0] step:2241/10000 train_time:183751ms step_avg:82.00ms 
+[2025-08-22 21:34:22] [Rank 0] step:2241/10000 train_time:183751ms step_avg:82.00ms +[2025-08-22 21:34:24] [Rank 0] step:2261/10000 train_time:185442ms step_avg:82.02ms +[2025-08-22 21:34:24] [Rank 0] step:2261/10000 train_time:185442ms step_avg:82.02ms +[2025-08-22 21:34:26] [Rank 0] step:2281/10000 train_time:187133ms step_avg:82.04ms +[2025-08-22 21:34:26] [Rank 0] step:2281/10000 train_time:187133ms step_avg:82.04ms +[2025-08-22 21:34:27] [Rank 0] step:2301/10000 train_time:188824ms step_avg:82.06ms +[2025-08-22 21:34:27] [Rank 0] step:2301/10000 train_time:188824ms step_avg:82.06ms +[2025-08-22 21:34:29] [Rank 0] step:2321/10000 train_time:190515ms step_avg:82.08ms +[2025-08-22 21:34:29] [Rank 0] step:2321/10000 train_time:190515ms step_avg:82.08ms +[2025-08-22 21:34:31] [Rank 0] step:2341/10000 train_time:192208ms step_avg:82.10ms +[2025-08-22 21:34:31] [Rank 0] step:2341/10000 train_time:192208ms step_avg:82.10ms +[2025-08-22 21:34:32] [Rank 0] step:2361/10000 train_time:193900ms step_avg:82.13ms +[2025-08-22 21:34:32] [Rank 0] step:2361/10000 train_time:193900ms step_avg:82.13ms +[2025-08-22 21:34:34] [Rank 0] step:2381/10000 train_time:195595ms step_avg:82.15ms +[2025-08-22 21:34:34] [Rank 0] step:2381/10000 train_time:195595ms step_avg:82.15ms +[2025-08-22 21:34:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:34:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:34:49] [Rank 0] PRINT: step:2400/10000 val_loss:4.5644 svd_entropy: attn_qk:H=0.6384,top10E=0.41,eRank=78.1,q75/q25=82.98 attn_vo:H=0.4797,top10E=0.68,eRank=29.9,q75/q25=87.48 mlp_w1:H=0.6877,top10E=0.37,eRank=115.3,q75/q25=11.78 mlp_w2:H=0.8081,top10E=0.22,eRank=230.6,q75/q25=16.47 vo_prod:H=0.3814,top10E=0.83,eRank=15.9,q75/q25=3572.42 train_time:197581ms step_avg:82.33ms +[2025-08-22 21:34:49] [Rank 0] PRINT: step:2400/10000 val_loss:4.5644 svd_entropy: attn_qk:H=0.6384,top10E=0.41,eRank=78.1,q75/q25=82.98 attn_vo:H=0.4797,top10E=0.68,eRank=29.9,q75/q25=87.48 mlp_w1:H=0.6877,top10E=0.37,eRank=115.3,q75/q25=11.78 mlp_w2:H=0.8081,top10E=0.22,eRank=230.6,q75/q25=16.47 vo_prod:H=0.3814,top10E=0.83,eRank=15.9,q75/q25=3572.42 train_time:197581ms step_avg:82.33ms +[2025-08-22 21:34:49] [Rank 0] step:2401/10000 train_time:197592ms step_avg:82.30ms +[2025-08-22 21:34:49] [Rank 0] step:2401/10000 train_time:197592ms step_avg:82.30ms +[2025-08-22 21:34:51] [Rank 0] step:2421/10000 train_time:199001ms step_avg:82.20ms +[2025-08-22 21:34:51] [Rank 0] step:2421/10000 train_time:199001ms step_avg:82.20ms +[2025-08-22 21:34:53] [Rank 0] step:2441/10000 train_time:200694ms step_avg:82.22ms +[2025-08-22 21:34:53] [Rank 0] step:2441/10000 train_time:200694ms step_avg:82.22ms +[2025-08-22 21:34:55] [Rank 0] step:2461/10000 train_time:202386ms step_avg:82.24ms +[2025-08-22 21:34:55] [Rank 0] step:2461/10000 train_time:202386ms step_avg:82.24ms +[2025-08-22 21:34:56] [Rank 0] step:2481/10000 train_time:204080ms step_avg:82.26ms +[2025-08-22 21:34:56] [Rank 0] step:2481/10000 train_time:204080ms step_avg:82.26ms +[2025-08-22 21:34:58] [Rank 0] step:2501/10000 train_time:205775ms step_avg:82.28ms +[2025-08-22 21:34:58] [Rank 0] step:2501/10000 train_time:205775ms step_avg:82.28ms +[2025-08-22 21:35:00] [Rank 0] step:2521/10000 train_time:207470ms step_avg:82.30ms +[2025-08-22 21:35:00] [Rank 0] step:2521/10000 train_time:207470ms step_avg:82.30ms +[2025-08-22 21:35:01] 
[Rank 0] step:2541/10000 train_time:209179ms step_avg:82.32ms +[2025-08-22 21:35:01] [Rank 0] step:2541/10000 train_time:209179ms step_avg:82.32ms +[2025-08-22 21:35:03] [Rank 0] step:2561/10000 train_time:210863ms step_avg:82.34ms +[2025-08-22 21:35:03] [Rank 0] step:2561/10000 train_time:210863ms step_avg:82.34ms +[2025-08-22 21:35:05] [Rank 0] step:2581/10000 train_time:212562ms step_avg:82.36ms +[2025-08-22 21:35:05] [Rank 0] step:2581/10000 train_time:212562ms step_avg:82.36ms +[2025-08-22 21:35:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:35:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:35:20] [Rank 0] PRINT: step:2600/10000 val_loss:4.4950 svd_entropy: attn_qk:H=0.6433,top10E=0.41,eRank=80.7,q75/q25=84.35 attn_vo:H=0.4880,top10E=0.66,eRank=31.9,q75/q25=90.24 mlp_w1:H=0.6930,top10E=0.36,eRank=119.8,q75/q25=12.22 mlp_w2:H=0.8134,top10E=0.21,eRank=238.7,q75/q25=16.46 vo_prod:H=0.3907,top10E=0.82,eRank=17.0,q75/q25=4113.57 train_time:214555ms step_avg:82.52ms +[2025-08-22 21:35:20] [Rank 0] PRINT: step:2600/10000 val_loss:4.4950 svd_entropy: attn_qk:H=0.6433,top10E=0.41,eRank=80.7,q75/q25=84.35 attn_vo:H=0.4880,top10E=0.66,eRank=31.9,q75/q25=90.24 mlp_w1:H=0.6930,top10E=0.36,eRank=119.8,q75/q25=12.22 mlp_w2:H=0.8134,top10E=0.21,eRank=238.7,q75/q25=16.46 vo_prod:H=0.3907,top10E=0.82,eRank=17.0,q75/q25=4113.57 train_time:214555ms step_avg:82.52ms +[2025-08-22 21:35:20] [Rank 0] step:2601/10000 train_time:214565ms step_avg:82.49ms +[2025-08-22 21:35:20] [Rank 0] step:2601/10000 train_time:214565ms step_avg:82.49ms +[2025-08-22 21:35:22] [Rank 0] step:2621/10000 train_time:215967ms step_avg:82.40ms +[2025-08-22 21:35:22] [Rank 0] step:2621/10000 train_time:215967ms step_avg:82.40ms +[2025-08-22 21:35:23] [Rank 0] step:2641/10000 train_time:217659ms step_avg:82.42ms 
+[2025-08-22 21:35:23] [Rank 0] step:2641/10000 train_time:217659ms step_avg:82.42ms +[2025-08-22 21:35:25] [Rank 0] step:2661/10000 train_time:219349ms step_avg:82.43ms +[2025-08-22 21:35:25] [Rank 0] step:2661/10000 train_time:219349ms step_avg:82.43ms +[2025-08-22 21:35:27] [Rank 0] step:2681/10000 train_time:221041ms step_avg:82.45ms +[2025-08-22 21:35:27] [Rank 0] step:2681/10000 train_time:221041ms step_avg:82.45ms +[2025-08-22 21:35:28] [Rank 0] step:2701/10000 train_time:222734ms step_avg:82.46ms +[2025-08-22 21:35:28] [Rank 0] step:2701/10000 train_time:222734ms step_avg:82.46ms +[2025-08-22 21:35:30] [Rank 0] step:2721/10000 train_time:224426ms step_avg:82.48ms +[2025-08-22 21:35:30] [Rank 0] step:2721/10000 train_time:224426ms step_avg:82.48ms +[2025-08-22 21:35:32] [Rank 0] step:2741/10000 train_time:226119ms step_avg:82.50ms +[2025-08-22 21:35:32] [Rank 0] step:2741/10000 train_time:226119ms step_avg:82.50ms +[2025-08-22 21:35:34] [Rank 0] step:2761/10000 train_time:227814ms step_avg:82.51ms +[2025-08-22 21:35:34] [Rank 0] step:2761/10000 train_time:227814ms step_avg:82.51ms +[2025-08-22 21:35:35] [Rank 0] step:2781/10000 train_time:229508ms step_avg:82.53ms +[2025-08-22 21:35:35] [Rank 0] step:2781/10000 train_time:229508ms step_avg:82.53ms +[2025-08-22 21:35:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:35:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:35:50] [Rank 0] PRINT: step:2800/10000 val_loss:4.4653 svd_entropy: attn_qk:H=0.6477,top10E=0.40,eRank=83.1,q75/q25=85.86 attn_vo:H=0.4954,top10E=0.65,eRank=33.8,q75/q25=93.44 mlp_w1:H=0.6978,top10E=0.35,eRank=124.2,q75/q25=12.63 mlp_w2:H=0.8182,top10E=0.21,eRank=246.2,q75/q25=16.32 vo_prod:H=0.3985,top10E=0.81,eRank=18.1,q75/q25=4739.31 train_time:231494ms step_avg:82.68ms +[2025-08-22 21:35:50] [Rank 0] PRINT: step:2800/10000 val_loss:4.4653 svd_entropy: attn_qk:H=0.6477,top10E=0.40,eRank=83.1,q75/q25=85.86 attn_vo:H=0.4954,top10E=0.65,eRank=33.8,q75/q25=93.44 mlp_w1:H=0.6978,top10E=0.35,eRank=124.2,q75/q25=12.63 mlp_w2:H=0.8182,top10E=0.21,eRank=246.2,q75/q25=16.32 vo_prod:H=0.3985,top10E=0.81,eRank=18.1,q75/q25=4739.31 train_time:231494ms step_avg:82.68ms +[2025-08-22 21:35:51] [Rank 0] step:2801/10000 train_time:231504ms step_avg:82.65ms +[2025-08-22 21:35:51] [Rank 0] step:2801/10000 train_time:231504ms step_avg:82.65ms +[2025-08-22 21:35:52] [Rank 0] step:2821/10000 train_time:232924ms step_avg:82.57ms +[2025-08-22 21:35:52] [Rank 0] step:2821/10000 train_time:232924ms step_avg:82.57ms +[2025-08-22 21:35:54] [Rank 0] step:2841/10000 train_time:234612ms step_avg:82.58ms +[2025-08-22 21:35:54] [Rank 0] step:2841/10000 train_time:234612ms step_avg:82.58ms +[2025-08-22 21:35:56] [Rank 0] step:2861/10000 train_time:236299ms step_avg:82.59ms +[2025-08-22 21:35:56] [Rank 0] step:2861/10000 train_time:236299ms step_avg:82.59ms +[2025-08-22 21:35:57] [Rank 0] step:2881/10000 train_time:237987ms step_avg:82.61ms +[2025-08-22 21:35:57] [Rank 0] step:2881/10000 train_time:237987ms step_avg:82.61ms +[2025-08-22 21:35:59] [Rank 0] step:2901/10000 train_time:239677ms step_avg:82.62ms +[2025-08-22 21:35:59] [Rank 0] step:2901/10000 train_time:239677ms step_avg:82.62ms +[2025-08-22 21:36:01] [Rank 0] step:2921/10000 train_time:241367ms step_avg:82.63ms +[2025-08-22 21:36:01] [Rank 0] step:2921/10000 train_time:241367ms step_avg:82.63ms +[2025-08-22 21:36:02] 
[Rank 0] step:2941/10000 train_time:243062ms step_avg:82.65ms +[2025-08-22 21:36:02] [Rank 0] step:2941/10000 train_time:243062ms step_avg:82.65ms +[2025-08-22 21:36:04] [Rank 0] step:2961/10000 train_time:244756ms step_avg:82.66ms +[2025-08-22 21:36:04] [Rank 0] step:2961/10000 train_time:244756ms step_avg:82.66ms +[2025-08-22 21:36:06] [Rank 0] step:2981/10000 train_time:246455ms step_avg:82.68ms +[2025-08-22 21:36:06] [Rank 0] step:2981/10000 train_time:246455ms step_avg:82.68ms +[2025-08-22 21:36:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:36:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:36:21] [Rank 0] PRINT: step:3000/10000 val_loss:4.4186 svd_entropy: attn_qk:H=0.6518,top10E=0.39,eRank=85.4,q75/q25=86.71 attn_vo:H=0.5021,top10E=0.63,eRank=35.6,q75/q25=95.67 mlp_w1:H=0.7022,top10E=0.35,eRank=128.2,q75/q25=13.03 mlp_w2:H=0.8224,top10E=0.20,eRank=253.0,q75/q25=16.12 vo_prod:H=0.4058,top10E=0.79,eRank=19.1,q75/q25=5340.30 train_time:248453ms step_avg:82.82ms +[2025-08-22 21:36:21] [Rank 0] PRINT: step:3000/10000 val_loss:4.4186 svd_entropy: attn_qk:H=0.6518,top10E=0.39,eRank=85.4,q75/q25=86.71 attn_vo:H=0.5021,top10E=0.63,eRank=35.6,q75/q25=95.67 mlp_w1:H=0.7022,top10E=0.35,eRank=128.2,q75/q25=13.03 mlp_w2:H=0.8224,top10E=0.20,eRank=253.0,q75/q25=16.12 vo_prod:H=0.4058,top10E=0.79,eRank=19.1,q75/q25=5340.30 train_time:248453ms step_avg:82.82ms +[2025-08-22 21:36:21] [Rank 0] step:3001/10000 train_time:248463ms step_avg:82.79ms +[2025-08-22 21:36:21] [Rank 0] step:3001/10000 train_time:248463ms step_avg:82.79ms +[2025-08-22 21:36:23] [Rank 0] step:3021/10000 train_time:249886ms step_avg:82.72ms +[2025-08-22 21:36:23] [Rank 0] step:3021/10000 train_time:249886ms step_avg:82.72ms +[2025-08-22 21:36:25] [Rank 0] step:3041/10000 train_time:251651ms step_avg:82.75ms 
+[2025-08-22 21:36:25] [Rank 0] step:3041/10000 train_time:251651ms step_avg:82.75ms +[2025-08-22 21:36:26] [Rank 0] step:3061/10000 train_time:253351ms step_avg:82.77ms +[2025-08-22 21:36:26] [Rank 0] step:3061/10000 train_time:253351ms step_avg:82.77ms +[2025-08-22 21:36:28] [Rank 0] step:3081/10000 train_time:255051ms step_avg:82.78ms +[2025-08-22 21:36:28] [Rank 0] step:3081/10000 train_time:255051ms step_avg:82.78ms +[2025-08-22 21:36:30] [Rank 0] step:3101/10000 train_time:256754ms step_avg:82.80ms +[2025-08-22 21:36:30] [Rank 0] step:3101/10000 train_time:256754ms step_avg:82.80ms +[2025-08-22 21:36:31] [Rank 0] step:3121/10000 train_time:258459ms step_avg:82.81ms +[2025-08-22 21:36:31] [Rank 0] step:3121/10000 train_time:258459ms step_avg:82.81ms +[2025-08-22 21:36:33] [Rank 0] step:3141/10000 train_time:260163ms step_avg:82.83ms +[2025-08-22 21:36:33] [Rank 0] step:3141/10000 train_time:260163ms step_avg:82.83ms +[2025-08-22 21:36:35] [Rank 0] step:3161/10000 train_time:261869ms step_avg:82.84ms +[2025-08-22 21:36:35] [Rank 0] step:3161/10000 train_time:261869ms step_avg:82.84ms +[2025-08-22 21:36:36] [Rank 0] step:3181/10000 train_time:263576ms step_avg:82.86ms +[2025-08-22 21:36:36] [Rank 0] step:3181/10000 train_time:263576ms step_avg:82.86ms +[2025-08-22 21:36:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:36:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:36:52] [Rank 0] PRINT: step:3200/10000 val_loss:4.4036 svd_entropy: attn_qk:H=0.6558,top10E=0.39,eRank=87.6,q75/q25=88.04 attn_vo:H=0.5083,top10E=0.62,eRank=37.3,q75/q25=97.58 mlp_w1:H=0.7061,top10E=0.34,eRank=131.9,q75/q25=13.33 mlp_w2:H=0.8264,top10E=0.20,eRank=259.4,q75/q25=15.92 vo_prod:H=0.4126,top10E=0.78,eRank=20.1,q75/q25=5898.54 train_time:265578ms step_avg:82.99ms +[2025-08-22 21:36:52] [Rank 0] PRINT: step:3200/10000 val_loss:4.4036 svd_entropy: attn_qk:H=0.6558,top10E=0.39,eRank=87.6,q75/q25=88.04 attn_vo:H=0.5083,top10E=0.62,eRank=37.3,q75/q25=97.58 mlp_w1:H=0.7061,top10E=0.34,eRank=131.9,q75/q25=13.33 mlp_w2:H=0.8264,top10E=0.20,eRank=259.4,q75/q25=15.92 vo_prod:H=0.4126,top10E=0.78,eRank=20.1,q75/q25=5898.54 train_time:265578ms step_avg:82.99ms +[2025-08-22 21:36:52] [Rank 0] step:3201/10000 train_time:265589ms step_avg:82.97ms +[2025-08-22 21:36:52] [Rank 0] step:3201/10000 train_time:265589ms step_avg:82.97ms +[2025-08-22 21:36:53] [Rank 0] step:3221/10000 train_time:267003ms step_avg:82.89ms +[2025-08-22 21:36:53] [Rank 0] step:3221/10000 train_time:267003ms step_avg:82.89ms +[2025-08-22 21:36:55] [Rank 0] step:3241/10000 train_time:268699ms step_avg:82.91ms +[2025-08-22 21:36:55] [Rank 0] step:3241/10000 train_time:268699ms step_avg:82.91ms +[2025-08-22 21:36:57] [Rank 0] step:3261/10000 train_time:270396ms step_avg:82.92ms +[2025-08-22 21:36:57] [Rank 0] step:3261/10000 train_time:270396ms step_avg:82.92ms +[2025-08-22 21:36:59] [Rank 0] step:3281/10000 train_time:272097ms step_avg:82.93ms +[2025-08-22 21:36:59] [Rank 0] step:3281/10000 train_time:272097ms step_avg:82.93ms +[2025-08-22 21:37:00] [Rank 0] step:3301/10000 train_time:273796ms step_avg:82.94ms +[2025-08-22 21:37:00] [Rank 0] step:3301/10000 train_time:273796ms step_avg:82.94ms +[2025-08-22 21:37:02] [Rank 0] step:3321/10000 train_time:275498ms step_avg:82.96ms +[2025-08-22 21:37:02] [Rank 0] step:3321/10000 train_time:275498ms step_avg:82.96ms +[2025-08-22 21:37:04] 
[Rank 0] step:3341/10000 train_time:277199ms step_avg:82.97ms +[2025-08-22 21:37:04] [Rank 0] step:3341/10000 train_time:277199ms step_avg:82.97ms +[2025-08-22 21:37:05] [Rank 0] step:3361/10000 train_time:278900ms step_avg:82.98ms +[2025-08-22 21:37:05] [Rank 0] step:3361/10000 train_time:278900ms step_avg:82.98ms +[2025-08-22 21:37:07] [Rank 0] step:3381/10000 train_time:280603ms step_avg:82.99ms +[2025-08-22 21:37:07] [Rank 0] step:3381/10000 train_time:280603ms step_avg:82.99ms +[2025-08-22 21:37:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:37:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:37:22] [Rank 0] PRINT: step:3400/10000 val_loss:4.3600 svd_entropy: attn_qk:H=0.6594,top10E=0.38,eRank=89.8,q75/q25=88.83 attn_vo:H=0.5141,top10E=0.61,eRank=39.0,q75/q25=100.94 mlp_w1:H=0.7097,top10E=0.34,eRank=135.5,q75/q25=13.60 mlp_w2:H=0.8300,top10E=0.19,eRank=265.2,q75/q25=15.73 vo_prod:H=0.4193,top10E=0.77,eRank=21.1,q75/q25=6526.07 train_time:282598ms step_avg:83.12ms +[2025-08-22 21:37:22] [Rank 0] PRINT: step:3400/10000 val_loss:4.3600 svd_entropy: attn_qk:H=0.6594,top10E=0.38,eRank=89.8,q75/q25=88.83 attn_vo:H=0.5141,top10E=0.61,eRank=39.0,q75/q25=100.94 mlp_w1:H=0.7097,top10E=0.34,eRank=135.5,q75/q25=13.60 mlp_w2:H=0.8300,top10E=0.19,eRank=265.2,q75/q25=15.73 vo_prod:H=0.4193,top10E=0.77,eRank=21.1,q75/q25=6526.07 train_time:282598ms step_avg:83.12ms +[2025-08-22 21:37:22] [Rank 0] step:3401/10000 train_time:282609ms step_avg:83.10ms +[2025-08-22 21:37:22] [Rank 0] step:3401/10000 train_time:282609ms step_avg:83.10ms +[2025-08-22 21:37:24] [Rank 0] step:3421/10000 train_time:284030ms step_avg:83.03ms +[2025-08-22 21:37:24] [Rank 0] step:3421/10000 train_time:284030ms step_avg:83.03ms +[2025-08-22 21:37:26] [Rank 0] step:3441/10000 train_time:285724ms step_avg:83.04ms 
+[2025-08-22 21:37:26] [Rank 0] step:3441/10000 train_time:285724ms step_avg:83.04ms +[2025-08-22 21:37:28] [Rank 0] step:3461/10000 train_time:287481ms step_avg:83.06ms +[2025-08-22 21:37:28] [Rank 0] step:3461/10000 train_time:287481ms step_avg:83.06ms +[2025-08-22 21:37:29] [Rank 0] step:3481/10000 train_time:289177ms step_avg:83.07ms +[2025-08-22 21:37:29] [Rank 0] step:3481/10000 train_time:289177ms step_avg:83.07ms +[2025-08-22 21:37:31] [Rank 0] step:3501/10000 train_time:290873ms step_avg:83.08ms +[2025-08-22 21:37:31] [Rank 0] step:3501/10000 train_time:290873ms step_avg:83.08ms +[2025-08-22 21:37:33] [Rank 0] step:3521/10000 train_time:292572ms step_avg:83.09ms +[2025-08-22 21:37:33] [Rank 0] step:3521/10000 train_time:292572ms step_avg:83.09ms +[2025-08-22 21:37:34] [Rank 0] step:3541/10000 train_time:294269ms step_avg:83.10ms +[2025-08-22 21:37:34] [Rank 0] step:3541/10000 train_time:294269ms step_avg:83.10ms +[2025-08-22 21:37:36] [Rank 0] step:3561/10000 train_time:295968ms step_avg:83.11ms +[2025-08-22 21:37:36] [Rank 0] step:3561/10000 train_time:295968ms step_avg:83.11ms +[2025-08-22 21:37:38] [Rank 0] step:3581/10000 train_time:297669ms step_avg:83.12ms +[2025-08-22 21:37:38] [Rank 0] step:3581/10000 train_time:297669ms step_avg:83.12ms +[2025-08-22 21:37:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:37:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:37:53] [Rank 0] PRINT: step:3600/10000 val_loss:4.3414 svd_entropy: attn_qk:H=0.6630,top10E=0.37,eRank=91.9,q75/q25=89.75 attn_vo:H=0.5192,top10E=0.60,eRank=40.6,q75/q25=102.51 mlp_w1:H=0.7130,top10E=0.33,eRank=138.9,q75/q25=13.85 mlp_w2:H=0.8332,top10E=0.19,eRank=270.6,q75/q25=15.48 vo_prod:H=0.4243,top10E=0.76,eRank=22.0,q75/q25=6979.78 train_time:299663ms step_avg:83.24ms +[2025-08-22 21:37:53] [Rank 0] PRINT: step:3600/10000 val_loss:4.3414 svd_entropy: attn_qk:H=0.6630,top10E=0.37,eRank=91.9,q75/q25=89.75 attn_vo:H=0.5192,top10E=0.60,eRank=40.6,q75/q25=102.51 mlp_w1:H=0.7130,top10E=0.33,eRank=138.9,q75/q25=13.85 mlp_w2:H=0.8332,top10E=0.19,eRank=270.6,q75/q25=15.48 vo_prod:H=0.4243,top10E=0.76,eRank=22.0,q75/q25=6979.78 train_time:299663ms step_avg:83.24ms +[2025-08-22 21:37:53] [Rank 0] step:3601/10000 train_time:299674ms step_avg:83.22ms +[2025-08-22 21:37:53] [Rank 0] step:3601/10000 train_time:299674ms step_avg:83.22ms +[2025-08-22 21:37:55] [Rank 0] step:3621/10000 train_time:301102ms step_avg:83.15ms +[2025-08-22 21:37:55] [Rank 0] step:3621/10000 train_time:301102ms step_avg:83.15ms +[2025-08-22 21:37:56] [Rank 0] step:3641/10000 train_time:302797ms step_avg:83.16ms +[2025-08-22 21:37:56] [Rank 0] step:3641/10000 train_time:302797ms step_avg:83.16ms +[2025-08-22 21:37:58] [Rank 0] step:3661/10000 train_time:304493ms step_avg:83.17ms +[2025-08-22 21:37:58] [Rank 0] step:3661/10000 train_time:304493ms step_avg:83.17ms +[2025-08-22 21:38:00] [Rank 0] step:3681/10000 train_time:306191ms step_avg:83.18ms +[2025-08-22 21:38:00] [Rank 0] step:3681/10000 train_time:306191ms step_avg:83.18ms +[2025-08-22 21:38:01] [Rank 0] step:3701/10000 train_time:307891ms step_avg:83.19ms +[2025-08-22 21:38:01] [Rank 0] step:3701/10000 train_time:307891ms step_avg:83.19ms +[2025-08-22 21:38:03] [Rank 0] step:3721/10000 train_time:309613ms step_avg:83.21ms +[2025-08-22 21:38:03] [Rank 0] step:3721/10000 train_time:309613ms step_avg:83.21ms +[2025-08-22 
21:38:05] [Rank 0] step:3741/10000 train_time:311349ms step_avg:83.23ms +[2025-08-22 21:38:05] [Rank 0] step:3741/10000 train_time:311349ms step_avg:83.23ms +[2025-08-22 21:38:07] [Rank 0] step:3761/10000 train_time:313084ms step_avg:83.24ms +[2025-08-22 21:38:07] [Rank 0] step:3761/10000 train_time:313084ms step_avg:83.24ms +[2025-08-22 21:38:08] [Rank 0] step:3781/10000 train_time:314822ms step_avg:83.26ms +[2025-08-22 21:38:08] [Rank 0] step:3781/10000 train_time:314822ms step_avg:83.26ms +[2025-08-22 21:38:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:38:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:38:24] [Rank 0] PRINT: step:3800/10000 val_loss:4.2792 svd_entropy: attn_qk:H=0.6662,top10E=0.37,eRank=93.9,q75/q25=90.14 attn_vo:H=0.5241,top10E=0.59,eRank=42.1,q75/q25=103.93 mlp_w1:H=0.7161,top10E=0.33,eRank=142.1,q75/q25=14.05 mlp_w2:H=0.8360,top10E=0.19,eRank=275.5,q75/q25=15.30 vo_prod:H=0.4298,top10E=0.75,eRank=22.9,q75/q25=7449.49 train_time:316861ms step_avg:83.38ms +[2025-08-22 21:38:24] [Rank 0] PRINT: step:3800/10000 val_loss:4.2792 svd_entropy: attn_qk:H=0.6662,top10E=0.37,eRank=93.9,q75/q25=90.14 attn_vo:H=0.5241,top10E=0.59,eRank=42.1,q75/q25=103.93 mlp_w1:H=0.7161,top10E=0.33,eRank=142.1,q75/q25=14.05 mlp_w2:H=0.8360,top10E=0.19,eRank=275.5,q75/q25=15.30 vo_prod:H=0.4298,top10E=0.75,eRank=22.9,q75/q25=7449.49 train_time:316861ms step_avg:83.38ms +[2025-08-22 21:38:24] [Rank 0] step:3801/10000 train_time:316871ms step_avg:83.37ms +[2025-08-22 21:38:24] [Rank 0] step:3801/10000 train_time:316871ms step_avg:83.37ms +[2025-08-22 21:38:26] [Rank 0] step:3821/10000 train_time:318319ms step_avg:83.31ms +[2025-08-22 21:38:26] [Rank 0] step:3821/10000 train_time:318319ms step_avg:83.31ms +[2025-08-22 21:38:27] [Rank 0] step:3841/10000 train_time:320056ms 
step_avg:83.33ms +[2025-08-22 21:38:27] [Rank 0] step:3841/10000 train_time:320056ms step_avg:83.33ms +[2025-08-22 21:38:29] [Rank 0] step:3861/10000 train_time:321794ms step_avg:83.34ms +[2025-08-22 21:38:29] [Rank 0] step:3861/10000 train_time:321794ms step_avg:83.34ms +[2025-08-22 21:38:31] [Rank 0] step:3881/10000 train_time:323550ms step_avg:83.37ms +[2025-08-22 21:38:31] [Rank 0] step:3881/10000 train_time:323550ms step_avg:83.37ms +[2025-08-22 21:38:32] [Rank 0] step:3901/10000 train_time:325286ms step_avg:83.39ms +[2025-08-22 21:38:32] [Rank 0] step:3901/10000 train_time:325286ms step_avg:83.39ms +[2025-08-22 21:38:34] [Rank 0] step:3921/10000 train_time:327024ms step_avg:83.40ms +[2025-08-22 21:38:34] [Rank 0] step:3921/10000 train_time:327024ms step_avg:83.40ms +[2025-08-22 21:38:36] [Rank 0] step:3941/10000 train_time:328765ms step_avg:83.42ms +[2025-08-22 21:38:36] [Rank 0] step:3941/10000 train_time:328765ms step_avg:83.42ms +[2025-08-22 21:38:38] [Rank 0] step:3961/10000 train_time:330506ms step_avg:83.44ms +[2025-08-22 21:38:38] [Rank 0] step:3961/10000 train_time:330506ms step_avg:83.44ms +[2025-08-22 21:38:39] [Rank 0] step:3981/10000 train_time:332248ms step_avg:83.46ms +[2025-08-22 21:38:39] [Rank 0] step:3981/10000 train_time:332248ms step_avg:83.46ms +[2025-08-22 21:38:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:38:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:38:55] [Rank 0] PRINT: step:4000/10000 val_loss:4.2462 svd_entropy: attn_qk:H=0.6694,top10E=0.36,eRank=95.9,q75/q25=90.58 attn_vo:H=0.5287,top10E=0.58,eRank=43.6,q75/q25=105.27 mlp_w1:H=0.7191,top10E=0.32,eRank=145.2,q75/q25=14.25 mlp_w2:H=0.8388,top10E=0.18,eRank=280.3,q75/q25=14.98 vo_prod:H=0.4348,top10E=0.75,eRank=23.8,q75/q25=7908.83 train_time:334291ms step_avg:83.57ms +[2025-08-22 21:38:55] [Rank 0] PRINT: step:4000/10000 val_loss:4.2462 svd_entropy: attn_qk:H=0.6694,top10E=0.36,eRank=95.9,q75/q25=90.58 attn_vo:H=0.5287,top10E=0.58,eRank=43.6,q75/q25=105.27 mlp_w1:H=0.7191,top10E=0.32,eRank=145.2,q75/q25=14.25 mlp_w2:H=0.8388,top10E=0.18,eRank=280.3,q75/q25=14.98 vo_prod:H=0.4348,top10E=0.75,eRank=23.8,q75/q25=7908.83 train_time:334291ms step_avg:83.57ms +[2025-08-22 21:38:55] [Rank 0] step:4001/10000 train_time:334302ms step_avg:83.55ms +[2025-08-22 21:38:55] [Rank 0] step:4001/10000 train_time:334302ms step_avg:83.55ms +[2025-08-22 21:38:57] [Rank 0] step:4021/10000 train_time:335757ms step_avg:83.50ms +[2025-08-22 21:38:57] [Rank 0] step:4021/10000 train_time:335757ms step_avg:83.50ms +[2025-08-22 21:38:58] [Rank 0] step:4041/10000 train_time:337491ms step_avg:83.52ms +[2025-08-22 21:38:58] [Rank 0] step:4041/10000 train_time:337491ms step_avg:83.52ms +[2025-08-22 21:39:00] [Rank 0] step:4061/10000 train_time:339226ms step_avg:83.53ms +[2025-08-22 21:39:00] [Rank 0] step:4061/10000 train_time:339226ms step_avg:83.53ms +[2025-08-22 21:39:02] [Rank 0] step:4081/10000 train_time:341328ms step_avg:83.64ms +[2025-08-22 21:39:02] [Rank 0] step:4081/10000 train_time:341328ms step_avg:83.64ms +[2025-08-22 21:39:04] [Rank 0] step:4101/10000 train_time:343063ms step_avg:83.65ms +[2025-08-22 21:39:04] [Rank 0] step:4101/10000 train_time:343063ms step_avg:83.65ms +[2025-08-22 21:39:06] [Rank 0] step:4121/10000 train_time:344801ms step_avg:83.67ms +[2025-08-22 21:39:06] [Rank 0] step:4121/10000 train_time:344801ms step_avg:83.67ms +[2025-08-22 
21:39:07] [Rank 0] step:4141/10000 train_time:346536ms step_avg:83.68ms +[2025-08-22 21:39:07] [Rank 0] step:4141/10000 train_time:346536ms step_avg:83.68ms +[2025-08-22 21:39:09] [Rank 0] step:4161/10000 train_time:348270ms step_avg:83.70ms +[2025-08-22 21:39:09] [Rank 0] step:4161/10000 train_time:348270ms step_avg:83.70ms +[2025-08-22 21:39:11] [Rank 0] step:4181/10000 train_time:350008ms step_avg:83.71ms +[2025-08-22 21:39:11] [Rank 0] step:4181/10000 train_time:350008ms step_avg:83.71ms +[2025-08-22 21:39:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:39:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:39:26] [Rank 0] PRINT: step:4200/10000 val_loss:4.2298 svd_entropy: attn_qk:H=0.6722,top10E=0.36,eRank=97.6,q75/q25=90.78 attn_vo:H=0.5331,top10E=0.57,eRank=45.1,q75/q25=106.20 mlp_w1:H=0.7219,top10E=0.32,eRank=148.2,q75/q25=14.40 mlp_w2:H=0.8414,top10E=0.18,eRank=284.8,q75/q25=14.79 vo_prod:H=0.4399,top10E=0.74,eRank=24.6,q75/q25=8186.53 train_time:352045ms step_avg:83.82ms +[2025-08-22 21:39:26] [Rank 0] PRINT: step:4200/10000 val_loss:4.2298 svd_entropy: attn_qk:H=0.6722,top10E=0.36,eRank=97.6,q75/q25=90.78 attn_vo:H=0.5331,top10E=0.57,eRank=45.1,q75/q25=106.20 mlp_w1:H=0.7219,top10E=0.32,eRank=148.2,q75/q25=14.40 mlp_w2:H=0.8414,top10E=0.18,eRank=284.8,q75/q25=14.79 vo_prod:H=0.4399,top10E=0.74,eRank=24.6,q75/q25=8186.53 train_time:352045ms step_avg:83.82ms +[2025-08-22 21:39:26] [Rank 0] step:4201/10000 train_time:352056ms step_avg:83.80ms +[2025-08-22 21:39:26] [Rank 0] step:4201/10000 train_time:352056ms step_avg:83.80ms +[2025-08-22 21:39:28] [Rank 0] step:4221/10000 train_time:353498ms step_avg:83.75ms +[2025-08-22 21:39:28] [Rank 0] step:4221/10000 train_time:353498ms step_avg:83.75ms +[2025-08-22 21:39:30] [Rank 0] step:4241/10000 train_time:355232ms 
step_avg:83.76ms +[2025-08-22 21:39:30] [Rank 0] step:4241/10000 train_time:355232ms step_avg:83.76ms +[2025-08-22 21:39:31] [Rank 0] step:4261/10000 train_time:356967ms step_avg:83.78ms +[2025-08-22 21:39:31] [Rank 0] step:4261/10000 train_time:356967ms step_avg:83.78ms +[2025-08-22 21:39:33] [Rank 0] step:4281/10000 train_time:358701ms step_avg:83.79ms +[2025-08-22 21:39:33] [Rank 0] step:4281/10000 train_time:358701ms step_avg:83.79ms +[2025-08-22 21:39:35] [Rank 0] step:4301/10000 train_time:360435ms step_avg:83.80ms +[2025-08-22 21:39:35] [Rank 0] step:4301/10000 train_time:360435ms step_avg:83.80ms +[2025-08-22 21:39:37] [Rank 0] step:4321/10000 train_time:362172ms step_avg:83.82ms +[2025-08-22 21:39:37] [Rank 0] step:4321/10000 train_time:362172ms step_avg:83.82ms +[2025-08-22 21:39:38] [Rank 0] step:4341/10000 train_time:363906ms step_avg:83.83ms +[2025-08-22 21:39:38] [Rank 0] step:4341/10000 train_time:363906ms step_avg:83.83ms +[2025-08-22 21:39:40] [Rank 0] step:4361/10000 train_time:365642ms step_avg:83.84ms +[2025-08-22 21:39:40] [Rank 0] step:4361/10000 train_time:365642ms step_avg:83.84ms +[2025-08-22 21:39:42] [Rank 0] step:4381/10000 train_time:367381ms step_avg:83.86ms +[2025-08-22 21:39:42] [Rank 0] step:4381/10000 train_time:367381ms step_avg:83.86ms +[2025-08-22 21:39:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:39:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:39:57] [Rank 0] PRINT: step:4400/10000 val_loss:4.2190 svd_entropy: attn_qk:H=0.6748,top10E=0.35,eRank=99.4,q75/q25=91.12 attn_vo:H=0.5374,top10E=0.56,eRank=46.6,q75/q25=107.44 mlp_w1:H=0.7245,top10E=0.31,eRank=151.0,q75/q25=14.53 mlp_w2:H=0.8438,top10E=0.18,eRank=289.0,q75/q25=14.53 vo_prod:H=0.4443,top10E=0.73,eRank=25.5,q75/q25=8666.54 train_time:369420ms step_avg:83.96ms +[2025-08-22 21:39:57] [Rank 0] PRINT: step:4400/10000 val_loss:4.2190 svd_entropy: attn_qk:H=0.6748,top10E=0.35,eRank=99.4,q75/q25=91.12 attn_vo:H=0.5374,top10E=0.56,eRank=46.6,q75/q25=107.44 mlp_w1:H=0.7245,top10E=0.31,eRank=151.0,q75/q25=14.53 mlp_w2:H=0.8438,top10E=0.18,eRank=289.0,q75/q25=14.53 vo_prod:H=0.4443,top10E=0.73,eRank=25.5,q75/q25=8666.54 train_time:369420ms step_avg:83.96ms +[2025-08-22 21:39:57] [Rank 0] step:4401/10000 train_time:369431ms step_avg:83.94ms +[2025-08-22 21:39:57] [Rank 0] step:4401/10000 train_time:369431ms step_avg:83.94ms +[2025-08-22 21:39:59] [Rank 0] step:4421/10000 train_time:370890ms step_avg:83.89ms +[2025-08-22 21:39:59] [Rank 0] step:4421/10000 train_time:370890ms step_avg:83.89ms +[2025-08-22 21:40:01] [Rank 0] step:4441/10000 train_time:372625ms step_avg:83.91ms +[2025-08-22 21:40:01] [Rank 0] step:4441/10000 train_time:372625ms step_avg:83.91ms +[2025-08-22 21:40:02] [Rank 0] step:4461/10000 train_time:374366ms step_avg:83.92ms +[2025-08-22 21:40:02] [Rank 0] step:4461/10000 train_time:374366ms step_avg:83.92ms +[2025-08-22 21:40:04] [Rank 0] step:4481/10000 train_time:376110ms step_avg:83.93ms +[2025-08-22 21:40:04] [Rank 0] step:4481/10000 train_time:376110ms step_avg:83.93ms +[2025-08-22 21:40:06] [Rank 0] step:4501/10000 train_time:377855ms step_avg:83.95ms +[2025-08-22 21:40:06] [Rank 0] step:4501/10000 train_time:377855ms step_avg:83.95ms +[2025-08-22 21:40:08] [Rank 0] step:4521/10000 train_time:379602ms step_avg:83.96ms +[2025-08-22 21:40:08] [Rank 0] step:4521/10000 train_time:379602ms step_avg:83.96ms +[2025-08-22 
21:40:09] [Rank 0] step:4541/10000 train_time:381346ms step_avg:83.98ms +[2025-08-22 21:40:09] [Rank 0] step:4541/10000 train_time:381346ms step_avg:83.98ms +[2025-08-22 21:40:11] [Rank 0] step:4561/10000 train_time:383094ms step_avg:83.99ms +[2025-08-22 21:40:11] [Rank 0] step:4561/10000 train_time:383094ms step_avg:83.99ms +[2025-08-22 21:40:13] [Rank 0] step:4581/10000 train_time:384844ms step_avg:84.01ms +[2025-08-22 21:40:13] [Rank 0] step:4581/10000 train_time:384844ms step_avg:84.01ms +[2025-08-22 21:40:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:40:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:40:28] [Rank 0] PRINT: step:4600/10000 val_loss:4.1755 svd_entropy: attn_qk:H=0.6773,top10E=0.35,eRank=101.1,q75/q25=91.76 attn_vo:H=0.5416,top10E=0.55,eRank=48.1,q75/q25=108.62 mlp_w1:H=0.7270,top10E=0.31,eRank=153.8,q75/q25=14.64 mlp_w2:H=0.8461,top10E=0.18,eRank=293.1,q75/q25=14.32 vo_prod:H=0.4489,top10E=0.72,eRank=26.3,q75/q25=9102.30 train_time:386898ms step_avg:84.11ms +[2025-08-22 21:40:28] [Rank 0] PRINT: step:4600/10000 val_loss:4.1755 svd_entropy: attn_qk:H=0.6773,top10E=0.35,eRank=101.1,q75/q25=91.76 attn_vo:H=0.5416,top10E=0.55,eRank=48.1,q75/q25=108.62 mlp_w1:H=0.7270,top10E=0.31,eRank=153.8,q75/q25=14.64 mlp_w2:H=0.8461,top10E=0.18,eRank=293.1,q75/q25=14.32 vo_prod:H=0.4489,top10E=0.72,eRank=26.3,q75/q25=9102.30 train_time:386898ms step_avg:84.11ms +[2025-08-22 21:40:28] [Rank 0] step:4601/10000 train_time:386909ms step_avg:84.09ms +[2025-08-22 21:40:28] [Rank 0] step:4601/10000 train_time:386909ms step_avg:84.09ms +[2025-08-22 21:40:30] [Rank 0] step:4621/10000 train_time:388374ms step_avg:84.05ms +[2025-08-22 21:40:30] [Rank 0] step:4621/10000 train_time:388374ms step_avg:84.05ms +[2025-08-22 21:40:32] [Rank 0] step:4641/10000 train_time:390116ms 
step_avg:84.06ms +[2025-08-22 21:40:32] [Rank 0] step:4641/10000 train_time:390116ms step_avg:84.06ms +[2025-08-22 21:40:33] [Rank 0] step:4661/10000 train_time:391855ms step_avg:84.07ms +[2025-08-22 21:40:33] [Rank 0] step:4661/10000 train_time:391855ms step_avg:84.07ms +[2025-08-22 21:40:35] [Rank 0] step:4681/10000 train_time:393596ms step_avg:84.08ms +[2025-08-22 21:40:35] [Rank 0] step:4681/10000 train_time:393596ms step_avg:84.08ms +[2025-08-22 21:40:37] [Rank 0] step:4701/10000 train_time:395340ms step_avg:84.10ms +[2025-08-22 21:40:37] [Rank 0] step:4701/10000 train_time:395340ms step_avg:84.10ms +[2025-08-22 21:40:39] [Rank 0] step:4721/10000 train_time:397079ms step_avg:84.11ms +[2025-08-22 21:40:39] [Rank 0] step:4721/10000 train_time:397079ms step_avg:84.11ms +[2025-08-22 21:40:40] [Rank 0] step:4741/10000 train_time:398822ms step_avg:84.12ms +[2025-08-22 21:40:40] [Rank 0] step:4741/10000 train_time:398822ms step_avg:84.12ms +[2025-08-22 21:40:42] [Rank 0] step:4761/10000 train_time:400566ms step_avg:84.13ms +[2025-08-22 21:40:42] [Rank 0] step:4761/10000 train_time:400566ms step_avg:84.13ms +[2025-08-22 21:40:44] [Rank 0] step:4781/10000 train_time:402309ms step_avg:84.15ms +[2025-08-22 21:40:44] [Rank 0] step:4781/10000 train_time:402309ms step_avg:84.15ms +[2025-08-22 21:40:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:40:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:40:59] [Rank 0] PRINT: step:4800/10000 val_loss:4.1662 svd_entropy: attn_qk:H=0.6797,top10E=0.35,eRank=102.7,q75/q25=92.26 attn_vo:H=0.5454,top10E=0.55,eRank=49.5,q75/q25=109.62 mlp_w1:H=0.7293,top10E=0.31,eRank=156.5,q75/q25=14.77 mlp_w2:H=0.8482,top10E=0.17,eRank=296.9,q75/q25=14.12 vo_prod:H=0.4530,top10E=0.71,eRank=27.1,q75/q25=9428.32 train_time:404354ms step_avg:84.24ms +[2025-08-22 21:40:59] [Rank 0] PRINT: step:4800/10000 val_loss:4.1662 svd_entropy: attn_qk:H=0.6797,top10E=0.35,eRank=102.7,q75/q25=92.26 attn_vo:H=0.5454,top10E=0.55,eRank=49.5,q75/q25=109.62 mlp_w1:H=0.7293,top10E=0.31,eRank=156.5,q75/q25=14.77 mlp_w2:H=0.8482,top10E=0.17,eRank=296.9,q75/q25=14.12 vo_prod:H=0.4530,top10E=0.71,eRank=27.1,q75/q25=9428.32 train_time:404354ms step_avg:84.24ms +[2025-08-22 21:40:59] [Rank 0] step:4801/10000 train_time:404363ms step_avg:84.22ms +[2025-08-22 21:40:59] [Rank 0] step:4801/10000 train_time:404363ms step_avg:84.22ms +[2025-08-22 21:41:01] [Rank 0] step:4821/10000 train_time:405834ms step_avg:84.18ms +[2025-08-22 21:41:01] [Rank 0] step:4821/10000 train_time:405834ms step_avg:84.18ms +[2025-08-22 21:41:03] [Rank 0] step:4841/10000 train_time:407577ms step_avg:84.19ms +[2025-08-22 21:41:03] [Rank 0] step:4841/10000 train_time:407577ms step_avg:84.19ms +[2025-08-22 21:41:05] [Rank 0] step:4861/10000 train_time:409319ms step_avg:84.20ms +[2025-08-22 21:41:05] [Rank 0] step:4861/10000 train_time:409319ms step_avg:84.20ms +[2025-08-22 21:41:06] [Rank 0] step:4881/10000 train_time:411058ms step_avg:84.22ms +[2025-08-22 21:41:06] [Rank 0] step:4881/10000 train_time:411058ms step_avg:84.22ms +[2025-08-22 21:41:08] [Rank 0] step:4901/10000 train_time:412798ms step_avg:84.23ms +[2025-08-22 21:41:08] [Rank 0] step:4901/10000 train_time:412798ms step_avg:84.23ms +[2025-08-22 21:41:10] [Rank 0] step:4921/10000 train_time:414542ms step_avg:84.24ms +[2025-08-22 21:41:10] [Rank 0] step:4921/10000 train_time:414542ms step_avg:84.24ms +[2025-08-22 
21:41:11] [Rank 0] step:4941/10000 train_time:416286ms step_avg:84.25ms +[2025-08-22 21:41:11] [Rank 0] step:4941/10000 train_time:416286ms step_avg:84.25ms +[2025-08-22 21:41:13] [Rank 0] step:4961/10000 train_time:418029ms step_avg:84.26ms +[2025-08-22 21:41:13] [Rank 0] step:4961/10000 train_time:418029ms step_avg:84.26ms +[2025-08-22 21:41:15] [Rank 0] step:4981/10000 train_time:419771ms step_avg:84.27ms +[2025-08-22 21:41:15] [Rank 0] step:4981/10000 train_time:419771ms step_avg:84.27ms +[2025-08-22 21:41:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:41:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:41:30] [Rank 0] PRINT: step:5000/10000 val_loss:4.1424 svd_entropy: attn_qk:H=0.6819,top10E=0.34,eRank=104.2,q75/q25=93.06 attn_vo:H=0.5491,top10E=0.54,eRank=50.8,q75/q25=110.66 mlp_w1:H=0.7316,top10E=0.30,eRank=159.1,q75/q25=14.87 mlp_w2:H=0.8502,top10E=0.17,eRank=300.4,q75/q25=13.96 vo_prod:H=0.4569,top10E=0.71,eRank=27.8,q75/q25=9699.85 train_time:421819ms step_avg:84.36ms +[2025-08-22 21:41:30] [Rank 0] PRINT: step:5000/10000 val_loss:4.1424 svd_entropy: attn_qk:H=0.6819,top10E=0.34,eRank=104.2,q75/q25=93.06 attn_vo:H=0.5491,top10E=0.54,eRank=50.8,q75/q25=110.66 mlp_w1:H=0.7316,top10E=0.30,eRank=159.1,q75/q25=14.87 mlp_w2:H=0.8502,top10E=0.17,eRank=300.4,q75/q25=13.96 vo_prod:H=0.4569,top10E=0.71,eRank=27.8,q75/q25=9699.85 train_time:421819ms step_avg:84.36ms +[2025-08-22 21:41:30] [Rank 0] step:5001/10000 train_time:421829ms step_avg:84.35ms +[2025-08-22 21:41:30] [Rank 0] step:5001/10000 train_time:421829ms step_avg:84.35ms +[2025-08-22 21:41:32] [Rank 0] step:5021/10000 train_time:423289ms step_avg:84.30ms +[2025-08-22 21:41:32] [Rank 0] step:5021/10000 train_time:423289ms step_avg:84.30ms +[2025-08-22 21:41:34] [Rank 0] step:5041/10000 train_time:425029ms 
step_avg:84.31ms +[2025-08-22 21:41:34] [Rank 0] step:5041/10000 train_time:425029ms step_avg:84.31ms +[2025-08-22 21:41:36] [Rank 0] step:5061/10000 train_time:426766ms step_avg:84.32ms +[2025-08-22 21:41:36] [Rank 0] step:5061/10000 train_time:426766ms step_avg:84.32ms +[2025-08-22 21:41:37] [Rank 0] step:5081/10000 train_time:428506ms step_avg:84.33ms +[2025-08-22 21:41:37] [Rank 0] step:5081/10000 train_time:428506ms step_avg:84.33ms +[2025-08-22 21:41:39] [Rank 0] step:5101/10000 train_time:430336ms step_avg:84.36ms +[2025-08-22 21:41:39] [Rank 0] step:5101/10000 train_time:430336ms step_avg:84.36ms +[2025-08-22 21:41:41] [Rank 0] step:5121/10000 train_time:431989ms step_avg:84.36ms +[2025-08-22 21:41:41] [Rank 0] step:5121/10000 train_time:431989ms step_avg:84.36ms +[2025-08-22 21:41:43] [Rank 0] step:5141/10000 train_time:433755ms step_avg:84.37ms +[2025-08-22 21:41:43] [Rank 0] step:5141/10000 train_time:433755ms step_avg:84.37ms +[2025-08-22 21:41:44] [Rank 0] step:5161/10000 train_time:435497ms step_avg:84.38ms +[2025-08-22 21:41:44] [Rank 0] step:5161/10000 train_time:435497ms step_avg:84.38ms +[2025-08-22 21:41:46] [Rank 0] step:5181/10000 train_time:437243ms step_avg:84.39ms +[2025-08-22 21:41:46] [Rank 0] step:5181/10000 train_time:437243ms step_avg:84.39ms +[2025-08-22 21:41:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:41:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:42:01] [Rank 0] PRINT: step:5200/10000 val_loss:4.1265 svd_entropy: attn_qk:H=0.6842,top10E=0.34,eRank=105.7,q75/q25=93.01 attn_vo:H=0.5526,top10E=0.53,eRank=52.1,q75/q25=110.09 mlp_w1:H=0.7336,top10E=0.30,eRank=161.5,q75/q25=14.94 mlp_w2:H=0.8519,top10E=0.17,eRank=303.6,q75/q25=13.78 vo_prod:H=0.4603,top10E=0.70,eRank=28.5,q75/q25=9764.86 train_time:439311ms step_avg:84.48ms +[2025-08-22 21:42:01] [Rank 0] PRINT: step:5200/10000 val_loss:4.1265 svd_entropy: attn_qk:H=0.6842,top10E=0.34,eRank=105.7,q75/q25=93.01 attn_vo:H=0.5526,top10E=0.53,eRank=52.1,q75/q25=110.09 mlp_w1:H=0.7336,top10E=0.30,eRank=161.5,q75/q25=14.94 mlp_w2:H=0.8519,top10E=0.17,eRank=303.6,q75/q25=13.78 vo_prod:H=0.4603,top10E=0.70,eRank=28.5,q75/q25=9764.86 train_time:439311ms step_avg:84.48ms +[2025-08-22 21:42:01] [Rank 0] step:5201/10000 train_time:439321ms step_avg:84.47ms +[2025-08-22 21:42:01] [Rank 0] step:5201/10000 train_time:439321ms step_avg:84.47ms +[2025-08-22 21:42:03] [Rank 0] step:5221/10000 train_time:440807ms step_avg:84.43ms +[2025-08-22 21:42:03] [Rank 0] step:5221/10000 train_time:440807ms step_avg:84.43ms +[2025-08-22 21:42:05] [Rank 0] step:5241/10000 train_time:442578ms step_avg:84.45ms +[2025-08-22 21:42:05] [Rank 0] step:5241/10000 train_time:442578ms step_avg:84.45ms +[2025-08-22 21:42:07] [Rank 0] step:5261/10000 train_time:444350ms step_avg:84.46ms +[2025-08-22 21:42:07] [Rank 0] step:5261/10000 train_time:444350ms step_avg:84.46ms +[2025-08-22 21:42:09] [Rank 0] step:5281/10000 train_time:446127ms step_avg:84.48ms +[2025-08-22 21:42:09] [Rank 0] step:5281/10000 train_time:446127ms step_avg:84.48ms +[2025-08-22 21:42:10] [Rank 0] step:5301/10000 train_time:447912ms step_avg:84.50ms +[2025-08-22 21:42:10] [Rank 0] step:5301/10000 train_time:447912ms step_avg:84.50ms +[2025-08-22 21:42:12] [Rank 0] step:5321/10000 train_time:449687ms step_avg:84.51ms +[2025-08-22 21:42:12] [Rank 0] step:5321/10000 train_time:449687ms step_avg:84.51ms +[2025-08-22 
21:42:14] [Rank 0] step:5341/10000 train_time:451463ms step_avg:84.53ms +[2025-08-22 21:42:14] [Rank 0] step:5341/10000 train_time:451463ms step_avg:84.53ms +[2025-08-22 21:42:16] [Rank 0] step:5361/10000 train_time:453242ms step_avg:84.54ms +[2025-08-22 21:42:16] [Rank 0] step:5361/10000 train_time:453242ms step_avg:84.54ms +[2025-08-22 21:42:17] [Rank 0] step:5381/10000 train_time:455020ms step_avg:84.56ms +[2025-08-22 21:42:17] [Rank 0] step:5381/10000 train_time:455020ms step_avg:84.56ms +[2025-08-22 21:42:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:42:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:42:33] [Rank 0] PRINT: step:5400/10000 val_loss:4.1056 svd_entropy: attn_qk:H=0.6862,top10E=0.34,eRank=107.2,q75/q25=92.81 attn_vo:H=0.5558,top10E=0.52,eRank=53.4,q75/q25=110.53 mlp_w1:H=0.7357,top10E=0.30,eRank=163.8,q75/q25=14.98 mlp_w2:H=0.8536,top10E=0.17,eRank=306.8,q75/q25=13.60 vo_prod:H=0.4637,top10E=0.69,eRank=29.2,q75/q25=9996.61 train_time:457105ms step_avg:84.65ms +[2025-08-22 21:42:33] [Rank 0] PRINT: step:5400/10000 val_loss:4.1056 svd_entropy: attn_qk:H=0.6862,top10E=0.34,eRank=107.2,q75/q25=92.81 attn_vo:H=0.5558,top10E=0.52,eRank=53.4,q75/q25=110.53 mlp_w1:H=0.7357,top10E=0.30,eRank=163.8,q75/q25=14.98 mlp_w2:H=0.8536,top10E=0.17,eRank=306.8,q75/q25=13.60 vo_prod:H=0.4637,top10E=0.69,eRank=29.2,q75/q25=9996.61 train_time:457105ms step_avg:84.65ms +[2025-08-22 21:42:33] [Rank 0] step:5401/10000 train_time:457116ms step_avg:84.64ms +[2025-08-22 21:42:33] [Rank 0] step:5401/10000 train_time:457116ms step_avg:84.64ms +[2025-08-22 21:42:35] [Rank 0] step:5421/10000 train_time:458581ms step_avg:84.59ms +[2025-08-22 21:42:35] [Rank 0] step:5421/10000 train_time:458581ms step_avg:84.59ms +[2025-08-22 21:42:36] [Rank 0] step:5441/10000 train_time:460355ms 
step_avg:84.61ms +[2025-08-22 21:42:36] [Rank 0] step:5441/10000 train_time:460355ms step_avg:84.61ms +[2025-08-22 21:42:38] [Rank 0] step:5461/10000 train_time:462130ms step_avg:84.62ms +[2025-08-22 21:42:38] [Rank 0] step:5461/10000 train_time:462130ms step_avg:84.62ms +[2025-08-22 21:42:40] [Rank 0] step:5481/10000 train_time:463907ms step_avg:84.64ms +[2025-08-22 21:42:40] [Rank 0] step:5481/10000 train_time:463907ms step_avg:84.64ms +[2025-08-22 21:42:42] [Rank 0] step:5501/10000 train_time:465685ms step_avg:84.65ms +[2025-08-22 21:42:42] [Rank 0] step:5501/10000 train_time:465685ms step_avg:84.65ms +[2025-08-22 21:42:44] [Rank 0] step:5521/10000 train_time:467466ms step_avg:84.67ms +[2025-08-22 21:42:44] [Rank 0] step:5521/10000 train_time:467466ms step_avg:84.67ms +[2025-08-22 21:42:45] [Rank 0] step:5541/10000 train_time:469237ms step_avg:84.68ms +[2025-08-22 21:42:45] [Rank 0] step:5541/10000 train_time:469237ms step_avg:84.68ms +[2025-08-22 21:42:47] [Rank 0] step:5561/10000 train_time:471017ms step_avg:84.70ms +[2025-08-22 21:42:47] [Rank 0] step:5561/10000 train_time:471017ms step_avg:84.70ms +[2025-08-22 21:42:49] [Rank 0] step:5581/10000 train_time:472795ms step_avg:84.72ms +[2025-08-22 21:42:49] [Rank 0] step:5581/10000 train_time:472795ms step_avg:84.72ms +[2025-08-22 21:42:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:42:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:43:04] [Rank 0] PRINT: step:5600/10000 val_loss:4.0981 svd_entropy: attn_qk:H=0.6880,top10E=0.34,eRank=108.4,q75/q25=92.88 attn_vo:H=0.5589,top10E=0.52,eRank=54.6,q75/q25=110.63 mlp_w1:H=0.7376,top10E=0.29,eRank=166.1,q75/q25=15.09 mlp_w2:H=0.8552,top10E=0.17,eRank=309.8,q75/q25=13.41 vo_prod:H=0.4666,top10E=0.69,eRank=29.9,q75/q25=10083.42 train_time:474880ms step_avg:84.80ms +[2025-08-22 21:43:04] [Rank 0] PRINT: step:5600/10000 val_loss:4.0981 svd_entropy: attn_qk:H=0.6880,top10E=0.34,eRank=108.4,q75/q25=92.88 attn_vo:H=0.5589,top10E=0.52,eRank=54.6,q75/q25=110.63 mlp_w1:H=0.7376,top10E=0.29,eRank=166.1,q75/q25=15.09 mlp_w2:H=0.8552,top10E=0.17,eRank=309.8,q75/q25=13.41 vo_prod:H=0.4666,top10E=0.69,eRank=29.9,q75/q25=10083.42 train_time:474880ms step_avg:84.80ms +[2025-08-22 21:43:04] [Rank 0] step:5601/10000 train_time:474889ms step_avg:84.79ms +[2025-08-22 21:43:04] [Rank 0] step:5601/10000 train_time:474889ms step_avg:84.79ms +[2025-08-22 21:43:06] [Rank 0] step:5621/10000 train_time:476372ms step_avg:84.75ms +[2025-08-22 21:43:06] [Rank 0] step:5621/10000 train_time:476372ms step_avg:84.75ms +[2025-08-22 21:43:08] [Rank 0] step:5641/10000 train_time:478142ms step_avg:84.76ms +[2025-08-22 21:43:08] [Rank 0] step:5641/10000 train_time:478142ms step_avg:84.76ms +[2025-08-22 21:43:10] [Rank 0] step:5661/10000 train_time:479909ms step_avg:84.77ms +[2025-08-22 21:43:10] [Rank 0] step:5661/10000 train_time:479909ms step_avg:84.77ms +[2025-08-22 21:43:11] [Rank 0] step:5681/10000 train_time:481687ms step_avg:84.79ms +[2025-08-22 21:43:11] [Rank 0] step:5681/10000 train_time:481687ms step_avg:84.79ms +[2025-08-22 21:43:13] [Rank 0] step:5701/10000 train_time:483458ms step_avg:84.80ms +[2025-08-22 21:43:13] [Rank 0] step:5701/10000 train_time:483458ms step_avg:84.80ms +[2025-08-22 21:43:15] [Rank 0] step:5721/10000 train_time:485234ms step_avg:84.82ms +[2025-08-22 21:43:15] [Rank 0] step:5721/10000 train_time:485234ms step_avg:84.82ms +[2025-08-22 
21:43:17] [Rank 0] step:5741/10000 train_time:487011ms step_avg:84.83ms +[2025-08-22 21:43:17] [Rank 0] step:5741/10000 train_time:487011ms step_avg:84.83ms +[2025-08-22 21:43:18] [Rank 0] step:5761/10000 train_time:488788ms step_avg:84.84ms +[2025-08-22 21:43:18] [Rank 0] step:5761/10000 train_time:488788ms step_avg:84.84ms +[2025-08-22 21:43:20] [Rank 0] step:5781/10000 train_time:490563ms step_avg:84.86ms +[2025-08-22 21:43:20] [Rank 0] step:5781/10000 train_time:490563ms step_avg:84.86ms +[2025-08-22 21:43:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:43:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:43:36] [Rank 0] PRINT: step:5800/10000 val_loss:4.0896 svd_entropy: attn_qk:H=0.6899,top10E=0.33,eRank=109.8,q75/q25=93.08 attn_vo:H=0.5619,top10E=0.51,eRank=55.8,q75/q25=111.93 mlp_w1:H=0.7395,top10E=0.29,eRank=168.4,q75/q25=15.10 mlp_w2:H=0.8568,top10E=0.16,eRank=312.7,q75/q25=13.29 vo_prod:H=0.4697,top10E=0.68,eRank=30.5,q75/q25=10474.31 train_time:492648ms step_avg:84.94ms +[2025-08-22 21:43:36] [Rank 0] PRINT: step:5800/10000 val_loss:4.0896 svd_entropy: attn_qk:H=0.6899,top10E=0.33,eRank=109.8,q75/q25=93.08 attn_vo:H=0.5619,top10E=0.51,eRank=55.8,q75/q25=111.93 mlp_w1:H=0.7395,top10E=0.29,eRank=168.4,q75/q25=15.10 mlp_w2:H=0.8568,top10E=0.16,eRank=312.7,q75/q25=13.29 vo_prod:H=0.4697,top10E=0.68,eRank=30.5,q75/q25=10474.31 train_time:492648ms step_avg:84.94ms +[2025-08-22 21:43:36] [Rank 0] step:5801/10000 train_time:492658ms step_avg:84.93ms +[2025-08-22 21:43:36] [Rank 0] step:5801/10000 train_time:492658ms step_avg:84.93ms +[2025-08-22 21:43:37] [Rank 0] step:5821/10000 train_time:494149ms step_avg:84.89ms +[2025-08-22 21:43:37] [Rank 0] step:5821/10000 train_time:494149ms step_avg:84.89ms +[2025-08-22 21:43:39] [Rank 0] step:5841/10000 train_time:495923ms 
step_avg:84.90ms +[2025-08-22 21:43:39] [Rank 0] step:5841/10000 train_time:495923ms step_avg:84.90ms +[2025-08-22 21:43:41] [Rank 0] step:5861/10000 train_time:497703ms step_avg:84.92ms +[2025-08-22 21:43:41] [Rank 0] step:5861/10000 train_time:497703ms step_avg:84.92ms +[2025-08-22 21:43:43] [Rank 0] step:5881/10000 train_time:499480ms step_avg:84.93ms +[2025-08-22 21:43:43] [Rank 0] step:5881/10000 train_time:499480ms step_avg:84.93ms +[2025-08-22 21:43:45] [Rank 0] step:5901/10000 train_time:501256ms step_avg:84.94ms +[2025-08-22 21:43:45] [Rank 0] step:5901/10000 train_time:501256ms step_avg:84.94ms +[2025-08-22 21:43:46] [Rank 0] step:5921/10000 train_time:503038ms step_avg:84.96ms +[2025-08-22 21:43:46] [Rank 0] step:5921/10000 train_time:503038ms step_avg:84.96ms +[2025-08-22 21:43:48] [Rank 0] step:5941/10000 train_time:504820ms step_avg:84.97ms +[2025-08-22 21:43:48] [Rank 0] step:5941/10000 train_time:504820ms step_avg:84.97ms +[2025-08-22 21:43:50] [Rank 0] step:5961/10000 train_time:506605ms step_avg:84.99ms +[2025-08-22 21:43:50] [Rank 0] step:5961/10000 train_time:506605ms step_avg:84.99ms +[2025-08-22 21:43:52] [Rank 0] step:5981/10000 train_time:508385ms step_avg:85.00ms +[2025-08-22 21:43:52] [Rank 0] step:5981/10000 train_time:508385ms step_avg:85.00ms +[2025-08-22 21:43:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:43:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:44:07] [Rank 0] PRINT: step:6000/10000 val_loss:4.0611 svd_entropy: attn_qk:H=0.6916,top10E=0.33,eRank=111.1,q75/q25=93.45 attn_vo:H=0.5650,top10E=0.51,eRank=57.1,q75/q25=111.65 mlp_w1:H=0.7413,top10E=0.29,eRank=170.4,q75/q25=15.12 mlp_w2:H=0.8582,top10E=0.16,eRank=315.4,q75/q25=13.11 vo_prod:H=0.4732,top10E=0.68,eRank=31.2,q75/q25=10694.97 train_time:510474ms step_avg:85.08ms +[2025-08-22 21:44:07] [Rank 0] PRINT: step:6000/10000 val_loss:4.0611 svd_entropy: attn_qk:H=0.6916,top10E=0.33,eRank=111.1,q75/q25=93.45 attn_vo:H=0.5650,top10E=0.51,eRank=57.1,q75/q25=111.65 mlp_w1:H=0.7413,top10E=0.29,eRank=170.4,q75/q25=15.12 mlp_w2:H=0.8582,top10E=0.16,eRank=315.4,q75/q25=13.11 vo_prod:H=0.4732,top10E=0.68,eRank=31.2,q75/q25=10694.97 train_time:510474ms step_avg:85.08ms +[2025-08-22 21:44:07] [Rank 0] step:6001/10000 train_time:510484ms step_avg:85.07ms +[2025-08-22 21:44:07] [Rank 0] step:6001/10000 train_time:510484ms step_avg:85.07ms +[2025-08-22 21:44:09] [Rank 0] step:6021/10000 train_time:511959ms step_avg:85.03ms +[2025-08-22 21:44:09] [Rank 0] step:6021/10000 train_time:511959ms step_avg:85.03ms +[2025-08-22 21:44:11] [Rank 0] step:6041/10000 train_time:513754ms step_avg:85.04ms +[2025-08-22 21:44:11] [Rank 0] step:6041/10000 train_time:513754ms step_avg:85.04ms +[2025-08-22 21:44:12] [Rank 0] step:6061/10000 train_time:515538ms step_avg:85.06ms +[2025-08-22 21:44:12] [Rank 0] step:6061/10000 train_time:515538ms step_avg:85.06ms +[2025-08-22 21:44:14] [Rank 0] step:6081/10000 train_time:517318ms step_avg:85.07ms +[2025-08-22 21:44:14] [Rank 0] step:6081/10000 train_time:517318ms step_avg:85.07ms +[2025-08-22 21:44:16] [Rank 0] step:6101/10000 train_time:519101ms step_avg:85.08ms +[2025-08-22 21:44:16] [Rank 0] step:6101/10000 train_time:519101ms step_avg:85.08ms +[2025-08-22 21:44:18] [Rank 0] step:6121/10000 train_time:521136ms step_avg:85.14ms +[2025-08-22 21:44:18] [Rank 0] step:6121/10000 train_time:521136ms step_avg:85.14ms +[2025-08-22 
21:44:20] [Rank 0] step:6141/10000 train_time:522929ms step_avg:85.15ms +[2025-08-22 21:44:20] [Rank 0] step:6141/10000 train_time:522929ms step_avg:85.15ms +[2025-08-22 21:44:22] [Rank 0] step:6161/10000 train_time:524709ms step_avg:85.17ms +[2025-08-22 21:44:22] [Rank 0] step:6161/10000 train_time:524709ms step_avg:85.17ms +[2025-08-22 21:44:23] [Rank 0] step:6181/10000 train_time:526487ms step_avg:85.18ms +[2025-08-22 21:44:23] [Rank 0] step:6181/10000 train_time:526487ms step_avg:85.18ms +[2025-08-22 21:44:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:44:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:44:39] [Rank 0] PRINT: step:6200/10000 val_loss:4.0460 svd_entropy: attn_qk:H=0.6934,top10E=0.33,eRank=112.4,q75/q25=93.67 attn_vo:H=0.5678,top10E=0.50,eRank=58.2,q75/q25=112.75 mlp_w1:H=0.7430,top10E=0.29,eRank=172.5,q75/q25=15.17 mlp_w2:H=0.8597,top10E=0.16,eRank=318.1,q75/q25=13.02 vo_prod:H=0.4757,top10E=0.67,eRank=31.8,q75/q25=10835.49 train_time:528576ms step_avg:85.25ms +[2025-08-22 21:44:39] [Rank 0] PRINT: step:6200/10000 val_loss:4.0460 svd_entropy: attn_qk:H=0.6934,top10E=0.33,eRank=112.4,q75/q25=93.67 attn_vo:H=0.5678,top10E=0.50,eRank=58.2,q75/q25=112.75 mlp_w1:H=0.7430,top10E=0.29,eRank=172.5,q75/q25=15.17 mlp_w2:H=0.8597,top10E=0.16,eRank=318.1,q75/q25=13.02 vo_prod:H=0.4757,top10E=0.67,eRank=31.8,q75/q25=10835.49 train_time:528576ms step_avg:85.25ms +[2025-08-22 21:44:39] [Rank 0] step:6201/10000 train_time:528587ms step_avg:85.24ms +[2025-08-22 21:44:39] [Rank 0] step:6201/10000 train_time:528587ms step_avg:85.24ms +[2025-08-22 21:44:41] [Rank 0] step:6221/10000 train_time:530068ms step_avg:85.21ms +[2025-08-22 21:44:41] [Rank 0] step:6221/10000 train_time:530068ms step_avg:85.21ms +[2025-08-22 21:44:42] [Rank 0] step:6241/10000 train_time:531843ms 
step_avg:85.22ms +[2025-08-22 21:44:42] [Rank 0] step:6241/10000 train_time:531843ms step_avg:85.22ms +[2025-08-22 21:44:44] [Rank 0] step:6261/10000 train_time:533621ms step_avg:85.23ms +[2025-08-22 21:44:44] [Rank 0] step:6261/10000 train_time:533621ms step_avg:85.23ms +[2025-08-22 21:44:46] [Rank 0] step:6281/10000 train_time:535400ms step_avg:85.24ms +[2025-08-22 21:44:46] [Rank 0] step:6281/10000 train_time:535400ms step_avg:85.24ms +[2025-08-22 21:44:48] [Rank 0] step:6301/10000 train_time:537178ms step_avg:85.25ms +[2025-08-22 21:44:48] [Rank 0] step:6301/10000 train_time:537178ms step_avg:85.25ms +[2025-08-22 21:44:49] [Rank 0] step:6321/10000 train_time:538984ms step_avg:85.27ms +[2025-08-22 21:44:49] [Rank 0] step:6321/10000 train_time:538984ms step_avg:85.27ms +[2025-08-22 21:44:51] [Rank 0] step:6341/10000 train_time:540741ms step_avg:85.28ms +[2025-08-22 21:44:51] [Rank 0] step:6341/10000 train_time:540741ms step_avg:85.28ms +[2025-08-22 21:44:53] [Rank 0] step:6361/10000 train_time:542524ms step_avg:85.29ms +[2025-08-22 21:44:53] [Rank 0] step:6361/10000 train_time:542524ms step_avg:85.29ms +[2025-08-22 21:44:55] [Rank 0] step:6381/10000 train_time:544312ms step_avg:85.30ms +[2025-08-22 21:44:55] [Rank 0] step:6381/10000 train_time:544312ms step_avg:85.30ms +[2025-08-22 21:44:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:44:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:45:10] [Rank 0] PRINT: step:6400/10000 val_loss:4.0263 svd_entropy: attn_qk:H=0.6948,top10E=0.33,eRank=113.5,q75/q25=93.26 attn_vo:H=0.5704,top10E=0.50,eRank=59.3,q75/q25=113.11 mlp_w1:H=0.7446,top10E=0.28,eRank=174.4,q75/q25=15.20 mlp_w2:H=0.8609,top10E=0.16,eRank=320.3,q75/q25=12.83 vo_prod:H=0.4786,top10E=0.67,eRank=32.5,q75/q25=11115.79 train_time:546399ms step_avg:85.37ms +[2025-08-22 21:45:10] [Rank 0] PRINT: step:6400/10000 val_loss:4.0263 svd_entropy: attn_qk:H=0.6948,top10E=0.33,eRank=113.5,q75/q25=93.26 attn_vo:H=0.5704,top10E=0.50,eRank=59.3,q75/q25=113.11 mlp_w1:H=0.7446,top10E=0.28,eRank=174.4,q75/q25=15.20 mlp_w2:H=0.8609,top10E=0.16,eRank=320.3,q75/q25=12.83 vo_prod:H=0.4786,top10E=0.67,eRank=32.5,q75/q25=11115.79 train_time:546399ms step_avg:85.37ms +[2025-08-22 21:45:10] [Rank 0] step:6401/10000 train_time:546409ms step_avg:85.36ms +[2025-08-22 21:45:10] [Rank 0] step:6401/10000 train_time:546409ms step_avg:85.36ms +[2025-08-22 21:45:12] [Rank 0] step:6421/10000 train_time:547889ms step_avg:85.33ms +[2025-08-22 21:45:12] [Rank 0] step:6421/10000 train_time:547889ms step_avg:85.33ms +[2025-08-22 21:45:14] [Rank 0] step:6441/10000 train_time:549668ms step_avg:85.34ms +[2025-08-22 21:45:14] [Rank 0] step:6441/10000 train_time:549668ms step_avg:85.34ms +[2025-08-22 21:45:16] [Rank 0] step:6461/10000 train_time:551454ms step_avg:85.35ms +[2025-08-22 21:45:16] [Rank 0] step:6461/10000 train_time:551454ms step_avg:85.35ms +[2025-08-22 21:45:17] [Rank 0] step:6481/10000 train_time:553239ms step_avg:85.36ms +[2025-08-22 21:45:17] [Rank 0] step:6481/10000 train_time:553239ms step_avg:85.36ms +[2025-08-22 21:45:19] [Rank 0] step:6501/10000 train_time:555017ms step_avg:85.37ms +[2025-08-22 21:45:19] [Rank 0] step:6501/10000 train_time:555017ms step_avg:85.37ms +[2025-08-22 21:45:21] [Rank 0] step:6521/10000 train_time:556796ms step_avg:85.39ms +[2025-08-22 21:45:21] [Rank 0] step:6521/10000 train_time:556796ms step_avg:85.39ms +[2025-08-22 
21:45:23] [Rank 0] step:6541/10000 train_time:558578ms step_avg:85.40ms +[2025-08-22 21:45:23] [Rank 0] step:6541/10000 train_time:558578ms step_avg:85.40ms +[2025-08-22 21:45:24] [Rank 0] step:6561/10000 train_time:560363ms step_avg:85.41ms +[2025-08-22 21:45:24] [Rank 0] step:6561/10000 train_time:560363ms step_avg:85.41ms +[2025-08-22 21:45:26] [Rank 0] step:6581/10000 train_time:562141ms step_avg:85.42ms +[2025-08-22 21:45:26] [Rank 0] step:6581/10000 train_time:562141ms step_avg:85.42ms +[2025-08-22 21:45:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:45:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:45:41] [Rank 0] PRINT: step:6600/10000 val_loss:4.0174 svd_entropy: attn_qk:H=0.6964,top10E=0.32,eRank=114.7,q75/q25=93.72 attn_vo:H=0.5727,top10E=0.49,eRank=60.3,q75/q25=113.01 mlp_w1:H=0.7459,top10E=0.28,eRank=176.1,q75/q25=15.22 mlp_w2:H=0.8621,top10E=0.16,eRank=322.6,q75/q25=12.72 vo_prod:H=0.4808,top10E=0.66,eRank=33.0,q75/q25=11232.66 train_time:564236ms step_avg:85.49ms +[2025-08-22 21:45:41] [Rank 0] PRINT: step:6600/10000 val_loss:4.0174 svd_entropy: attn_qk:H=0.6964,top10E=0.32,eRank=114.7,q75/q25=93.72 attn_vo:H=0.5727,top10E=0.49,eRank=60.3,q75/q25=113.01 mlp_w1:H=0.7459,top10E=0.28,eRank=176.1,q75/q25=15.22 mlp_w2:H=0.8621,top10E=0.16,eRank=322.6,q75/q25=12.72 vo_prod:H=0.4808,top10E=0.66,eRank=33.0,q75/q25=11232.66 train_time:564236ms step_avg:85.49ms +[2025-08-22 21:45:41] [Rank 0] step:6601/10000 train_time:564246ms step_avg:85.48ms +[2025-08-22 21:45:41] [Rank 0] step:6601/10000 train_time:564246ms step_avg:85.48ms +[2025-08-22 21:45:43] [Rank 0] step:6621/10000 train_time:565728ms step_avg:85.44ms +[2025-08-22 21:45:43] [Rank 0] step:6621/10000 train_time:565728ms step_avg:85.44ms +[2025-08-22 21:45:45] [Rank 0] step:6641/10000 train_time:567514ms 
step_avg:85.46ms +[2025-08-22 21:45:45] [Rank 0] step:6641/10000 train_time:567514ms step_avg:85.46ms +[2025-08-22 21:45:47] [Rank 0] step:6661/10000 train_time:569294ms step_avg:85.47ms +[2025-08-22 21:45:47] [Rank 0] step:6661/10000 train_time:569294ms step_avg:85.47ms +[2025-08-22 21:45:49] [Rank 0] step:6681/10000 train_time:571089ms step_avg:85.48ms +[2025-08-22 21:45:49] [Rank 0] step:6681/10000 train_time:571089ms step_avg:85.48ms +[2025-08-22 21:45:50] [Rank 0] step:6701/10000 train_time:572905ms step_avg:85.50ms +[2025-08-22 21:45:50] [Rank 0] step:6701/10000 train_time:572905ms step_avg:85.50ms +[2025-08-22 21:45:52] [Rank 0] step:6721/10000 train_time:574724ms step_avg:85.51ms +[2025-08-22 21:45:52] [Rank 0] step:6721/10000 train_time:574724ms step_avg:85.51ms +[2025-08-22 21:45:54] [Rank 0] step:6741/10000 train_time:576571ms step_avg:85.53ms +[2025-08-22 21:45:54] [Rank 0] step:6741/10000 train_time:576571ms step_avg:85.53ms +[2025-08-22 21:45:56] [Rank 0] step:6761/10000 train_time:578382ms step_avg:85.55ms +[2025-08-22 21:45:56] [Rank 0] step:6761/10000 train_time:578382ms step_avg:85.55ms +[2025-08-22 21:45:58] [Rank 0] step:6781/10000 train_time:580202ms step_avg:85.56ms +[2025-08-22 21:45:58] [Rank 0] step:6781/10000 train_time:580202ms step_avg:85.56ms +[2025-08-22 21:45:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:45:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:46:13] [Rank 0] PRINT: step:6800/10000 val_loss:4.0009 svd_entropy: attn_qk:H=0.6977,top10E=0.32,eRank=115.7,q75/q25=93.85 attn_vo:H=0.5749,top10E=0.49,eRank=61.3,q75/q25=114.30 mlp_w1:H=0.7472,top10E=0.28,eRank=177.7,q75/q25=15.23 mlp_w2:H=0.8631,top10E=0.16,eRank=324.6,q75/q25=12.66 vo_prod:H=0.4831,top10E=0.66,eRank=33.5,q75/q25=11473.23 train_time:582335ms step_avg:85.64ms +[2025-08-22 21:46:13] [Rank 0] PRINT: step:6800/10000 val_loss:4.0009 svd_entropy: attn_qk:H=0.6977,top10E=0.32,eRank=115.7,q75/q25=93.85 attn_vo:H=0.5749,top10E=0.49,eRank=61.3,q75/q25=114.30 mlp_w1:H=0.7472,top10E=0.28,eRank=177.7,q75/q25=15.23 mlp_w2:H=0.8631,top10E=0.16,eRank=324.6,q75/q25=12.66 vo_prod:H=0.4831,top10E=0.66,eRank=33.5,q75/q25=11473.23 train_time:582335ms step_avg:85.64ms +[2025-08-22 21:46:13] [Rank 0] step:6801/10000 train_time:582345ms step_avg:85.63ms +[2025-08-22 21:46:13] [Rank 0] step:6801/10000 train_time:582345ms step_avg:85.63ms +[2025-08-22 21:46:15] [Rank 0] step:6821/10000 train_time:583846ms step_avg:85.60ms +[2025-08-22 21:46:15] [Rank 0] step:6821/10000 train_time:583846ms step_avg:85.60ms +[2025-08-22 21:46:17] [Rank 0] step:6841/10000 train_time:585645ms step_avg:85.61ms +[2025-08-22 21:46:17] [Rank 0] step:6841/10000 train_time:585645ms step_avg:85.61ms +[2025-08-22 21:46:19] [Rank 0] step:6861/10000 train_time:587455ms step_avg:85.62ms +[2025-08-22 21:46:19] [Rank 0] step:6861/10000 train_time:587455ms step_avg:85.62ms +[2025-08-22 21:46:20] [Rank 0] step:6881/10000 train_time:589262ms step_avg:85.64ms +[2025-08-22 21:46:20] [Rank 0] step:6881/10000 train_time:589262ms step_avg:85.64ms +[2025-08-22 21:46:22] [Rank 0] step:6901/10000 train_time:591069ms step_avg:85.65ms +[2025-08-22 21:46:22] [Rank 0] step:6901/10000 train_time:591069ms step_avg:85.65ms +[2025-08-22 21:46:24] [Rank 0] step:6921/10000 train_time:592869ms step_avg:85.66ms +[2025-08-22 21:46:24] [Rank 0] step:6921/10000 train_time:592869ms step_avg:85.66ms +[2025-08-22 
21:46:26] [Rank 0] step:6941/10000 train_time:594684ms step_avg:85.68ms +[2025-08-22 21:46:26] [Rank 0] step:6941/10000 train_time:594684ms step_avg:85.68ms +[2025-08-22 21:46:28] [Rank 0] step:6961/10000 train_time:596508ms step_avg:85.69ms +[2025-08-22 21:46:28] [Rank 0] step:6961/10000 train_time:596508ms step_avg:85.69ms +[2025-08-22 21:46:29] [Rank 0] step:6981/10000 train_time:598323ms step_avg:85.71ms +[2025-08-22 21:46:29] [Rank 0] step:6981/10000 train_time:598323ms step_avg:85.71ms +[2025-08-22 21:46:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:46:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:46:45] [Rank 0] PRINT: step:7000/10000 val_loss:3.9787 svd_entropy: attn_qk:H=0.6989,top10E=0.32,eRank=116.6,q75/q25=94.12 attn_vo:H=0.5770,top10E=0.48,eRank=62.2,q75/q25=114.21 mlp_w1:H=0.7484,top10E=0.28,eRank=179.2,q75/q25=15.28 mlp_w2:H=0.8641,top10E=0.16,eRank=326.6,q75/q25=12.51 vo_prod:H=0.4853,top10E=0.65,eRank=34.0,q75/q25=11563.55 train_time:600446ms step_avg:85.78ms +[2025-08-22 21:46:45] [Rank 0] PRINT: step:7000/10000 val_loss:3.9787 svd_entropy: attn_qk:H=0.6989,top10E=0.32,eRank=116.6,q75/q25=94.12 attn_vo:H=0.5770,top10E=0.48,eRank=62.2,q75/q25=114.21 mlp_w1:H=0.7484,top10E=0.28,eRank=179.2,q75/q25=15.28 mlp_w2:H=0.8641,top10E=0.16,eRank=326.6,q75/q25=12.51 vo_prod:H=0.4853,top10E=0.65,eRank=34.0,q75/q25=11563.55 train_time:600446ms step_avg:85.78ms +[2025-08-22 21:46:45] [Rank 0] step:7001/10000 train_time:600457ms step_avg:85.77ms +[2025-08-22 21:46:45] [Rank 0] step:7001/10000 train_time:600457ms step_avg:85.77ms +[2025-08-22 21:46:47] [Rank 0] step:7021/10000 train_time:601972ms step_avg:85.74ms +[2025-08-22 21:46:47] [Rank 0] step:7021/10000 train_time:601972ms step_avg:85.74ms +[2025-08-22 21:46:49] [Rank 0] step:7041/10000 train_time:603777ms 
step_avg:85.75ms +[2025-08-22 21:46:49] [Rank 0] step:7041/10000 train_time:603777ms step_avg:85.75ms +[2025-08-22 21:46:50] [Rank 0] step:7061/10000 train_time:605579ms step_avg:85.76ms +[2025-08-22 21:46:50] [Rank 0] step:7061/10000 train_time:605579ms step_avg:85.76ms +[2025-08-22 21:46:52] [Rank 0] step:7081/10000 train_time:607391ms step_avg:85.78ms +[2025-08-22 21:46:52] [Rank 0] step:7081/10000 train_time:607391ms step_avg:85.78ms +[2025-08-22 21:46:54] [Rank 0] step:7101/10000 train_time:609200ms step_avg:85.79ms +[2025-08-22 21:46:54] [Rank 0] step:7101/10000 train_time:609200ms step_avg:85.79ms +[2025-08-22 21:46:56] [Rank 0] step:7121/10000 train_time:611126ms step_avg:85.82ms +[2025-08-22 21:46:56] [Rank 0] step:7121/10000 train_time:611126ms step_avg:85.82ms +[2025-08-22 21:46:58] [Rank 0] step:7141/10000 train_time:612832ms step_avg:85.82ms +[2025-08-22 21:46:58] [Rank 0] step:7141/10000 train_time:612832ms step_avg:85.82ms +[2025-08-22 21:46:59] [Rank 0] step:7161/10000 train_time:614647ms step_avg:85.83ms +[2025-08-22 21:46:59] [Rank 0] step:7161/10000 train_time:614647ms step_avg:85.83ms +[2025-08-22 21:47:01] [Rank 0] step:7181/10000 train_time:616457ms step_avg:85.85ms +[2025-08-22 21:47:01] [Rank 0] step:7181/10000 train_time:616457ms step_avg:85.85ms +[2025-08-22 21:47:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:47:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:47:17] [Rank 0] PRINT: step:7200/10000 val_loss:3.9644 svd_entropy: attn_qk:H=0.7001,top10E=0.32,eRank=117.5,q75/q25=94.24 attn_vo:H=0.5789,top10E=0.48,eRank=63.0,q75/q25=114.07 mlp_w1:H=0.7496,top10E=0.28,eRank=180.6,q75/q25=15.20 mlp_w2:H=0.8651,top10E=0.16,eRank=328.4,q75/q25=12.39 vo_prod:H=0.4871,top10E=0.65,eRank=34.5,q75/q25=11794.48 train_time:618591ms step_avg:85.92ms +[2025-08-22 21:47:17] [Rank 0] PRINT: step:7200/10000 val_loss:3.9644 svd_entropy: attn_qk:H=0.7001,top10E=0.32,eRank=117.5,q75/q25=94.24 attn_vo:H=0.5789,top10E=0.48,eRank=63.0,q75/q25=114.07 mlp_w1:H=0.7496,top10E=0.28,eRank=180.6,q75/q25=15.20 mlp_w2:H=0.8651,top10E=0.16,eRank=328.4,q75/q25=12.39 vo_prod:H=0.4871,top10E=0.65,eRank=34.5,q75/q25=11794.48 train_time:618591ms step_avg:85.92ms +[2025-08-22 21:47:17] [Rank 0] step:7201/10000 train_time:618601ms step_avg:85.90ms +[2025-08-22 21:47:17] [Rank 0] step:7201/10000 train_time:618601ms step_avg:85.90ms +[2025-08-22 21:47:19] [Rank 0] step:7221/10000 train_time:620112ms step_avg:85.88ms +[2025-08-22 21:47:19] [Rank 0] step:7221/10000 train_time:620112ms step_avg:85.88ms +[2025-08-22 21:47:20] [Rank 0] step:7241/10000 train_time:621920ms step_avg:85.89ms +[2025-08-22 21:47:20] [Rank 0] step:7241/10000 train_time:621920ms step_avg:85.89ms +[2025-08-22 21:47:22] [Rank 0] step:7261/10000 train_time:623728ms step_avg:85.90ms +[2025-08-22 21:47:22] [Rank 0] step:7261/10000 train_time:623728ms step_avg:85.90ms +[2025-08-22 21:47:24] [Rank 0] step:7281/10000 train_time:625547ms step_avg:85.92ms +[2025-08-22 21:47:24] [Rank 0] step:7281/10000 train_time:625547ms step_avg:85.92ms +[2025-08-22 21:47:26] [Rank 0] step:7301/10000 train_time:627361ms step_avg:85.93ms +[2025-08-22 21:47:26] [Rank 0] step:7301/10000 train_time:627361ms step_avg:85.93ms +[2025-08-22 21:47:28] [Rank 0] step:7321/10000 train_time:629182ms step_avg:85.94ms +[2025-08-22 21:47:28] [Rank 0] step:7321/10000 train_time:629182ms step_avg:85.94ms +[2025-08-22 
21:47:29] [Rank 0] step:7341/10000 train_time:630998ms step_avg:85.96ms +[2025-08-22 21:47:29] [Rank 0] step:7341/10000 train_time:630998ms step_avg:85.96ms +[2025-08-22 21:47:31] [Rank 0] step:7361/10000 train_time:632822ms step_avg:85.97ms +[2025-08-22 21:47:31] [Rank 0] step:7361/10000 train_time:632822ms step_avg:85.97ms +[2025-08-22 21:47:33] [Rank 0] step:7381/10000 train_time:634645ms step_avg:85.98ms +[2025-08-22 21:47:33] [Rank 0] step:7381/10000 train_time:634645ms step_avg:85.98ms +[2025-08-22 21:47:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:47:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:47:48] [Rank 0] PRINT: step:7400/10000 val_loss:3.9396 svd_entropy: attn_qk:H=0.7012,top10E=0.32,eRank=118.4,q75/q25=93.90 attn_vo:H=0.5805,top10E=0.48,eRank=63.8,q75/q25=113.69 mlp_w1:H=0.7505,top10E=0.28,eRank=181.8,q75/q25=15.23 mlp_w2:H=0.8659,top10E=0.15,eRank=330.0,q75/q25=12.32 vo_prod:H=0.4887,top10E=0.65,eRank=34.9,q75/q25=11748.21 train_time:636760ms step_avg:86.05ms +[2025-08-22 21:47:48] [Rank 0] PRINT: step:7400/10000 val_loss:3.9396 svd_entropy: attn_qk:H=0.7012,top10E=0.32,eRank=118.4,q75/q25=93.90 attn_vo:H=0.5805,top10E=0.48,eRank=63.8,q75/q25=113.69 mlp_w1:H=0.7505,top10E=0.28,eRank=181.8,q75/q25=15.23 mlp_w2:H=0.8659,top10E=0.15,eRank=330.0,q75/q25=12.32 vo_prod:H=0.4887,top10E=0.65,eRank=34.9,q75/q25=11748.21 train_time:636760ms step_avg:86.05ms +[2025-08-22 21:47:49] [Rank 0] step:7401/10000 train_time:636770ms step_avg:86.04ms +[2025-08-22 21:47:49] [Rank 0] step:7401/10000 train_time:636770ms step_avg:86.04ms +[2025-08-22 21:47:50] [Rank 0] step:7421/10000 train_time:638291ms step_avg:86.01ms +[2025-08-22 21:47:50] [Rank 0] step:7421/10000 train_time:638291ms step_avg:86.01ms +[2025-08-22 21:47:52] [Rank 0] step:7441/10000 train_time:640095ms 
step_avg:86.02ms +[2025-08-22 21:47:52] [Rank 0] step:7441/10000 train_time:640095ms step_avg:86.02ms +[2025-08-22 21:47:54] [Rank 0] step:7461/10000 train_time:641908ms step_avg:86.04ms +[2025-08-22 21:47:54] [Rank 0] step:7461/10000 train_time:641908ms step_avg:86.04ms +[2025-08-22 21:47:56] [Rank 0] step:7481/10000 train_time:643724ms step_avg:86.05ms +[2025-08-22 21:47:56] [Rank 0] step:7481/10000 train_time:643724ms step_avg:86.05ms +[2025-08-22 21:47:58] [Rank 0] step:7501/10000 train_time:645538ms step_avg:86.06ms +[2025-08-22 21:47:58] [Rank 0] step:7501/10000 train_time:645538ms step_avg:86.06ms +[2025-08-22 21:47:59] [Rank 0] step:7521/10000 train_time:647349ms step_avg:86.07ms +[2025-08-22 21:47:59] [Rank 0] step:7521/10000 train_time:647349ms step_avg:86.07ms +[2025-08-22 21:48:01] [Rank 0] step:7541/10000 train_time:649175ms step_avg:86.09ms +[2025-08-22 21:48:01] [Rank 0] step:7541/10000 train_time:649175ms step_avg:86.09ms +[2025-08-22 21:48:03] [Rank 0] step:7561/10000 train_time:650985ms step_avg:86.10ms +[2025-08-22 21:48:03] [Rank 0] step:7561/10000 train_time:650985ms step_avg:86.10ms +[2025-08-22 21:48:05] [Rank 0] step:7581/10000 train_time:652807ms step_avg:86.11ms +[2025-08-22 21:48:05] [Rank 0] step:7581/10000 train_time:652807ms step_avg:86.11ms +[2025-08-22 21:48:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:48:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:48:20] [Rank 0] PRINT: step:7600/10000 val_loss:3.9380 svd_entropy: attn_qk:H=0.7021,top10E=0.32,eRank=119.1,q75/q25=94.09 attn_vo:H=0.5820,top10E=0.47,eRank=64.5,q75/q25=113.54 mlp_w1:H=0.7514,top10E=0.28,eRank=182.9,q75/q25=15.23 mlp_w2:H=0.8667,top10E=0.15,eRank=331.6,q75/q25=12.25 vo_prod:H=0.4901,top10E=0.65,eRank=35.3,q75/q25=11613.74 train_time:654948ms step_avg:86.18ms +[2025-08-22 21:48:20] [Rank 0] PRINT: step:7600/10000 val_loss:3.9380 svd_entropy: attn_qk:H=0.7021,top10E=0.32,eRank=119.1,q75/q25=94.09 attn_vo:H=0.5820,top10E=0.47,eRank=64.5,q75/q25=113.54 mlp_w1:H=0.7514,top10E=0.28,eRank=182.9,q75/q25=15.23 mlp_w2:H=0.8667,top10E=0.15,eRank=331.6,q75/q25=12.25 vo_prod:H=0.4901,top10E=0.65,eRank=35.3,q75/q25=11613.74 train_time:654948ms step_avg:86.18ms +[2025-08-22 21:48:20] [Rank 0] step:7601/10000 train_time:654958ms step_avg:86.17ms +[2025-08-22 21:48:20] [Rank 0] step:7601/10000 train_time:654958ms step_avg:86.17ms +[2025-08-22 21:48:22] [Rank 0] step:7621/10000 train_time:656467ms step_avg:86.14ms +[2025-08-22 21:48:22] [Rank 0] step:7621/10000 train_time:656467ms step_avg:86.14ms +[2025-08-22 21:48:24] [Rank 0] step:7641/10000 train_time:658275ms step_avg:86.15ms +[2025-08-22 21:48:24] [Rank 0] step:7641/10000 train_time:658275ms step_avg:86.15ms +[2025-08-22 21:48:26] [Rank 0] step:7661/10000 train_time:660095ms step_avg:86.16ms +[2025-08-22 21:48:26] [Rank 0] step:7661/10000 train_time:660095ms step_avg:86.16ms +[2025-08-22 21:48:28] [Rank 0] step:7681/10000 train_time:661907ms step_avg:86.17ms +[2025-08-22 21:48:28] [Rank 0] step:7681/10000 train_time:661907ms step_avg:86.17ms +[2025-08-22 21:48:29] [Rank 0] step:7701/10000 train_time:663716ms step_avg:86.19ms +[2025-08-22 21:48:29] [Rank 0] step:7701/10000 train_time:663716ms step_avg:86.19ms +[2025-08-22 21:48:31] [Rank 0] step:7721/10000 train_time:665545ms step_avg:86.20ms +[2025-08-22 21:48:31] [Rank 0] step:7721/10000 train_time:665545ms step_avg:86.20ms +[2025-08-22 
21:48:33] [Rank 0] step:7741/10000 train_time:667360ms step_avg:86.21ms +[2025-08-22 21:48:33] [Rank 0] step:7741/10000 train_time:667360ms step_avg:86.21ms +[2025-08-22 21:48:35] [Rank 0] step:7761/10000 train_time:669178ms step_avg:86.22ms +[2025-08-22 21:48:35] [Rank 0] step:7761/10000 train_time:669178ms step_avg:86.22ms +[2025-08-22 21:48:37] [Rank 0] step:7781/10000 train_time:670998ms step_avg:86.24ms +[2025-08-22 21:48:37] [Rank 0] step:7781/10000 train_time:670998ms step_avg:86.24ms +[2025-08-22 21:48:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:48:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:48:52] [Rank 0] PRINT: step:7800/10000 val_loss:3.9187 svd_entropy: attn_qk:H=0.7030,top10E=0.31,eRank=119.8,q75/q25=93.97 attn_vo:H=0.5835,top10E=0.47,eRank=65.1,q75/q25=113.43 mlp_w1:H=0.7522,top10E=0.27,eRank=183.9,q75/q25=15.19 mlp_w2:H=0.8675,top10E=0.15,eRank=333.1,q75/q25=12.16 vo_prod:H=0.4916,top10E=0.64,eRank=35.6,q75/q25=11760.35 train_time:673137ms step_avg:86.30ms +[2025-08-22 21:48:52] [Rank 0] PRINT: step:7800/10000 val_loss:3.9187 svd_entropy: attn_qk:H=0.7030,top10E=0.31,eRank=119.8,q75/q25=93.97 attn_vo:H=0.5835,top10E=0.47,eRank=65.1,q75/q25=113.43 mlp_w1:H=0.7522,top10E=0.27,eRank=183.9,q75/q25=15.19 mlp_w2:H=0.8675,top10E=0.15,eRank=333.1,q75/q25=12.16 vo_prod:H=0.4916,top10E=0.64,eRank=35.6,q75/q25=11760.35 train_time:673137ms step_avg:86.30ms +[2025-08-22 21:48:52] [Rank 0] step:7801/10000 train_time:673147ms step_avg:86.29ms +[2025-08-22 21:48:52] [Rank 0] step:7801/10000 train_time:673147ms step_avg:86.29ms +[2025-08-22 21:48:54] [Rank 0] step:7821/10000 train_time:674655ms step_avg:86.26ms +[2025-08-22 21:48:54] [Rank 0] step:7821/10000 train_time:674655ms step_avg:86.26ms +[2025-08-22 21:48:56] [Rank 0] step:7841/10000 train_time:676465ms 
step_avg:86.27ms +[2025-08-22 21:48:56] [Rank 0] step:7841/10000 train_time:676465ms step_avg:86.27ms +[2025-08-22 21:48:58] [Rank 0] step:7861/10000 train_time:678275ms step_avg:86.28ms +[2025-08-22 21:48:58] [Rank 0] step:7861/10000 train_time:678275ms step_avg:86.28ms +[2025-08-22 21:48:59] [Rank 0] step:7881/10000 train_time:680097ms step_avg:86.30ms +[2025-08-22 21:48:59] [Rank 0] step:7881/10000 train_time:680097ms step_avg:86.30ms +[2025-08-22 21:49:01] [Rank 0] step:7901/10000 train_time:681906ms step_avg:86.31ms +[2025-08-22 21:49:01] [Rank 0] step:7901/10000 train_time:681906ms step_avg:86.31ms +[2025-08-22 21:49:03] [Rank 0] step:7921/10000 train_time:683725ms step_avg:86.32ms +[2025-08-22 21:49:03] [Rank 0] step:7921/10000 train_time:683725ms step_avg:86.32ms +[2025-08-22 21:49:05] [Rank 0] step:7941/10000 train_time:685549ms step_avg:86.33ms +[2025-08-22 21:49:05] [Rank 0] step:7941/10000 train_time:685549ms step_avg:86.33ms +[2025-08-22 21:49:07] [Rank 0] step:7961/10000 train_time:687367ms step_avg:86.34ms +[2025-08-22 21:49:07] [Rank 0] step:7961/10000 train_time:687367ms step_avg:86.34ms +[2025-08-22 21:49:08] [Rank 0] step:7981/10000 train_time:689178ms step_avg:86.35ms +[2025-08-22 21:49:08] [Rank 0] step:7981/10000 train_time:689178ms step_avg:86.35ms +[2025-08-22 21:49:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:49:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:49:24] [Rank 0] PRINT: step:8000/10000 val_loss:3.8980 svd_entropy: attn_qk:H=0.7039,top10E=0.31,eRank=120.5,q75/q25=94.16 attn_vo:H=0.5848,top10E=0.47,eRank=65.7,q75/q25=113.03 mlp_w1:H=0.7530,top10E=0.27,eRank=184.9,q75/q25=15.21 mlp_w2:H=0.8681,top10E=0.15,eRank=334.4,q75/q25=12.07 vo_prod:H=0.4930,top10E=0.64,eRank=35.9,q75/q25=11756.46 train_time:691312ms step_avg:86.41ms +[2025-08-22 21:49:24] [Rank 0] PRINT: step:8000/10000 val_loss:3.8980 svd_entropy: attn_qk:H=0.7039,top10E=0.31,eRank=120.5,q75/q25=94.16 attn_vo:H=0.5848,top10E=0.47,eRank=65.7,q75/q25=113.03 mlp_w1:H=0.7530,top10E=0.27,eRank=184.9,q75/q25=15.21 mlp_w2:H=0.8681,top10E=0.15,eRank=334.4,q75/q25=12.07 vo_prod:H=0.4930,top10E=0.64,eRank=35.9,q75/q25=11756.46 train_time:691312ms step_avg:86.41ms +[2025-08-22 21:49:24] [Rank 0] step:8001/10000 train_time:691321ms step_avg:86.40ms +[2025-08-22 21:49:24] [Rank 0] step:8001/10000 train_time:691321ms step_avg:86.40ms +[2025-08-22 21:49:26] [Rank 0] step:8021/10000 train_time:692845ms step_avg:86.38ms +[2025-08-22 21:49:26] [Rank 0] step:8021/10000 train_time:692845ms step_avg:86.38ms +[2025-08-22 21:49:28] [Rank 0] step:8041/10000 train_time:694673ms step_avg:86.39ms +[2025-08-22 21:49:28] [Rank 0] step:8041/10000 train_time:694673ms step_avg:86.39ms +[2025-08-22 21:49:29] [Rank 0] step:8061/10000 train_time:696487ms step_avg:86.40ms +[2025-08-22 21:49:29] [Rank 0] step:8061/10000 train_time:696487ms step_avg:86.40ms +[2025-08-22 21:49:31] [Rank 0] step:8081/10000 train_time:698300ms step_avg:86.41ms +[2025-08-22 21:49:31] [Rank 0] step:8081/10000 train_time:698300ms step_avg:86.41ms +[2025-08-22 21:49:33] [Rank 0] step:8101/10000 train_time:700124ms step_avg:86.42ms +[2025-08-22 21:49:33] [Rank 0] step:8101/10000 train_time:700124ms step_avg:86.42ms +[2025-08-22 21:49:35] [Rank 0] step:8121/10000 train_time:701938ms step_avg:86.43ms +[2025-08-22 21:49:35] [Rank 0] step:8121/10000 train_time:701938ms step_avg:86.43ms +[2025-08-22 
21:49:37] [Rank 0] step:8141/10000 train_time:703758ms step_avg:86.45ms +[2025-08-22 21:49:37] [Rank 0] step:8141/10000 train_time:703758ms step_avg:86.45ms +[2025-08-22 21:49:39] [Rank 0] step:8161/10000 train_time:705933ms step_avg:86.50ms +[2025-08-22 21:49:39] [Rank 0] step:8161/10000 train_time:705933ms step_avg:86.50ms +[2025-08-22 21:49:41] [Rank 0] step:8181/10000 train_time:707791ms step_avg:86.52ms +[2025-08-22 21:49:41] [Rank 0] step:8181/10000 train_time:707791ms step_avg:86.52ms +[2025-08-22 21:49:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:49:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:49:56] [Rank 0] PRINT: step:8200/10000 val_loss:3.8883 svd_entropy: attn_qk:H=0.7046,top10E=0.31,eRank=121.1,q75/q25=94.21 attn_vo:H=0.5860,top10E=0.47,eRank=66.3,q75/q25=113.28 mlp_w1:H=0.7536,top10E=0.27,eRank=185.8,q75/q25=15.19 mlp_w2:H=0.8687,top10E=0.15,eRank=335.6,q75/q25=12.00 vo_prod:H=0.4942,top10E=0.64,eRank=36.2,q75/q25=11849.03 train_time:709982ms step_avg:86.58ms +[2025-08-22 21:49:56] [Rank 0] PRINT: step:8200/10000 val_loss:3.8883 svd_entropy: attn_qk:H=0.7046,top10E=0.31,eRank=121.1,q75/q25=94.21 attn_vo:H=0.5860,top10E=0.47,eRank=66.3,q75/q25=113.28 mlp_w1:H=0.7536,top10E=0.27,eRank=185.8,q75/q25=15.19 mlp_w2:H=0.8687,top10E=0.15,eRank=335.6,q75/q25=12.00 vo_prod:H=0.4942,top10E=0.64,eRank=36.2,q75/q25=11849.03 train_time:709982ms step_avg:86.58ms +[2025-08-22 21:49:56] [Rank 0] step:8201/10000 train_time:709992ms step_avg:86.57ms +[2025-08-22 21:49:56] [Rank 0] step:8201/10000 train_time:709992ms step_avg:86.57ms +[2025-08-22 21:49:58] [Rank 0] step:8221/10000 train_time:711544ms step_avg:86.55ms +[2025-08-22 21:49:58] [Rank 0] step:8221/10000 train_time:711544ms step_avg:86.55ms +[2025-08-22 21:50:00] [Rank 0] step:8241/10000 train_time:713393ms 
step_avg:86.57ms +[2025-08-22 21:50:00] [Rank 0] step:8241/10000 train_time:713393ms step_avg:86.57ms +[2025-08-22 21:50:02] [Rank 0] step:8261/10000 train_time:715235ms step_avg:86.58ms +[2025-08-22 21:50:02] [Rank 0] step:8261/10000 train_time:715235ms step_avg:86.58ms +[2025-08-22 21:50:04] [Rank 0] step:8281/10000 train_time:717083ms step_avg:86.59ms +[2025-08-22 21:50:04] [Rank 0] step:8281/10000 train_time:717083ms step_avg:86.59ms +[2025-08-22 21:50:05] [Rank 0] step:8301/10000 train_time:718919ms step_avg:86.61ms +[2025-08-22 21:50:05] [Rank 0] step:8301/10000 train_time:718919ms step_avg:86.61ms +[2025-08-22 21:50:07] [Rank 0] step:8321/10000 train_time:720758ms step_avg:86.62ms +[2025-08-22 21:50:07] [Rank 0] step:8321/10000 train_time:720758ms step_avg:86.62ms +[2025-08-22 21:50:09] [Rank 0] step:8341/10000 train_time:722601ms step_avg:86.63ms +[2025-08-22 21:50:09] [Rank 0] step:8341/10000 train_time:722601ms step_avg:86.63ms +[2025-08-22 21:50:11] [Rank 0] step:8361/10000 train_time:724446ms step_avg:86.65ms +[2025-08-22 21:50:11] [Rank 0] step:8361/10000 train_time:724446ms step_avg:86.65ms +[2025-08-22 21:50:13] [Rank 0] step:8381/10000 train_time:726289ms step_avg:86.66ms +[2025-08-22 21:50:13] [Rank 0] step:8381/10000 train_time:726289ms step_avg:86.66ms +[2025-08-22 21:50:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:50:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:50:28] [Rank 0] PRINT: step:8400/10000 val_loss:3.8723 svd_entropy: attn_qk:H=0.7053,top10E=0.31,eRank=121.6,q75/q25=94.83 attn_vo:H=0.5871,top10E=0.46,eRank=66.8,q75/q25=113.30 mlp_w1:H=0.7543,top10E=0.27,eRank=186.6,q75/q25=15.17 mlp_w2:H=0.8693,top10E=0.15,eRank=336.8,q75/q25=11.97 vo_prod:H=0.4954,top10E=0.64,eRank=36.5,q75/q25=11831.50 train_time:728448ms step_avg:86.72ms +[2025-08-22 21:50:28] [Rank 0] PRINT: step:8400/10000 val_loss:3.8723 svd_entropy: attn_qk:H=0.7053,top10E=0.31,eRank=121.6,q75/q25=94.83 attn_vo:H=0.5871,top10E=0.46,eRank=66.8,q75/q25=113.30 mlp_w1:H=0.7543,top10E=0.27,eRank=186.6,q75/q25=15.17 mlp_w2:H=0.8693,top10E=0.15,eRank=336.8,q75/q25=11.97 vo_prod:H=0.4954,top10E=0.64,eRank=36.5,q75/q25=11831.50 train_time:728448ms step_avg:86.72ms +[2025-08-22 21:50:28] [Rank 0] step:8401/10000 train_time:728458ms step_avg:86.71ms +[2025-08-22 21:50:28] [Rank 0] step:8401/10000 train_time:728458ms step_avg:86.71ms +[2025-08-22 21:50:30] [Rank 0] step:8421/10000 train_time:729997ms step_avg:86.69ms +[2025-08-22 21:50:30] [Rank 0] step:8421/10000 train_time:729997ms step_avg:86.69ms +[2025-08-22 21:50:32] [Rank 0] step:8441/10000 train_time:731834ms step_avg:86.70ms +[2025-08-22 21:50:32] [Rank 0] step:8441/10000 train_time:731834ms step_avg:86.70ms +[2025-08-22 21:50:34] [Rank 0] step:8461/10000 train_time:733666ms step_avg:86.71ms +[2025-08-22 21:50:34] [Rank 0] step:8461/10000 train_time:733666ms step_avg:86.71ms +[2025-08-22 21:50:36] [Rank 0] step:8481/10000 train_time:735513ms step_avg:86.72ms +[2025-08-22 21:50:36] [Rank 0] step:8481/10000 train_time:735513ms step_avg:86.72ms +[2025-08-22 21:50:38] [Rank 0] step:8501/10000 train_time:737370ms step_avg:86.74ms +[2025-08-22 21:50:38] [Rank 0] step:8501/10000 train_time:737370ms step_avg:86.74ms +[2025-08-22 21:50:39] [Rank 0] step:8521/10000 train_time:739224ms step_avg:86.75ms +[2025-08-22 21:50:39] [Rank 0] step:8521/10000 train_time:739224ms step_avg:86.75ms +[2025-08-22 
21:50:41] [Rank 0] step:8541/10000 train_time:741075ms step_avg:86.77ms +[2025-08-22 21:50:41] [Rank 0] step:8541/10000 train_time:741075ms step_avg:86.77ms +[2025-08-22 21:50:43] [Rank 0] step:8561/10000 train_time:742928ms step_avg:86.78ms +[2025-08-22 21:50:43] [Rank 0] step:8561/10000 train_time:742928ms step_avg:86.78ms +[2025-08-22 21:50:45] [Rank 0] step:8581/10000 train_time:744774ms step_avg:86.79ms +[2025-08-22 21:50:45] [Rank 0] step:8581/10000 train_time:744774ms step_avg:86.79ms +[2025-08-22 21:50:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:50:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:51:00] [Rank 0] PRINT: step:8600/10000 val_loss:3.8600 svd_entropy: attn_qk:H=0.7059,top10E=0.31,eRank=122.1,q75/q25=94.28 attn_vo:H=0.5880,top10E=0.46,eRank=67.2,q75/q25=113.52 mlp_w1:H=0.7548,top10E=0.27,eRank=187.3,q75/q25=15.13 mlp_w2:H=0.8699,top10E=0.15,eRank=337.9,q75/q25=11.88 vo_prod:H=0.4963,top10E=0.63,eRank=36.8,q75/q25=11771.71 train_time:746931ms step_avg:86.85ms +[2025-08-22 21:51:00] [Rank 0] PRINT: step:8600/10000 val_loss:3.8600 svd_entropy: attn_qk:H=0.7059,top10E=0.31,eRank=122.1,q75/q25=94.28 attn_vo:H=0.5880,top10E=0.46,eRank=67.2,q75/q25=113.52 mlp_w1:H=0.7548,top10E=0.27,eRank=187.3,q75/q25=15.13 mlp_w2:H=0.8699,top10E=0.15,eRank=337.9,q75/q25=11.88 vo_prod:H=0.4963,top10E=0.63,eRank=36.8,q75/q25=11771.71 train_time:746931ms step_avg:86.85ms +[2025-08-22 21:51:00] [Rank 0] step:8601/10000 train_time:746941ms step_avg:86.84ms +[2025-08-22 21:51:00] [Rank 0] step:8601/10000 train_time:746941ms step_avg:86.84ms +[2025-08-22 21:51:02] [Rank 0] step:8621/10000 train_time:748488ms step_avg:86.82ms +[2025-08-22 21:51:02] [Rank 0] step:8621/10000 train_time:748488ms step_avg:86.82ms +[2025-08-22 21:51:04] [Rank 0] step:8641/10000 train_time:750328ms 
step_avg:86.83ms +[2025-08-22 21:51:04] [Rank 0] step:8641/10000 train_time:750328ms step_avg:86.83ms +[2025-08-22 21:51:06] [Rank 0] step:8661/10000 train_time:752171ms step_avg:86.85ms +[2025-08-22 21:51:06] [Rank 0] step:8661/10000 train_time:752171ms step_avg:86.85ms +[2025-08-22 21:51:08] [Rank 0] step:8681/10000 train_time:754013ms step_avg:86.86ms +[2025-08-22 21:51:08] [Rank 0] step:8681/10000 train_time:754013ms step_avg:86.86ms +[2025-08-22 21:51:10] [Rank 0] step:8701/10000 train_time:755858ms step_avg:86.87ms +[2025-08-22 21:51:10] [Rank 0] step:8701/10000 train_time:755858ms step_avg:86.87ms +[2025-08-22 21:51:12] [Rank 0] step:8721/10000 train_time:757700ms step_avg:86.88ms +[2025-08-22 21:51:12] [Rank 0] step:8721/10000 train_time:757700ms step_avg:86.88ms +[2025-08-22 21:51:13] [Rank 0] step:8741/10000 train_time:759544ms step_avg:86.89ms +[2025-08-22 21:51:13] [Rank 0] step:8741/10000 train_time:759544ms step_avg:86.89ms +[2025-08-22 21:51:15] [Rank 0] step:8761/10000 train_time:761388ms step_avg:86.91ms +[2025-08-22 21:51:15] [Rank 0] step:8761/10000 train_time:761388ms step_avg:86.91ms +[2025-08-22 21:51:17] [Rank 0] step:8781/10000 train_time:763244ms step_avg:86.92ms +[2025-08-22 21:51:17] [Rank 0] step:8781/10000 train_time:763244ms step_avg:86.92ms +[2025-08-22 21:51:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:51:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:51:32] [Rank 0] PRINT: step:8800/10000 val_loss:3.8474 svd_entropy: attn_qk:H=0.7064,top10E=0.31,eRank=122.5,q75/q25=94.55 attn_vo:H=0.5889,top10E=0.46,eRank=67.6,q75/q25=113.59 mlp_w1:H=0.7553,top10E=0.27,eRank=187.9,q75/q25=15.13 mlp_w2:H=0.8703,top10E=0.15,eRank=338.8,q75/q25=11.87 vo_prod:H=0.4972,top10E=0.63,eRank=37.0,q75/q25=11884.00 train_time:765407ms step_avg:86.98ms +[2025-08-22 21:51:32] [Rank 0] PRINT: step:8800/10000 val_loss:3.8474 svd_entropy: attn_qk:H=0.7064,top10E=0.31,eRank=122.5,q75/q25=94.55 attn_vo:H=0.5889,top10E=0.46,eRank=67.6,q75/q25=113.59 mlp_w1:H=0.7553,top10E=0.27,eRank=187.9,q75/q25=15.13 mlp_w2:H=0.8703,top10E=0.15,eRank=338.8,q75/q25=11.87 vo_prod:H=0.4972,top10E=0.63,eRank=37.0,q75/q25=11884.00 train_time:765407ms step_avg:86.98ms +[2025-08-22 21:51:33] [Rank 0] step:8801/10000 train_time:765418ms step_avg:86.97ms +[2025-08-22 21:51:33] [Rank 0] step:8801/10000 train_time:765418ms step_avg:86.97ms +[2025-08-22 21:51:34] [Rank 0] step:8821/10000 train_time:766949ms step_avg:86.95ms +[2025-08-22 21:51:34] [Rank 0] step:8821/10000 train_time:766949ms step_avg:86.95ms +[2025-08-22 21:51:36] [Rank 0] step:8841/10000 train_time:768805ms step_avg:86.96ms +[2025-08-22 21:51:36] [Rank 0] step:8841/10000 train_time:768805ms step_avg:86.96ms +[2025-08-22 21:51:38] [Rank 0] step:8861/10000 train_time:770648ms step_avg:86.97ms +[2025-08-22 21:51:38] [Rank 0] step:8861/10000 train_time:770648ms step_avg:86.97ms +[2025-08-22 21:51:40] [Rank 0] step:8881/10000 train_time:772486ms step_avg:86.98ms +[2025-08-22 21:51:40] [Rank 0] step:8881/10000 train_time:772486ms step_avg:86.98ms +[2025-08-22 21:51:42] [Rank 0] step:8901/10000 train_time:774335ms step_avg:86.99ms +[2025-08-22 21:51:42] [Rank 0] step:8901/10000 train_time:774335ms step_avg:86.99ms +[2025-08-22 21:51:44] [Rank 0] step:8921/10000 train_time:776184ms step_avg:87.01ms +[2025-08-22 21:51:44] [Rank 0] step:8921/10000 train_time:776184ms step_avg:87.01ms +[2025-08-22 
21:51:46] [Rank 0] step:8941/10000 train_time:778050ms step_avg:87.02ms +[2025-08-22 21:51:46] [Rank 0] step:8941/10000 train_time:778050ms step_avg:87.02ms +[2025-08-22 21:51:47] [Rank 0] step:8961/10000 train_time:779895ms step_avg:87.03ms +[2025-08-22 21:51:47] [Rank 0] step:8961/10000 train_time:779895ms step_avg:87.03ms +[2025-08-22 21:51:49] [Rank 0] step:8981/10000 train_time:781740ms step_avg:87.04ms +[2025-08-22 21:51:49] [Rank 0] step:8981/10000 train_time:781740ms step_avg:87.04ms +[2025-08-22 21:51:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:51:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:52:05] [Rank 0] PRINT: step:9000/10000 val_loss:3.8361 svd_entropy: attn_qk:H=0.7069,top10E=0.31,eRank=122.9,q75/q25=94.59 attn_vo:H=0.5897,top10E=0.46,eRank=68.0,q75/q25=113.49 mlp_w1:H=0.7557,top10E=0.27,eRank=188.5,q75/q25=15.13 mlp_w2:H=0.8708,top10E=0.15,eRank=339.7,q75/q25=11.81 vo_prod:H=0.4979,top10E=0.63,eRank=37.2,q75/q25=11975.20 train_time:783903ms step_avg:87.10ms +[2025-08-22 21:52:05] [Rank 0] PRINT: step:9000/10000 val_loss:3.8361 svd_entropy: attn_qk:H=0.7069,top10E=0.31,eRank=122.9,q75/q25=94.59 attn_vo:H=0.5897,top10E=0.46,eRank=68.0,q75/q25=113.49 mlp_w1:H=0.7557,top10E=0.27,eRank=188.5,q75/q25=15.13 mlp_w2:H=0.8708,top10E=0.15,eRank=339.7,q75/q25=11.81 vo_prod:H=0.4979,top10E=0.63,eRank=37.2,q75/q25=11975.20 train_time:783903ms step_avg:87.10ms +[2025-08-22 21:52:05] [Rank 0] step:9001/10000 train_time:783916ms step_avg:87.09ms +[2025-08-22 21:52:05] [Rank 0] step:9001/10000 train_time:783916ms step_avg:87.09ms +[2025-08-22 21:52:07] [Rank 0] step:9021/10000 train_time:785457ms step_avg:87.07ms +[2025-08-22 21:52:07] [Rank 0] step:9021/10000 train_time:785457ms step_avg:87.07ms +[2025-08-22 21:52:08] [Rank 0] step:9041/10000 train_time:787296ms 
step_avg:87.08ms +[2025-08-22 21:52:08] [Rank 0] step:9041/10000 train_time:787296ms step_avg:87.08ms +[2025-08-22 21:52:10] [Rank 0] step:9061/10000 train_time:789149ms step_avg:87.09ms +[2025-08-22 21:52:10] [Rank 0] step:9061/10000 train_time:789149ms step_avg:87.09ms +[2025-08-22 21:52:12] [Rank 0] step:9081/10000 train_time:791001ms step_avg:87.11ms +[2025-08-22 21:52:12] [Rank 0] step:9081/10000 train_time:791001ms step_avg:87.11ms +[2025-08-22 21:52:14] [Rank 0] step:9101/10000 train_time:792865ms step_avg:87.12ms +[2025-08-22 21:52:14] [Rank 0] step:9101/10000 train_time:792865ms step_avg:87.12ms +[2025-08-22 21:52:16] [Rank 0] step:9121/10000 train_time:794713ms step_avg:87.13ms +[2025-08-22 21:52:16] [Rank 0] step:9121/10000 train_time:794713ms step_avg:87.13ms +[2025-08-22 21:52:18] [Rank 0] step:9141/10000 train_time:796553ms step_avg:87.14ms +[2025-08-22 21:52:18] [Rank 0] step:9141/10000 train_time:796553ms step_avg:87.14ms +[2025-08-22 21:52:20] [Rank 0] step:9161/10000 train_time:798387ms step_avg:87.15ms +[2025-08-22 21:52:20] [Rank 0] step:9161/10000 train_time:798387ms step_avg:87.15ms +[2025-08-22 21:52:21] [Rank 0] step:9181/10000 train_time:800259ms step_avg:87.16ms +[2025-08-22 21:52:21] [Rank 0] step:9181/10000 train_time:800259ms step_avg:87.16ms +[2025-08-22 21:52:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:52:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:52:37] [Rank 0] PRINT: step:9200/10000 val_loss:3.8260 svd_entropy: attn_qk:H=0.7073,top10E=0.31,eRank=123.3,q75/q25=94.41 attn_vo:H=0.5903,top10E=0.46,eRank=68.3,q75/q25=114.10 mlp_w1:H=0.7560,top10E=0.27,eRank=188.9,q75/q25=15.13 mlp_w2:H=0.8711,top10E=0.15,eRank=340.4,q75/q25=11.78 vo_prod:H=0.4986,top10E=0.63,eRank=37.3,q75/q25=12135.73 train_time:802423ms step_avg:87.22ms +[2025-08-22 21:52:37] [Rank 0] PRINT: step:9200/10000 val_loss:3.8260 svd_entropy: attn_qk:H=0.7073,top10E=0.31,eRank=123.3,q75/q25=94.41 attn_vo:H=0.5903,top10E=0.46,eRank=68.3,q75/q25=114.10 mlp_w1:H=0.7560,top10E=0.27,eRank=188.9,q75/q25=15.13 mlp_w2:H=0.8711,top10E=0.15,eRank=340.4,q75/q25=11.78 vo_prod:H=0.4986,top10E=0.63,eRank=37.3,q75/q25=12135.73 train_time:802423ms step_avg:87.22ms +[2025-08-22 21:52:37] [Rank 0] step:9201/10000 train_time:802435ms step_avg:87.21ms +[2025-08-22 21:52:37] [Rank 0] step:9201/10000 train_time:802435ms step_avg:87.21ms +[2025-08-22 21:52:39] [Rank 0] step:9221/10000 train_time:803986ms step_avg:87.19ms +[2025-08-22 21:52:39] [Rank 0] step:9221/10000 train_time:803986ms step_avg:87.19ms +[2025-08-22 21:52:41] [Rank 0] step:9241/10000 train_time:805835ms step_avg:87.20ms +[2025-08-22 21:52:41] [Rank 0] step:9241/10000 train_time:805835ms step_avg:87.20ms +[2025-08-22 21:52:43] [Rank 0] step:9261/10000 train_time:807687ms step_avg:87.21ms +[2025-08-22 21:52:43] [Rank 0] step:9261/10000 train_time:807687ms step_avg:87.21ms +[2025-08-22 21:52:45] [Rank 0] step:9281/10000 train_time:809524ms step_avg:87.22ms +[2025-08-22 21:52:45] [Rank 0] step:9281/10000 train_time:809524ms step_avg:87.22ms +[2025-08-22 21:52:46] [Rank 0] step:9301/10000 train_time:811362ms step_avg:87.23ms +[2025-08-22 21:52:46] [Rank 0] step:9301/10000 train_time:811362ms step_avg:87.23ms +[2025-08-22 21:52:48] [Rank 0] step:9321/10000 train_time:813212ms step_avg:87.25ms +[2025-08-22 21:52:48] [Rank 0] step:9321/10000 train_time:813212ms step_avg:87.25ms +[2025-08-22 
21:52:50] [Rank 0] step:9341/10000 train_time:815058ms step_avg:87.26ms +[2025-08-22 21:52:50] [Rank 0] step:9341/10000 train_time:815058ms step_avg:87.26ms +[2025-08-22 21:52:52] [Rank 0] step:9361/10000 train_time:816908ms step_avg:87.27ms +[2025-08-22 21:52:52] [Rank 0] step:9361/10000 train_time:816908ms step_avg:87.27ms +[2025-08-22 21:52:54] [Rank 0] step:9381/10000 train_time:818772ms step_avg:87.28ms +[2025-08-22 21:52:54] [Rank 0] step:9381/10000 train_time:818772ms step_avg:87.28ms +[2025-08-22 21:52:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:52:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:53:09] [Rank 0] PRINT: step:9400/10000 val_loss:3.8171 svd_entropy: attn_qk:H=0.7076,top10E=0.31,eRank=123.5,q75/q25=94.28 attn_vo:H=0.5909,top10E=0.46,eRank=68.5,q75/q25=114.28 mlp_w1:H=0.7563,top10E=0.27,eRank=189.3,q75/q25=15.13 mlp_w2:H=0.8715,top10E=0.15,eRank=341.1,q75/q25=11.74 vo_prod:H=0.4991,top10E=0.63,eRank=37.5,q75/q25=12075.42 train_time:820946ms step_avg:87.33ms +[2025-08-22 21:53:09] [Rank 0] PRINT: step:9400/10000 val_loss:3.8171 svd_entropy: attn_qk:H=0.7076,top10E=0.31,eRank=123.5,q75/q25=94.28 attn_vo:H=0.5909,top10E=0.46,eRank=68.5,q75/q25=114.28 mlp_w1:H=0.7563,top10E=0.27,eRank=189.3,q75/q25=15.13 mlp_w2:H=0.8715,top10E=0.15,eRank=341.1,q75/q25=11.74 vo_prod:H=0.4991,top10E=0.63,eRank=37.5,q75/q25=12075.42 train_time:820946ms step_avg:87.33ms +[2025-08-22 21:53:09] [Rank 0] step:9401/10000 train_time:820955ms step_avg:87.33ms +[2025-08-22 21:53:09] [Rank 0] step:9401/10000 train_time:820955ms step_avg:87.33ms +[2025-08-22 21:53:11] [Rank 0] step:9421/10000 train_time:822493ms step_avg:87.30ms +[2025-08-22 21:53:11] [Rank 0] step:9421/10000 train_time:822493ms step_avg:87.30ms +[2025-08-22 21:53:13] [Rank 0] step:9441/10000 train_time:824337ms 
step_avg:87.31ms +[2025-08-22 21:53:13] [Rank 0] step:9441/10000 train_time:824337ms step_avg:87.31ms +[2025-08-22 21:53:15] [Rank 0] step:9461/10000 train_time:826194ms step_avg:87.33ms +[2025-08-22 21:53:15] [Rank 0] step:9461/10000 train_time:826194ms step_avg:87.33ms +[2025-08-22 21:53:17] [Rank 0] step:9481/10000 train_time:828047ms step_avg:87.34ms +[2025-08-22 21:53:17] [Rank 0] step:9481/10000 train_time:828047ms step_avg:87.34ms +[2025-08-22 21:53:19] [Rank 0] step:9501/10000 train_time:829907ms step_avg:87.35ms +[2025-08-22 21:53:19] [Rank 0] step:9501/10000 train_time:829907ms step_avg:87.35ms +[2025-08-22 21:53:21] [Rank 0] step:9521/10000 train_time:831751ms step_avg:87.36ms +[2025-08-22 21:53:21] [Rank 0] step:9521/10000 train_time:831751ms step_avg:87.36ms +[2025-08-22 21:53:22] [Rank 0] step:9541/10000 train_time:833599ms step_avg:87.37ms +[2025-08-22 21:53:22] [Rank 0] step:9541/10000 train_time:833599ms step_avg:87.37ms +[2025-08-22 21:53:24] [Rank 0] step:9561/10000 train_time:835449ms step_avg:87.38ms +[2025-08-22 21:53:24] [Rank 0] step:9561/10000 train_time:835449ms step_avg:87.38ms +[2025-08-22 21:53:26] [Rank 0] step:9581/10000 train_time:837299ms step_avg:87.39ms +[2025-08-22 21:53:26] [Rank 0] step:9581/10000 train_time:837299ms step_avg:87.39ms +[2025-08-22 21:53:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:53:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:53:42] [Rank 0] PRINT: step:9600/10000 val_loss:3.8082 svd_entropy: attn_qk:H=0.7079,top10E=0.31,eRank=123.8,q75/q25=94.45 attn_vo:H=0.5914,top10E=0.46,eRank=68.8,q75/q25=114.36 mlp_w1:H=0.7566,top10E=0.27,eRank=189.7,q75/q25=15.13 mlp_w2:H=0.8717,top10E=0.15,eRank=341.6,q75/q25=11.71 vo_prod:H=0.4997,top10E=0.63,eRank=37.6,q75/q25=12064.00 train_time:839482ms step_avg:87.45ms +[2025-08-22 21:53:42] [Rank 0] PRINT: step:9600/10000 val_loss:3.8082 svd_entropy: attn_qk:H=0.7079,top10E=0.31,eRank=123.8,q75/q25=94.45 attn_vo:H=0.5914,top10E=0.46,eRank=68.8,q75/q25=114.36 mlp_w1:H=0.7566,top10E=0.27,eRank=189.7,q75/q25=15.13 mlp_w2:H=0.8717,top10E=0.15,eRank=341.6,q75/q25=11.71 vo_prod:H=0.4997,top10E=0.63,eRank=37.6,q75/q25=12064.00 train_time:839482ms step_avg:87.45ms +[2025-08-22 21:53:42] [Rank 0] step:9601/10000 train_time:839492ms step_avg:87.44ms +[2025-08-22 21:53:42] [Rank 0] step:9601/10000 train_time:839492ms step_avg:87.44ms +[2025-08-22 21:53:44] [Rank 0] step:9621/10000 train_time:841026ms step_avg:87.42ms +[2025-08-22 21:53:44] [Rank 0] step:9621/10000 train_time:841026ms step_avg:87.42ms +[2025-08-22 21:53:45] [Rank 0] step:9641/10000 train_time:842871ms step_avg:87.43ms +[2025-08-22 21:53:45] [Rank 0] step:9641/10000 train_time:842871ms step_avg:87.43ms +[2025-08-22 21:53:47] [Rank 0] step:9661/10000 train_time:844740ms step_avg:87.44ms +[2025-08-22 21:53:47] [Rank 0] step:9661/10000 train_time:844740ms step_avg:87.44ms +[2025-08-22 21:53:49] [Rank 0] step:9681/10000 train_time:846606ms step_avg:87.45ms +[2025-08-22 21:53:49] [Rank 0] step:9681/10000 train_time:846606ms step_avg:87.45ms +[2025-08-22 21:53:51] [Rank 0] step:9701/10000 train_time:848490ms step_avg:87.46ms +[2025-08-22 21:53:51] [Rank 0] step:9701/10000 train_time:848490ms step_avg:87.46ms +[2025-08-22 21:53:53] [Rank 0] step:9721/10000 train_time:850355ms step_avg:87.48ms +[2025-08-22 21:53:53] [Rank 0] step:9721/10000 train_time:850355ms step_avg:87.48ms +[2025-08-22 
21:53:55] [Rank 0] step:9741/10000 train_time:852245ms step_avg:87.49ms +[2025-08-22 21:53:55] [Rank 0] step:9741/10000 train_time:852245ms step_avg:87.49ms +[2025-08-22 21:53:57] [Rank 0] step:9761/10000 train_time:854120ms step_avg:87.50ms +[2025-08-22 21:53:57] [Rank 0] step:9761/10000 train_time:854120ms step_avg:87.50ms +[2025-08-22 21:53:59] [Rank 0] step:9781/10000 train_time:856010ms step_avg:87.52ms +[2025-08-22 21:53:59] [Rank 0] step:9781/10000 train_time:856010ms step_avg:87.52ms +[2025-08-22 21:54:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:54:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:54:14] [Rank 0] PRINT: step:9800/10000 val_loss:3.8004 svd_entropy: attn_qk:H=0.7081,top10E=0.31,eRank=124.0,q75/q25=94.33 attn_vo:H=0.5917,top10E=0.46,eRank=68.9,q75/q25=114.43 mlp_w1:H=0.7568,top10E=0.27,eRank=189.9,q75/q25=15.12 mlp_w2:H=0.8719,top10E=0.15,eRank=342.0,q75/q25=11.68 vo_prod:H=0.4999,top10E=0.63,eRank=37.7,q75/q25=12038.65 train_time:858222ms step_avg:87.57ms +[2025-08-22 21:54:14] [Rank 0] PRINT: step:9800/10000 val_loss:3.8004 svd_entropy: attn_qk:H=0.7081,top10E=0.31,eRank=124.0,q75/q25=94.33 attn_vo:H=0.5917,top10E=0.46,eRank=68.9,q75/q25=114.43 mlp_w1:H=0.7568,top10E=0.27,eRank=189.9,q75/q25=15.12 mlp_w2:H=0.8719,top10E=0.15,eRank=342.0,q75/q25=11.68 vo_prod:H=0.4999,top10E=0.63,eRank=37.7,q75/q25=12038.65 train_time:858222ms step_avg:87.57ms +[2025-08-22 21:54:14] [Rank 0] step:9801/10000 train_time:858232ms step_avg:87.57ms +[2025-08-22 21:54:14] [Rank 0] step:9801/10000 train_time:858232ms step_avg:87.57ms +[2025-08-22 21:54:16] [Rank 0] step:9821/10000 train_time:859770ms step_avg:87.54ms +[2025-08-22 21:54:16] [Rank 0] step:9821/10000 train_time:859770ms step_avg:87.54ms +[2025-08-22 21:54:18] [Rank 0] step:9841/10000 train_time:861651ms 
step_avg:87.56ms +[2025-08-22 21:54:18] [Rank 0] step:9841/10000 train_time:861651ms step_avg:87.56ms +[2025-08-22 21:54:20] [Rank 0] step:9861/10000 train_time:863511ms step_avg:87.57ms +[2025-08-22 21:54:20] [Rank 0] step:9861/10000 train_time:863511ms step_avg:87.57ms +[2025-08-22 21:54:22] [Rank 0] step:9881/10000 train_time:865414ms step_avg:87.58ms +[2025-08-22 21:54:22] [Rank 0] step:9881/10000 train_time:865414ms step_avg:87.58ms +[2025-08-22 21:54:24] [Rank 0] step:9901/10000 train_time:867291ms step_avg:87.60ms +[2025-08-22 21:54:24] [Rank 0] step:9901/10000 train_time:867291ms step_avg:87.60ms +[2025-08-22 21:54:26] [Rank 0] step:9921/10000 train_time:869159ms step_avg:87.61ms +[2025-08-22 21:54:26] [Rank 0] step:9921/10000 train_time:869159ms step_avg:87.61ms +[2025-08-22 21:54:27] [Rank 0] step:9941/10000 train_time:871034ms step_avg:87.62ms +[2025-08-22 21:54:27] [Rank 0] step:9941/10000 train_time:871034ms step_avg:87.62ms +[2025-08-22 21:54:29] [Rank 0] step:9961/10000 train_time:872901ms step_avg:87.63ms +[2025-08-22 21:54:29] [Rank 0] step:9961/10000 train_time:872901ms step_avg:87.63ms +[2025-08-22 21:54:31] [Rank 0] step:9981/10000 train_time:874769ms step_avg:87.64ms +[2025-08-22 21:54:31] [Rank 0] step:9981/10000 train_time:874769ms step_avg:87.64ms +[2025-08-22 21:54:33] [Rank 0] step:10000/10000 train_time:876550ms step_avg:87.65ms +[2025-08-22 21:54:33] [Rank 0] step:10000/10000 train_time:876550ms step_avg:87.65ms +[2025-08-22 21:54:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:54:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:54:47] [Rank 0] PRINT: step:10000/10000 val_loss:3.7911 svd_entropy: attn_qk:H=0.7083,top10E=0.31,eRank=124.1,q75/q25=94.42 attn_vo:H=0.5920,top10E=0.46,eRank=69.0,q75/q25=114.44 mlp_w1:H=0.7569,top10E=0.27,eRank=190.1,q75/q25=15.10 mlp_w2:H=0.8721,top10E=0.15,eRank=342.3,q75/q25=11.67 vo_prod:H=0.5002,top10E=0.63,eRank=37.7,q75/q25=12220.91 train_time:876980ms step_avg:87.70ms +[2025-08-22 21:54:47] [Rank 0] PRINT: step:10000/10000 val_loss:3.7911 svd_entropy: attn_qk:H=0.7083,top10E=0.31,eRank=124.1,q75/q25=94.42 attn_vo:H=0.5920,top10E=0.46,eRank=69.0,q75/q25=114.44 mlp_w1:H=0.7569,top10E=0.27,eRank=190.1,q75/q25=15.10 mlp_w2:H=0.8721,top10E=0.15,eRank=342.3,q75/q25=11.67 vo_prod:H=0.5002,top10E=0.63,eRank=37.7,q75/q25=12220.91 train_time:876980ms step_avg:87.70ms +[2025-08-22 21:54:47] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 21:54:47 2025 --- +[2025-08-22 21:54:47] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 21:54:47 2025 --- +[2025-08-22 21:54:47] [Rank 0] PRINT: Peak memory allocated: 11559 MiB reserved: 11616 MiB +[2025-08-22 21:54:47] [Rank 0] PRINT: Peak memory allocated: 11559 MiB reserved: 11616 MiB diff --git a/logs_svd_gated/mode_6_param_gated_seed_41/config.json b/logs_svd_gated/mode_6_param_gated_seed_41/config.json new file mode 100644 index 0000000000000000000000000000000000000000..cbe01996ff0b4d26c992e256f0807ed2775bea75 --- /dev/null +++ b/logs_svd_gated/mode_6_param_gated_seed_41/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 41, + "optimizer_mode": 6, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "d6a04c7d-a3ea-461c-a052-ed4aa9b2c259", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_6_param_gated_seed_41/training_log_d6a04c7d-a3ea-461c-a052-ed4aa9b2c259.txt b/logs_svd_gated/mode_6_param_gated_seed_41/training_log_d6a04c7d-a3ea-461c-a052-ed4aa9b2c259.txt new file mode 100644 index 0000000000000000000000000000000000000000..7f810feed8c5877861e11355faeeb509d86c3424 --- /dev/null +++ b/logs_svd_gated/mode_6_param_gated_seed_41/training_log_d6a04c7d-a3ea-461c-a052-ed4aa9b2c259.txt @@ -0,0 +1,2926 @@ +[2025-08-22 11:27:36] [Rank 0] PRINT: --- Script Start: Fri Aug 22 11:27:36 2025 --- +[2025-08-22 11:27:36] [Rank 0] PRINT: --- Script Start: Fri Aug 22 11:27:36 2025 --- +[2025-08-22 11:27:36] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=6, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 11:27:36] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=6, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 11:27:36] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 11:27:36] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 11:27:36] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 11:27:36] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 11:27:36] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_6_param_gated_seed_41 +[2025-08-22 11:27:36] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_6_param_gated_seed_41 +[2025-08-22 11:27:36] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 11:27:36] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 11:27:36] [Rank 0] PRINT: Constructing model... +[2025-08-22 11:27:36] [Rank 0] PRINT: Constructing model... +[2025-08-22 11:27:38] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 11:27:38] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 11:27:38] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 11:27:38] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 11:27:38] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 11:27:38] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 11:27:38] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 6 +[2025-08-22 11:27:38] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 6 +[2025-08-22 11:27:38] [Rank 0] PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: 0.05). +[2025-08-22 11:27:38] [Rank 0] PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: 0.05). +[2025-08-22 11:27:38] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 11:27:38] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 11:27:38] [Rank 0] PRINT: Muon optimizer is active with 12 parameters. +[2025-08-22 11:27:38] [Rank 0] PRINT: Muon optimizer is active with 12 parameters. +[2025-08-22 11:27:38] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 11:27:38] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 11:27:38] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 11:27:38] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 11:27:38] [Rank 0] PRINT: Starting warmup... +[2025-08-22 11:27:38] [Rank 0] PRINT: Starting warmup... +[2025-08-22 11:28:23] [Rank 0] PRINT: Warmup complete. +[2025-08-22 11:28:23] [Rank 0] PRINT: Warmup complete. +[2025-08-22 11:28:24] [Rank 0] PRINT: Starting training... +[2025-08-22 11:28:24] [Rank 0] PRINT: Starting training... 
+[2025-08-22 11:28:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:28:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:28:42] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 11:28:42] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 11:28:44] [Rank 0] step:21/10000 train_time:1556ms step_avg:74.08ms +[2025-08-22 11:28:44] [Rank 0] step:21/10000 train_time:1556ms step_avg:74.08ms +[2025-08-22 11:28:46] [Rank 0] step:41/10000 train_time:3232ms step_avg:78.84ms +[2025-08-22 11:28:46] [Rank 0] step:41/10000 train_time:3232ms step_avg:78.84ms +[2025-08-22 11:28:47] [Rank 0] step:61/10000 train_time:4909ms step_avg:80.48ms +[2025-08-22 11:28:47] [Rank 0] step:61/10000 train_time:4909ms step_avg:80.48ms +[2025-08-22 11:28:49] [Rank 0] step:81/10000 train_time:6588ms step_avg:81.33ms +[2025-08-22 11:28:49] [Rank 0] step:81/10000 train_time:6588ms step_avg:81.33ms +[2025-08-22 11:28:51] [Rank 0] step:101/10000 train_time:8267ms step_avg:81.85ms +[2025-08-22 11:28:51] [Rank 0] step:101/10000 train_time:8267ms step_avg:81.85ms +[2025-08-22 11:28:53] [Rank 0] step:121/10000 train_time:9946ms step_avg:82.20ms +[2025-08-22 11:28:53] [Rank 0] step:121/10000 
train_time:9946ms step_avg:82.20ms +[2025-08-22 11:28:54] [Rank 0] step:141/10000 train_time:11626ms step_avg:82.46ms +[2025-08-22 11:28:54] [Rank 0] step:141/10000 train_time:11626ms step_avg:82.46ms +[2025-08-22 11:28:56] [Rank 0] step:161/10000 train_time:13308ms step_avg:82.66ms +[2025-08-22 11:28:56] [Rank 0] step:161/10000 train_time:13308ms step_avg:82.66ms +[2025-08-22 11:28:58] [Rank 0] step:181/10000 train_time:14989ms step_avg:82.81ms +[2025-08-22 11:28:58] [Rank 0] step:181/10000 train_time:14989ms step_avg:82.81ms +[2025-08-22 11:28:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:28:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:29:13] [Rank 0] PRINT: step:200/10000 val_loss:6.5852 svd_entropy: attn_qk:H=0.3506,top10E=0.84,eRank=15.0,q75/q25=21.77 attn_vo:H=0.1784,top10E=0.98,eRank=3.6,q75/q25=80.19 mlp_w1:H=0.4699,top10E=0.73,eRank=24.0,q75/q25=6.82 mlp_w2:H=0.7291,top10E=0.26,eRank=130.7,q75/q25=21.03 vo_prod:H=0.0685,top10E=1.00,eRank=1.7,q75/q25=572.41 train_time:16837ms step_avg:84.19ms +[2025-08-22 11:29:13] [Rank 0] PRINT: step:200/10000 val_loss:6.5852 svd_entropy: attn_qk:H=0.3506,top10E=0.84,eRank=15.0,q75/q25=21.77 attn_vo:H=0.1784,top10E=0.98,eRank=3.6,q75/q25=80.19 mlp_w1:H=0.4699,top10E=0.73,eRank=24.0,q75/q25=6.82 mlp_w2:H=0.7291,top10E=0.26,eRank=130.7,q75/q25=21.03 vo_prod:H=0.0685,top10E=1.00,eRank=1.7,q75/q25=572.41 train_time:16837ms step_avg:84.19ms +[2025-08-22 11:29:13] [Rank 0] step:201/10000 train_time:16849ms step_avg:83.82ms +[2025-08-22 11:29:13] [Rank 0] step:201/10000 train_time:16849ms step_avg:83.82ms +[2025-08-22 11:29:15] [Rank 0] step:221/10000 train_time:18517ms step_avg:83.79ms +[2025-08-22 11:29:15] [Rank 0] step:221/10000 train_time:18517ms step_avg:83.79ms +[2025-08-22 11:29:16] [Rank 0] step:241/10000 
train_time:20046ms step_avg:83.18ms +[2025-08-22 11:29:16] [Rank 0] step:241/10000 train_time:20046ms step_avg:83.18ms +[2025-08-22 11:29:18] [Rank 0] step:261/10000 train_time:21725ms step_avg:83.24ms +[2025-08-22 11:29:18] [Rank 0] step:261/10000 train_time:21725ms step_avg:83.24ms +[2025-08-22 11:29:20] [Rank 0] step:281/10000 train_time:23403ms step_avg:83.28ms +[2025-08-22 11:29:20] [Rank 0] step:281/10000 train_time:23403ms step_avg:83.28ms +[2025-08-22 11:29:21] [Rank 0] step:301/10000 train_time:25081ms step_avg:83.32ms +[2025-08-22 11:29:21] [Rank 0] step:301/10000 train_time:25081ms step_avg:83.32ms +[2025-08-22 11:29:23] [Rank 0] step:321/10000 train_time:26758ms step_avg:83.36ms +[2025-08-22 11:29:23] [Rank 0] step:321/10000 train_time:26758ms step_avg:83.36ms +[2025-08-22 11:29:25] [Rank 0] step:341/10000 train_time:28437ms step_avg:83.39ms +[2025-08-22 11:29:25] [Rank 0] step:341/10000 train_time:28437ms step_avg:83.39ms +[2025-08-22 11:29:26] [Rank 0] step:361/10000 train_time:30117ms step_avg:83.43ms +[2025-08-22 11:29:26] [Rank 0] step:361/10000 train_time:30117ms step_avg:83.43ms +[2025-08-22 11:29:28] [Rank 0] step:381/10000 train_time:31795ms step_avg:83.45ms +[2025-08-22 11:29:28] [Rank 0] step:381/10000 train_time:31795ms step_avg:83.45ms +[2025-08-22 11:29:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:29:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:29:43] [Rank 0] PRINT: step:400/10000 val_loss:5.7468 svd_entropy: attn_qk:H=0.4963,top10E=0.66,eRank=35.3,q75/q25=45.01 attn_vo:H=0.2959,top10E=0.91,eRank=9.6,q75/q25=46.88 mlp_w1:H=0.5710,top10E=0.59,eRank=51.1,q75/q25=7.27 mlp_w2:H=0.8656,top10E=0.11,eRank=327.9,q75/q25=16.32 vo_prod:H=0.1657,top10E=0.99,eRank=3.9,q75/q25=394.84 train_time:33643ms step_avg:84.11ms +[2025-08-22 11:29:43] [Rank 0] PRINT: step:400/10000 val_loss:5.7468 svd_entropy: attn_qk:H=0.4963,top10E=0.66,eRank=35.3,q75/q25=45.01 attn_vo:H=0.2959,top10E=0.91,eRank=9.6,q75/q25=46.88 mlp_w1:H=0.5710,top10E=0.59,eRank=51.1,q75/q25=7.27 mlp_w2:H=0.8656,top10E=0.11,eRank=327.9,q75/q25=16.32 vo_prod:H=0.1657,top10E=0.99,eRank=3.9,q75/q25=394.84 train_time:33643ms step_avg:84.11ms +[2025-08-22 11:29:43] [Rank 0] step:401/10000 train_time:33655ms step_avg:83.93ms +[2025-08-22 11:29:43] [Rank 0] step:401/10000 train_time:33655ms step_avg:83.93ms +[2025-08-22 11:29:45] [Rank 0] step:421/10000 train_time:35187ms step_avg:83.58ms +[2025-08-22 11:29:45] [Rank 0] step:421/10000 train_time:35187ms step_avg:83.58ms +[2025-08-22 11:29:46] [Rank 0] step:441/10000 train_time:36864ms step_avg:83.59ms +[2025-08-22 11:29:46] [Rank 0] step:441/10000 train_time:36864ms step_avg:83.59ms +[2025-08-22 11:29:48] [Rank 0] step:461/10000 train_time:38540ms step_avg:83.60ms +[2025-08-22 11:29:48] [Rank 0] step:461/10000 train_time:38540ms step_avg:83.60ms +[2025-08-22 11:29:50] [Rank 0] step:481/10000 train_time:40220ms step_avg:83.62ms +[2025-08-22 11:29:50] [Rank 0] step:481/10000 train_time:40220ms step_avg:83.62ms +[2025-08-22 11:29:51] [Rank 0] step:501/10000 train_time:41899ms step_avg:83.63ms +[2025-08-22 11:29:51] [Rank 0] step:501/10000 train_time:41899ms step_avg:83.63ms +[2025-08-22 11:29:53] [Rank 0] step:521/10000 train_time:43578ms step_avg:83.64ms +[2025-08-22 11:29:53] [Rank 0] step:521/10000 train_time:43578ms step_avg:83.64ms +[2025-08-22 11:29:55] [Rank 0] step:541/10000 train_time:45257ms 
step_avg:83.65ms +[2025-08-22 11:29:55] [Rank 0] step:541/10000 train_time:45257ms step_avg:83.65ms +[2025-08-22 11:29:56] [Rank 0] step:561/10000 train_time:46937ms step_avg:83.67ms +[2025-08-22 11:29:56] [Rank 0] step:561/10000 train_time:46937ms step_avg:83.67ms +[2025-08-22 11:29:58] [Rank 0] step:581/10000 train_time:48619ms step_avg:83.68ms +[2025-08-22 11:29:58] [Rank 0] step:581/10000 train_time:48619ms step_avg:83.68ms +[2025-08-22 11:30:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:30:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:30:13] [Rank 0] PRINT: step:600/10000 val_loss:5.3859 svd_entropy: attn_qk:H=0.5615,top10E=0.57,eRank=47.9,q75/q25=65.06 attn_vo:H=0.3522,top10E=0.85,eRank=15.3,q75/q25=38.71 mlp_w1:H=0.6059,top10E=0.54,eRank=67.0,q75/q25=7.31 mlp_w2:H=0.8886,top10E=0.10,eRank=379.6,q75/q25=10.88 vo_prod:H=0.2159,top10E=0.95,eRank=6.4,q75/q25=392.02 train_time:50467ms step_avg:84.11ms +[2025-08-22 11:30:13] [Rank 0] PRINT: step:600/10000 val_loss:5.3859 svd_entropy: attn_qk:H=0.5615,top10E=0.57,eRank=47.9,q75/q25=65.06 attn_vo:H=0.3522,top10E=0.85,eRank=15.3,q75/q25=38.71 mlp_w1:H=0.6059,top10E=0.54,eRank=67.0,q75/q25=7.31 mlp_w2:H=0.8886,top10E=0.10,eRank=379.6,q75/q25=10.88 vo_prod:H=0.2159,top10E=0.95,eRank=6.4,q75/q25=392.02 train_time:50467ms step_avg:84.11ms +[2025-08-22 11:30:13] [Rank 0] step:601/10000 train_time:50480ms step_avg:83.99ms +[2025-08-22 11:30:13] [Rank 0] step:601/10000 train_time:50480ms step_avg:83.99ms +[2025-08-22 11:30:15] [Rank 0] step:621/10000 train_time:52000ms step_avg:83.74ms +[2025-08-22 11:30:15] [Rank 0] step:621/10000 train_time:52000ms step_avg:83.74ms +[2025-08-22 11:30:17] [Rank 0] step:641/10000 train_time:53679ms step_avg:83.74ms +[2025-08-22 11:30:17] [Rank 0] step:641/10000 train_time:53679ms 
step_avg:83.74ms +[2025-08-22 11:30:18] [Rank 0] step:661/10000 train_time:55359ms step_avg:83.75ms +[2025-08-22 11:30:18] [Rank 0] step:661/10000 train_time:55359ms step_avg:83.75ms +[2025-08-22 11:30:20] [Rank 0] step:681/10000 train_time:57040ms step_avg:83.76ms +[2025-08-22 11:30:20] [Rank 0] step:681/10000 train_time:57040ms step_avg:83.76ms +[2025-08-22 11:30:22] [Rank 0] step:701/10000 train_time:58720ms step_avg:83.77ms +[2025-08-22 11:30:22] [Rank 0] step:701/10000 train_time:58720ms step_avg:83.77ms +[2025-08-22 11:30:23] [Rank 0] step:721/10000 train_time:60402ms step_avg:83.77ms +[2025-08-22 11:30:23] [Rank 0] step:721/10000 train_time:60402ms step_avg:83.77ms +[2025-08-22 11:30:25] [Rank 0] step:741/10000 train_time:62083ms step_avg:83.78ms +[2025-08-22 11:30:25] [Rank 0] step:741/10000 train_time:62083ms step_avg:83.78ms +[2025-08-22 11:30:27] [Rank 0] step:761/10000 train_time:63778ms step_avg:83.81ms +[2025-08-22 11:30:27] [Rank 0] step:761/10000 train_time:63778ms step_avg:83.81ms +[2025-08-22 11:30:28] [Rank 0] step:781/10000 train_time:65474ms step_avg:83.83ms +[2025-08-22 11:30:28] [Rank 0] step:781/10000 train_time:65474ms step_avg:83.83ms +[2025-08-22 11:30:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:30:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:30:44] [Rank 0] PRINT: step:800/10000 val_loss:5.1291 svd_entropy: attn_qk:H=0.5744,top10E=0.54,eRank=51.9,q75/q25=72.12 attn_vo:H=0.3828,top10E=0.82,eRank=19.7,q75/q25=40.66 mlp_w1:H=0.6265,top10E=0.50,eRank=77.8,q75/q25=7.42 mlp_w2:H=0.8952,top10E=0.10,eRank=396.3,q75/q25=9.39 vo_prod:H=0.2500,top10E=0.93,eRank=8.3,q75/q25=465.96 train_time:67341ms step_avg:84.18ms +[2025-08-22 11:30:44] [Rank 0] PRINT: step:800/10000 val_loss:5.1291 svd_entropy: attn_qk:H=0.5744,top10E=0.54,eRank=51.9,q75/q25=72.12 attn_vo:H=0.3828,top10E=0.82,eRank=19.7,q75/q25=40.66 mlp_w1:H=0.6265,top10E=0.50,eRank=77.8,q75/q25=7.42 mlp_w2:H=0.8952,top10E=0.10,eRank=396.3,q75/q25=9.39 vo_prod:H=0.2500,top10E=0.93,eRank=8.3,q75/q25=465.96 train_time:67341ms step_avg:84.18ms +[2025-08-22 11:30:44] [Rank 0] step:801/10000 train_time:67354ms step_avg:84.09ms +[2025-08-22 11:30:44] [Rank 0] step:801/10000 train_time:67354ms step_avg:84.09ms +[2025-08-22 11:30:46] [Rank 0] step:821/10000 train_time:68879ms step_avg:83.90ms +[2025-08-22 11:30:46] [Rank 0] step:821/10000 train_time:68879ms step_avg:83.90ms +[2025-08-22 11:30:47] [Rank 0] step:841/10000 train_time:70569ms step_avg:83.91ms +[2025-08-22 11:30:47] [Rank 0] step:841/10000 train_time:70569ms step_avg:83.91ms +[2025-08-22 11:30:49] [Rank 0] step:861/10000 train_time:72263ms step_avg:83.93ms +[2025-08-22 11:30:49] [Rank 0] step:861/10000 train_time:72263ms step_avg:83.93ms +[2025-08-22 11:30:51] [Rank 0] step:881/10000 train_time:73955ms step_avg:83.94ms +[2025-08-22 11:30:51] [Rank 0] step:881/10000 train_time:73955ms step_avg:83.94ms +[2025-08-22 11:30:52] [Rank 0] step:901/10000 train_time:75648ms step_avg:83.96ms +[2025-08-22 11:30:52] [Rank 0] step:901/10000 train_time:75648ms step_avg:83.96ms +[2025-08-22 11:30:54] [Rank 0] step:921/10000 train_time:77341ms step_avg:83.98ms +[2025-08-22 11:30:54] [Rank 0] step:921/10000 train_time:77341ms step_avg:83.98ms +[2025-08-22 11:30:56] [Rank 0] step:941/10000 train_time:79035ms 
step_avg:83.99ms +[2025-08-22 11:30:56] [Rank 0] step:941/10000 train_time:79035ms step_avg:83.99ms +[2025-08-22 11:30:57] [Rank 0] step:961/10000 train_time:80731ms step_avg:84.01ms +[2025-08-22 11:30:57] [Rank 0] step:961/10000 train_time:80731ms step_avg:84.01ms +[2025-08-22 11:30:59] [Rank 0] step:981/10000 train_time:82425ms step_avg:84.02ms +[2025-08-22 11:30:59] [Rank 0] step:981/10000 train_time:82425ms step_avg:84.02ms +[2025-08-22 11:31:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:31:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:31:15] [Rank 0] PRINT: step:1000/10000 val_loss:4.9477 svd_entropy: attn_qk:H=0.5828,top10E=0.53,eRank=55.6,q75/q25=72.32 attn_vo:H=0.4074,top10E=0.79,eRank=23.9,q75/q25=44.80 mlp_w1:H=0.6409,top10E=0.48,eRank=86.0,q75/q25=7.60 mlp_w2:H=0.8995,top10E=0.10,eRank=407.5,q75/q25=8.79 vo_prod:H=0.2788,top10E=0.90,eRank=10.3,q75/q25=601.38 train_time:84289ms step_avg:84.29ms +[2025-08-22 11:31:15] [Rank 0] PRINT: step:1000/10000 val_loss:4.9477 svd_entropy: attn_qk:H=0.5828,top10E=0.53,eRank=55.6,q75/q25=72.32 attn_vo:H=0.4074,top10E=0.79,eRank=23.9,q75/q25=44.80 mlp_w1:H=0.6409,top10E=0.48,eRank=86.0,q75/q25=7.60 mlp_w2:H=0.8995,top10E=0.10,eRank=407.5,q75/q25=8.79 vo_prod:H=0.2788,top10E=0.90,eRank=10.3,q75/q25=601.38 train_time:84289ms step_avg:84.29ms +[2025-08-22 11:31:15] [Rank 0] step:1001/10000 train_time:84301ms step_avg:84.22ms +[2025-08-22 11:31:15] [Rank 0] step:1001/10000 train_time:84301ms step_avg:84.22ms +[2025-08-22 11:31:16] [Rank 0] step:1021/10000 train_time:85841ms step_avg:84.08ms +[2025-08-22 11:31:16] [Rank 0] step:1021/10000 train_time:85841ms step_avg:84.08ms +[2025-08-22 11:31:18] [Rank 0] step:1041/10000 train_time:87531ms step_avg:84.08ms +[2025-08-22 11:31:18] [Rank 0] step:1041/10000 train_time:87531ms 
step_avg:84.08ms +[2025-08-22 11:31:20] [Rank 0] step:1061/10000 train_time:89222ms step_avg:84.09ms +[2025-08-22 11:31:20] [Rank 0] step:1061/10000 train_time:89222ms step_avg:84.09ms +[2025-08-22 11:31:21] [Rank 0] step:1081/10000 train_time:90915ms step_avg:84.10ms +[2025-08-22 11:31:21] [Rank 0] step:1081/10000 train_time:90915ms step_avg:84.10ms +[2025-08-22 11:31:23] [Rank 0] step:1101/10000 train_time:92611ms step_avg:84.12ms +[2025-08-22 11:31:23] [Rank 0] step:1101/10000 train_time:92611ms step_avg:84.12ms +[2025-08-22 11:31:25] [Rank 0] step:1121/10000 train_time:94305ms step_avg:84.13ms +[2025-08-22 11:31:25] [Rank 0] step:1121/10000 train_time:94305ms step_avg:84.13ms +[2025-08-22 11:31:27] [Rank 0] step:1141/10000 train_time:95998ms step_avg:84.14ms +[2025-08-22 11:31:27] [Rank 0] step:1141/10000 train_time:95998ms step_avg:84.14ms +[2025-08-22 11:31:28] [Rank 0] step:1161/10000 train_time:97693ms step_avg:84.15ms +[2025-08-22 11:31:28] [Rank 0] step:1161/10000 train_time:97693ms step_avg:84.15ms +[2025-08-22 11:31:30] [Rank 0] step:1181/10000 train_time:99389ms step_avg:84.16ms +[2025-08-22 11:31:30] [Rank 0] step:1181/10000 train_time:99389ms step_avg:84.16ms +[2025-08-22 11:31:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:31:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:31:45] [Rank 0] PRINT: step:1200/10000 val_loss:4.7966 svd_entropy: attn_qk:H=0.5884,top10E=0.52,eRank=58.4,q75/q25=74.41 attn_vo:H=0.4259,top10E=0.77,eRank=27.8,q75/q25=46.83 mlp_w1:H=0.6501,top10E=0.46,eRank=92.3,q75/q25=7.93 mlp_w2:H=0.9026,top10E=0.10,eRank=415.9,q75/q25=8.51 vo_prod:H=0.3020,top10E=0.88,eRank=12.3,q75/q25=742.37 train_time:101255ms step_avg:84.38ms +[2025-08-22 11:31:45] [Rank 0] PRINT: step:1200/10000 val_loss:4.7966 svd_entropy: attn_qk:H=0.5884,top10E=0.52,eRank=58.4,q75/q25=74.41 attn_vo:H=0.4259,top10E=0.77,eRank=27.8,q75/q25=46.83 mlp_w1:H=0.6501,top10E=0.46,eRank=92.3,q75/q25=7.93 mlp_w2:H=0.9026,top10E=0.10,eRank=415.9,q75/q25=8.51 vo_prod:H=0.3020,top10E=0.88,eRank=12.3,q75/q25=742.37 train_time:101255ms step_avg:84.38ms +[2025-08-22 11:31:45] [Rank 0] step:1201/10000 train_time:101268ms step_avg:84.32ms +[2025-08-22 11:31:45] [Rank 0] step:1201/10000 train_time:101268ms step_avg:84.32ms +[2025-08-22 11:31:47] [Rank 0] step:1221/10000 train_time:102800ms step_avg:84.19ms +[2025-08-22 11:31:47] [Rank 0] step:1221/10000 train_time:102800ms step_avg:84.19ms +[2025-08-22 11:31:49] [Rank 0] step:1241/10000 train_time:104494ms step_avg:84.20ms +[2025-08-22 11:31:49] [Rank 0] step:1241/10000 train_time:104494ms step_avg:84.20ms +[2025-08-22 11:31:50] [Rank 0] step:1261/10000 train_time:106186ms step_avg:84.21ms +[2025-08-22 11:31:50] [Rank 0] step:1261/10000 train_time:106186ms step_avg:84.21ms +[2025-08-22 11:31:52] [Rank 0] step:1281/10000 train_time:107882ms step_avg:84.22ms +[2025-08-22 11:31:52] [Rank 0] step:1281/10000 train_time:107882ms step_avg:84.22ms +[2025-08-22 11:31:54] [Rank 0] step:1301/10000 train_time:109578ms step_avg:84.23ms +[2025-08-22 11:31:54] [Rank 0] step:1301/10000 train_time:109578ms step_avg:84.23ms +[2025-08-22 11:31:55] [Rank 0] step:1321/10000 train_time:111277ms step_avg:84.24ms +[2025-08-22 11:31:55] [Rank 0] step:1321/10000 train_time:111277ms step_avg:84.24ms +[2025-08-22 11:31:57] [Rank 0] 
step:1341/10000 train_time:112975ms step_avg:84.25ms +[2025-08-22 11:31:57] [Rank 0] step:1341/10000 train_time:112975ms step_avg:84.25ms +[2025-08-22 11:31:59] [Rank 0] step:1361/10000 train_time:114674ms step_avg:84.26ms +[2025-08-22 11:31:59] [Rank 0] step:1361/10000 train_time:114674ms step_avg:84.26ms +[2025-08-22 11:32:01] [Rank 0] step:1381/10000 train_time:116372ms step_avg:84.27ms +[2025-08-22 11:32:01] [Rank 0] step:1381/10000 train_time:116372ms step_avg:84.27ms +[2025-08-22 11:32:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:32:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:32:16] [Rank 0] PRINT: step:1400/10000 val_loss:4.7070 svd_entropy: attn_qk:H=0.5942,top10E=0.50,eRank=61.2,q75/q25=76.63 attn_vo:H=0.4422,top10E=0.75,eRank=31.4,q75/q25=49.91 mlp_w1:H=0.6581,top10E=0.44,eRank=98.0,q75/q25=8.26 mlp_w2:H=0.9048,top10E=0.09,eRank=422.3,q75/q25=8.47 vo_prod:H=0.3230,top10E=0.87,eRank=14.3,q75/q25=935.17 train_time:118243ms step_avg:84.46ms +[2025-08-22 11:32:16] [Rank 0] PRINT: step:1400/10000 val_loss:4.7070 svd_entropy: attn_qk:H=0.5942,top10E=0.50,eRank=61.2,q75/q25=76.63 attn_vo:H=0.4422,top10E=0.75,eRank=31.4,q75/q25=49.91 mlp_w1:H=0.6581,top10E=0.44,eRank=98.0,q75/q25=8.26 mlp_w2:H=0.9048,top10E=0.09,eRank=422.3,q75/q25=8.47 vo_prod:H=0.3230,top10E=0.87,eRank=14.3,q75/q25=935.17 train_time:118243ms step_avg:84.46ms +[2025-08-22 11:32:16] [Rank 0] step:1401/10000 train_time:118255ms step_avg:84.41ms +[2025-08-22 11:32:16] [Rank 0] step:1401/10000 train_time:118255ms step_avg:84.41ms +[2025-08-22 11:32:18] [Rank 0] step:1421/10000 train_time:119797ms step_avg:84.30ms +[2025-08-22 11:32:18] [Rank 0] step:1421/10000 train_time:119797ms step_avg:84.30ms +[2025-08-22 11:32:19] [Rank 0] step:1441/10000 train_time:121490ms step_avg:84.31ms +[2025-08-22 
11:32:19] [Rank 0] step:1441/10000 train_time:121490ms step_avg:84.31ms +[2025-08-22 11:32:21] [Rank 0] step:1461/10000 train_time:123182ms step_avg:84.31ms +[2025-08-22 11:32:21] [Rank 0] step:1461/10000 train_time:123182ms step_avg:84.31ms +[2025-08-22 11:32:23] [Rank 0] step:1481/10000 train_time:124876ms step_avg:84.32ms +[2025-08-22 11:32:23] [Rank 0] step:1481/10000 train_time:124876ms step_avg:84.32ms +[2025-08-22 11:32:24] [Rank 0] step:1501/10000 train_time:126613ms step_avg:84.35ms +[2025-08-22 11:32:24] [Rank 0] step:1501/10000 train_time:126613ms step_avg:84.35ms +[2025-08-22 11:32:26] [Rank 0] step:1521/10000 train_time:128285ms step_avg:84.34ms +[2025-08-22 11:32:26] [Rank 0] step:1521/10000 train_time:128285ms step_avg:84.34ms +[2025-08-22 11:32:28] [Rank 0] step:1541/10000 train_time:130048ms step_avg:84.39ms +[2025-08-22 11:32:28] [Rank 0] step:1541/10000 train_time:130048ms step_avg:84.39ms +[2025-08-22 11:32:30] [Rank 0] step:1561/10000 train_time:131755ms step_avg:84.40ms +[2025-08-22 11:32:30] [Rank 0] step:1561/10000 train_time:131755ms step_avg:84.40ms +[2025-08-22 11:32:31] [Rank 0] step:1581/10000 train_time:133460ms step_avg:84.42ms +[2025-08-22 11:32:31] [Rank 0] step:1581/10000 train_time:133460ms step_avg:84.42ms +[2025-08-22 11:32:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:32:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:32:46] [Rank 0] PRINT: step:1600/10000 val_loss:4.5914 svd_entropy: attn_qk:H=0.5990,top10E=0.50,eRank=63.6,q75/q25=78.45 attn_vo:H=0.4562,top10E=0.73,eRank=34.9,q75/q25=52.92 mlp_w1:H=0.6656,top10E=0.43,eRank=103.2,q75/q25=8.61 mlp_w2:H=0.9068,top10E=0.09,eRank=428.1,q75/q25=8.40 vo_prod:H=0.3397,top10E=0.85,eRank=16.2,q75/q25=1139.92 train_time:135338ms step_avg:84.59ms +[2025-08-22 11:32:46] [Rank 0] PRINT: step:1600/10000 val_loss:4.5914 svd_entropy: attn_qk:H=0.5990,top10E=0.50,eRank=63.6,q75/q25=78.45 attn_vo:H=0.4562,top10E=0.73,eRank=34.9,q75/q25=52.92 mlp_w1:H=0.6656,top10E=0.43,eRank=103.2,q75/q25=8.61 mlp_w2:H=0.9068,top10E=0.09,eRank=428.1,q75/q25=8.40 vo_prod:H=0.3397,top10E=0.85,eRank=16.2,q75/q25=1139.92 train_time:135338ms step_avg:84.59ms +[2025-08-22 11:32:47] [Rank 0] step:1601/10000 train_time:135351ms step_avg:84.54ms +[2025-08-22 11:32:47] [Rank 0] step:1601/10000 train_time:135351ms step_avg:84.54ms +[2025-08-22 11:32:48] [Rank 0] step:1621/10000 train_time:136896ms step_avg:84.45ms +[2025-08-22 11:32:48] [Rank 0] step:1621/10000 train_time:136896ms step_avg:84.45ms +[2025-08-22 11:32:50] [Rank 0] step:1641/10000 train_time:138598ms step_avg:84.46ms +[2025-08-22 11:32:50] [Rank 0] step:1641/10000 train_time:138598ms step_avg:84.46ms +[2025-08-22 11:32:52] [Rank 0] step:1661/10000 train_time:140302ms step_avg:84.47ms +[2025-08-22 11:32:52] [Rank 0] step:1661/10000 train_time:140302ms step_avg:84.47ms +[2025-08-22 11:32:53] [Rank 0] step:1681/10000 train_time:142007ms step_avg:84.48ms +[2025-08-22 11:32:53] [Rank 0] step:1681/10000 train_time:142007ms step_avg:84.48ms +[2025-08-22 11:32:55] [Rank 0] step:1701/10000 train_time:143715ms step_avg:84.49ms +[2025-08-22 11:32:55] [Rank 0] step:1701/10000 train_time:143715ms step_avg:84.49ms +[2025-08-22 11:32:57] [Rank 0] step:1721/10000 train_time:145421ms step_avg:84.50ms +[2025-08-22 11:32:57] [Rank 0] step:1721/10000 train_time:145421ms step_avg:84.50ms +[2025-08-22 11:32:58] 
[Rank 0] step:1741/10000 train_time:147125ms step_avg:84.51ms +[2025-08-22 11:32:58] [Rank 0] step:1741/10000 train_time:147125ms step_avg:84.51ms +[2025-08-22 11:33:00] [Rank 0] step:1761/10000 train_time:148829ms step_avg:84.51ms +[2025-08-22 11:33:00] [Rank 0] step:1761/10000 train_time:148829ms step_avg:84.51ms +[2025-08-22 11:33:02] [Rank 0] step:1781/10000 train_time:150534ms step_avg:84.52ms +[2025-08-22 11:33:02] [Rank 0] step:1781/10000 train_time:150534ms step_avg:84.52ms +[2025-08-22 11:33:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:33:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:33:17] [Rank 0] PRINT: step:1800/10000 val_loss:4.5096 svd_entropy: attn_qk:H=0.6037,top10E=0.49,eRank=65.9,q75/q25=78.62 attn_vo:H=0.4678,top10E=0.71,eRank=38.1,q75/q25=56.67 mlp_w1:H=0.6727,top10E=0.42,eRank=108.3,q75/q25=8.94 mlp_w2:H=0.9083,top10E=0.09,eRank=432.5,q75/q25=8.41 vo_prod:H=0.3547,top10E=0.84,eRank=18.0,q75/q25=1438.04 train_time:152409ms step_avg:84.67ms +[2025-08-22 11:33:17] [Rank 0] PRINT: step:1800/10000 val_loss:4.5096 svd_entropy: attn_qk:H=0.6037,top10E=0.49,eRank=65.9,q75/q25=78.62 attn_vo:H=0.4678,top10E=0.71,eRank=38.1,q75/q25=56.67 mlp_w1:H=0.6727,top10E=0.42,eRank=108.3,q75/q25=8.94 mlp_w2:H=0.9083,top10E=0.09,eRank=432.5,q75/q25=8.41 vo_prod:H=0.3547,top10E=0.84,eRank=18.0,q75/q25=1438.04 train_time:152409ms step_avg:84.67ms +[2025-08-22 11:33:17] [Rank 0] step:1801/10000 train_time:152421ms step_avg:84.63ms +[2025-08-22 11:33:17] [Rank 0] step:1801/10000 train_time:152421ms step_avg:84.63ms +[2025-08-22 11:33:19] [Rank 0] step:1821/10000 train_time:153955ms step_avg:84.54ms +[2025-08-22 11:33:19] [Rank 0] step:1821/10000 train_time:153955ms step_avg:84.54ms +[2025-08-22 11:33:21] [Rank 0] step:1841/10000 train_time:155657ms step_avg:84.55ms 
+[2025-08-22 11:33:21] [Rank 0] step:1841/10000 train_time:155657ms step_avg:84.55ms +[2025-08-22 11:33:22] [Rank 0] step:1861/10000 train_time:157358ms step_avg:84.56ms +[2025-08-22 11:33:22] [Rank 0] step:1861/10000 train_time:157358ms step_avg:84.56ms +[2025-08-22 11:33:24] [Rank 0] step:1881/10000 train_time:159061ms step_avg:84.56ms +[2025-08-22 11:33:24] [Rank 0] step:1881/10000 train_time:159061ms step_avg:84.56ms +[2025-08-22 11:33:26] [Rank 0] step:1901/10000 train_time:160763ms step_avg:84.57ms +[2025-08-22 11:33:26] [Rank 0] step:1901/10000 train_time:160763ms step_avg:84.57ms +[2025-08-22 11:33:27] [Rank 0] step:1921/10000 train_time:162544ms step_avg:84.61ms +[2025-08-22 11:33:27] [Rank 0] step:1921/10000 train_time:162544ms step_avg:84.61ms +[2025-08-22 11:33:29] [Rank 0] step:1941/10000 train_time:164180ms step_avg:84.59ms +[2025-08-22 11:33:29] [Rank 0] step:1941/10000 train_time:164180ms step_avg:84.59ms +[2025-08-22 11:33:31] [Rank 0] step:1961/10000 train_time:165884ms step_avg:84.59ms +[2025-08-22 11:33:31] [Rank 0] step:1961/10000 train_time:165884ms step_avg:84.59ms +[2025-08-22 11:33:32] [Rank 0] step:1981/10000 train_time:167589ms step_avg:84.60ms +[2025-08-22 11:33:32] [Rank 0] step:1981/10000 train_time:167589ms step_avg:84.60ms +[2025-08-22 11:33:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:33:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:33:48] [Rank 0] PRINT: step:2000/10000 val_loss:4.4656 svd_entropy: attn_qk:H=0.6083,top10E=0.48,eRank=68.2,q75/q25=79.02 attn_vo:H=0.4776,top10E=0.70,eRank=41.0,q75/q25=60.81 mlp_w1:H=0.6792,top10E=0.41,eRank=113.0,q75/q25=9.25 mlp_w2:H=0.9095,top10E=0.09,eRank=436.6,q75/q25=8.34 vo_prod:H=0.3642,top10E=0.83,eRank=19.6,q75/q25=1746.72 train_time:169465ms step_avg:84.73ms +[2025-08-22 11:33:48] [Rank 0] PRINT: step:2000/10000 val_loss:4.4656 svd_entropy: attn_qk:H=0.6083,top10E=0.48,eRank=68.2,q75/q25=79.02 attn_vo:H=0.4776,top10E=0.70,eRank=41.0,q75/q25=60.81 mlp_w1:H=0.6792,top10E=0.41,eRank=113.0,q75/q25=9.25 mlp_w2:H=0.9095,top10E=0.09,eRank=436.6,q75/q25=8.34 vo_prod:H=0.3642,top10E=0.83,eRank=19.6,q75/q25=1746.72 train_time:169465ms step_avg:84.73ms +[2025-08-22 11:33:48] [Rank 0] step:2001/10000 train_time:169478ms step_avg:84.70ms +[2025-08-22 11:33:48] [Rank 0] step:2001/10000 train_time:169478ms step_avg:84.70ms +[2025-08-22 11:33:49] [Rank 0] step:2021/10000 train_time:171011ms step_avg:84.62ms +[2025-08-22 11:33:49] [Rank 0] step:2021/10000 train_time:171011ms step_avg:84.62ms +[2025-08-22 11:33:52] [Rank 0] step:2041/10000 train_time:173216ms step_avg:84.87ms +[2025-08-22 11:33:52] [Rank 0] step:2041/10000 train_time:173216ms step_avg:84.87ms +[2025-08-22 11:33:53] [Rank 0] step:2061/10000 train_time:174918ms step_avg:84.87ms +[2025-08-22 11:33:53] [Rank 0] step:2061/10000 train_time:174918ms step_avg:84.87ms +[2025-08-22 11:33:55] [Rank 0] step:2081/10000 train_time:176620ms step_avg:84.87ms +[2025-08-22 11:33:55] [Rank 0] step:2081/10000 train_time:176620ms step_avg:84.87ms +[2025-08-22 11:33:57] [Rank 0] step:2101/10000 train_time:178324ms step_avg:84.88ms +[2025-08-22 11:33:57] [Rank 0] step:2101/10000 train_time:178324ms step_avg:84.88ms +[2025-08-22 11:33:59] [Rank 0] step:2121/10000 train_time:180028ms step_avg:84.88ms +[2025-08-22 11:33:59] [Rank 0] step:2121/10000 train_time:180028ms step_avg:84.88ms +[2025-08-22 11:34:00] 
[Rank 0] step:2141/10000 train_time:181734ms step_avg:84.88ms +[2025-08-22 11:34:00] [Rank 0] step:2141/10000 train_time:181734ms step_avg:84.88ms +[2025-08-22 11:34:02] [Rank 0] step:2161/10000 train_time:183442ms step_avg:84.89ms +[2025-08-22 11:34:02] [Rank 0] step:2161/10000 train_time:183442ms step_avg:84.89ms +[2025-08-22 11:34:04] [Rank 0] step:2181/10000 train_time:185153ms step_avg:84.89ms +[2025-08-22 11:34:04] [Rank 0] step:2181/10000 train_time:185153ms step_avg:84.89ms +[2025-08-22 11:34:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:34:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:34:19] [Rank 0] PRINT: step:2200/10000 val_loss:4.4064 svd_entropy: attn_qk:H=0.6130,top10E=0.47,eRank=70.4,q75/q25=78.51 attn_vo:H=0.4867,top10E=0.68,eRank=43.7,q75/q25=64.13 mlp_w1:H=0.6851,top10E=0.40,eRank=117.3,q75/q25=9.53 mlp_w2:H=0.9105,top10E=0.09,eRank=439.7,q75/q25=8.31 vo_prod:H=0.3750,top10E=0.82,eRank=20.9,q75/q25=2119.78 train_time:187028ms step_avg:85.01ms +[2025-08-22 11:34:19] [Rank 0] PRINT: step:2200/10000 val_loss:4.4064 svd_entropy: attn_qk:H=0.6130,top10E=0.47,eRank=70.4,q75/q25=78.51 attn_vo:H=0.4867,top10E=0.68,eRank=43.7,q75/q25=64.13 mlp_w1:H=0.6851,top10E=0.40,eRank=117.3,q75/q25=9.53 mlp_w2:H=0.9105,top10E=0.09,eRank=439.7,q75/q25=8.31 vo_prod:H=0.3750,top10E=0.82,eRank=20.9,q75/q25=2119.78 train_time:187028ms step_avg:85.01ms +[2025-08-22 11:34:19] [Rank 0] step:2201/10000 train_time:187040ms step_avg:84.98ms +[2025-08-22 11:34:19] [Rank 0] step:2201/10000 train_time:187040ms step_avg:84.98ms +[2025-08-22 11:34:21] [Rank 0] step:2221/10000 train_time:188590ms step_avg:84.91ms +[2025-08-22 11:34:21] [Rank 0] step:2221/10000 train_time:188590ms step_avg:84.91ms +[2025-08-22 11:34:22] [Rank 0] step:2241/10000 train_time:190324ms step_avg:84.93ms 
+[2025-08-22 11:34:22] [Rank 0] step:2241/10000 train_time:190324ms step_avg:84.93ms +[2025-08-22 11:34:24] [Rank 0] step:2261/10000 train_time:192068ms step_avg:84.95ms +[2025-08-22 11:34:24] [Rank 0] step:2261/10000 train_time:192068ms step_avg:84.95ms +[2025-08-22 11:34:26] [Rank 0] step:2281/10000 train_time:193814ms step_avg:84.97ms +[2025-08-22 11:34:26] [Rank 0] step:2281/10000 train_time:193814ms step_avg:84.97ms +[2025-08-22 11:34:28] [Rank 0] step:2301/10000 train_time:195559ms step_avg:84.99ms +[2025-08-22 11:34:28] [Rank 0] step:2301/10000 train_time:195559ms step_avg:84.99ms +[2025-08-22 11:34:29] [Rank 0] step:2321/10000 train_time:197308ms step_avg:85.01ms +[2025-08-22 11:34:29] [Rank 0] step:2321/10000 train_time:197308ms step_avg:85.01ms +[2025-08-22 11:34:31] [Rank 0] step:2341/10000 train_time:199057ms step_avg:85.03ms +[2025-08-22 11:34:31] [Rank 0] step:2341/10000 train_time:199057ms step_avg:85.03ms +[2025-08-22 11:34:33] [Rank 0] step:2361/10000 train_time:200808ms step_avg:85.05ms +[2025-08-22 11:34:33] [Rank 0] step:2361/10000 train_time:200808ms step_avg:85.05ms +[2025-08-22 11:34:35] [Rank 0] step:2381/10000 train_time:202558ms step_avg:85.07ms +[2025-08-22 11:34:35] [Rank 0] step:2381/10000 train_time:202558ms step_avg:85.07ms +[2025-08-22 11:34:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:34:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:34:50] [Rank 0] PRINT: step:2400/10000 val_loss:4.3357 svd_entropy: attn_qk:H=0.6176,top10E=0.46,eRank=72.5,q75/q25=78.37 attn_vo:H=0.4944,top10E=0.67,eRank=46.2,q75/q25=67.37 mlp_w1:H=0.6911,top10E=0.39,eRank=121.6,q75/q25=9.83 mlp_w2:H=0.9113,top10E=0.09,eRank=442.6,q75/q25=8.27 vo_prod:H=0.3811,top10E=0.81,eRank=22.2,q75/q25=2496.20 train_time:204483ms step_avg:85.20ms +[2025-08-22 11:34:50] [Rank 0] PRINT: step:2400/10000 val_loss:4.3357 svd_entropy: attn_qk:H=0.6176,top10E=0.46,eRank=72.5,q75/q25=78.37 attn_vo:H=0.4944,top10E=0.67,eRank=46.2,q75/q25=67.37 mlp_w1:H=0.6911,top10E=0.39,eRank=121.6,q75/q25=9.83 mlp_w2:H=0.9113,top10E=0.09,eRank=442.6,q75/q25=8.27 vo_prod:H=0.3811,top10E=0.81,eRank=22.2,q75/q25=2496.20 train_time:204483ms step_avg:85.20ms +[2025-08-22 11:34:50] [Rank 0] step:2401/10000 train_time:204496ms step_avg:85.17ms +[2025-08-22 11:34:50] [Rank 0] step:2401/10000 train_time:204496ms step_avg:85.17ms +[2025-08-22 11:34:52] [Rank 0] step:2421/10000 train_time:206092ms step_avg:85.13ms +[2025-08-22 11:34:52] [Rank 0] step:2421/10000 train_time:206092ms step_avg:85.13ms +[2025-08-22 11:34:53] [Rank 0] step:2441/10000 train_time:207837ms step_avg:85.14ms +[2025-08-22 11:34:53] [Rank 0] step:2441/10000 train_time:207837ms step_avg:85.14ms +[2025-08-22 11:34:55] [Rank 0] step:2461/10000 train_time:209584ms step_avg:85.16ms +[2025-08-22 11:34:55] [Rank 0] step:2461/10000 train_time:209584ms step_avg:85.16ms +[2025-08-22 11:34:57] [Rank 0] step:2481/10000 train_time:211331ms step_avg:85.18ms +[2025-08-22 11:34:57] [Rank 0] step:2481/10000 train_time:211331ms step_avg:85.18ms +[2025-08-22 11:34:59] [Rank 0] step:2501/10000 train_time:213079ms step_avg:85.20ms +[2025-08-22 11:34:59] [Rank 0] step:2501/10000 train_time:213079ms step_avg:85.20ms +[2025-08-22 11:35:00] [Rank 0] step:2521/10000 train_time:214827ms step_avg:85.22ms +[2025-08-22 11:35:00] [Rank 0] step:2521/10000 train_time:214827ms step_avg:85.22ms +[2025-08-22 11:35:02] 
[Rank 0] step:2541/10000 train_time:216577ms step_avg:85.23ms +[2025-08-22 11:35:02] [Rank 0] step:2541/10000 train_time:216577ms step_avg:85.23ms +[2025-08-22 11:35:04] [Rank 0] step:2561/10000 train_time:218327ms step_avg:85.25ms +[2025-08-22 11:35:04] [Rank 0] step:2561/10000 train_time:218327ms step_avg:85.25ms +[2025-08-22 11:35:06] [Rank 0] step:2581/10000 train_time:220077ms step_avg:85.27ms +[2025-08-22 11:35:06] [Rank 0] step:2581/10000 train_time:220077ms step_avg:85.27ms +[2025-08-22 11:35:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:35:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:35:21] [Rank 0] PRINT: step:2600/10000 val_loss:4.2939 svd_entropy: attn_qk:H=0.6218,top10E=0.46,eRank=74.6,q75/q25=78.99 attn_vo:H=0.5023,top10E=0.66,eRank=48.6,q75/q25=71.07 mlp_w1:H=0.6967,top10E=0.38,eRank=125.6,q75/q25=10.13 mlp_w2:H=0.9118,top10E=0.09,eRank=444.4,q75/q25=8.27 vo_prod:H=0.3897,top10E=0.80,eRank=23.4,q75/q25=3036.40 train_time:222003ms step_avg:85.39ms +[2025-08-22 11:35:21] [Rank 0] PRINT: step:2600/10000 val_loss:4.2939 svd_entropy: attn_qk:H=0.6218,top10E=0.46,eRank=74.6,q75/q25=78.99 attn_vo:H=0.5023,top10E=0.66,eRank=48.6,q75/q25=71.07 mlp_w1:H=0.6967,top10E=0.38,eRank=125.6,q75/q25=10.13 mlp_w2:H=0.9118,top10E=0.09,eRank=444.4,q75/q25=8.27 vo_prod:H=0.3897,top10E=0.80,eRank=23.4,q75/q25=3036.40 train_time:222003ms step_avg:85.39ms +[2025-08-22 11:35:21] [Rank 0] step:2601/10000 train_time:222016ms step_avg:85.36ms +[2025-08-22 11:35:21] [Rank 0] step:2601/10000 train_time:222016ms step_avg:85.36ms +[2025-08-22 11:35:23] [Rank 0] step:2621/10000 train_time:223608ms step_avg:85.31ms +[2025-08-22 11:35:23] [Rank 0] step:2621/10000 train_time:223608ms step_avg:85.31ms +[2025-08-22 11:35:25] [Rank 0] step:2641/10000 train_time:225354ms step_avg:85.33ms 
+[2025-08-22 11:35:25] [Rank 0] step:2641/10000 train_time:225354ms step_avg:85.33ms +[2025-08-22 11:35:26] [Rank 0] step:2661/10000 train_time:227100ms step_avg:85.34ms +[2025-08-22 11:35:26] [Rank 0] step:2661/10000 train_time:227100ms step_avg:85.34ms +[2025-08-22 11:35:28] [Rank 0] step:2681/10000 train_time:228849ms step_avg:85.36ms +[2025-08-22 11:35:28] [Rank 0] step:2681/10000 train_time:228849ms step_avg:85.36ms +[2025-08-22 11:35:30] [Rank 0] step:2701/10000 train_time:230599ms step_avg:85.38ms +[2025-08-22 11:35:30] [Rank 0] step:2701/10000 train_time:230599ms step_avg:85.38ms +[2025-08-22 11:35:32] [Rank 0] step:2721/10000 train_time:232349ms step_avg:85.39ms +[2025-08-22 11:35:32] [Rank 0] step:2721/10000 train_time:232349ms step_avg:85.39ms +[2025-08-22 11:35:33] [Rank 0] step:2741/10000 train_time:234101ms step_avg:85.41ms +[2025-08-22 11:35:33] [Rank 0] step:2741/10000 train_time:234101ms step_avg:85.41ms +[2025-08-22 11:35:35] [Rank 0] step:2761/10000 train_time:235923ms step_avg:85.45ms +[2025-08-22 11:35:35] [Rank 0] step:2761/10000 train_time:235923ms step_avg:85.45ms +[2025-08-22 11:35:37] [Rank 0] step:2781/10000 train_time:237675ms step_avg:85.46ms +[2025-08-22 11:35:37] [Rank 0] step:2781/10000 train_time:237675ms step_avg:85.46ms +[2025-08-22 11:35:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:35:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:35:52] [Rank 0] PRINT: step:2800/10000 val_loss:4.2644 svd_entropy: attn_qk:H=0.6254,top10E=0.45,eRank=76.7,q75/q25=79.14 attn_vo:H=0.5088,top10E=0.64,eRank=50.8,q75/q25=74.36 mlp_w1:H=0.7018,top10E=0.38,eRank=129.6,q75/q25=10.41 mlp_w2:H=0.9121,top10E=0.09,eRank=446.0,q75/q25=8.26 vo_prod:H=0.3963,top10E=0.79,eRank=24.6,q75/q25=3480.23 train_time:239603ms step_avg:85.57ms +[2025-08-22 11:35:52] [Rank 0] PRINT: step:2800/10000 val_loss:4.2644 svd_entropy: attn_qk:H=0.6254,top10E=0.45,eRank=76.7,q75/q25=79.14 attn_vo:H=0.5088,top10E=0.64,eRank=50.8,q75/q25=74.36 mlp_w1:H=0.7018,top10E=0.38,eRank=129.6,q75/q25=10.41 mlp_w2:H=0.9121,top10E=0.09,eRank=446.0,q75/q25=8.26 vo_prod:H=0.3963,top10E=0.79,eRank=24.6,q75/q25=3480.23 train_time:239603ms step_avg:85.57ms +[2025-08-22 11:35:53] [Rank 0] step:2801/10000 train_time:239616ms step_avg:85.55ms +[2025-08-22 11:35:53] [Rank 0] step:2801/10000 train_time:239616ms step_avg:85.55ms +[2025-08-22 11:35:54] [Rank 0] step:2821/10000 train_time:241211ms step_avg:85.51ms +[2025-08-22 11:35:54] [Rank 0] step:2821/10000 train_time:241211ms step_avg:85.51ms +[2025-08-22 11:35:56] [Rank 0] step:2841/10000 train_time:242956ms step_avg:85.52ms +[2025-08-22 11:35:56] [Rank 0] step:2841/10000 train_time:242956ms step_avg:85.52ms +[2025-08-22 11:35:58] [Rank 0] step:2861/10000 train_time:244705ms step_avg:85.53ms +[2025-08-22 11:35:58] [Rank 0] step:2861/10000 train_time:244705ms step_avg:85.53ms +[2025-08-22 11:36:00] [Rank 0] step:2881/10000 train_time:246453ms step_avg:85.54ms +[2025-08-22 11:36:00] [Rank 0] step:2881/10000 train_time:246453ms step_avg:85.54ms +[2025-08-22 11:36:01] [Rank 0] step:2901/10000 train_time:248200ms step_avg:85.56ms +[2025-08-22 11:36:01] [Rank 0] step:2901/10000 train_time:248200ms step_avg:85.56ms +[2025-08-22 11:36:03] [Rank 0] step:2921/10000 train_time:249948ms step_avg:85.57ms +[2025-08-22 11:36:03] [Rank 0] step:2921/10000 train_time:249948ms step_avg:85.57ms +[2025-08-22 11:36:05] 
[Rank 0] step:2941/10000 train_time:251698ms step_avg:85.58ms +[2025-08-22 11:36:05] [Rank 0] step:2941/10000 train_time:251698ms step_avg:85.58ms +[2025-08-22 11:36:07] [Rank 0] step:2961/10000 train_time:253448ms step_avg:85.60ms +[2025-08-22 11:36:07] [Rank 0] step:2961/10000 train_time:253448ms step_avg:85.60ms +[2025-08-22 11:36:08] [Rank 0] step:2981/10000 train_time:255204ms step_avg:85.61ms +[2025-08-22 11:36:08] [Rank 0] step:2981/10000 train_time:255204ms step_avg:85.61ms +[2025-08-22 11:36:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:36:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:36:24] [Rank 0] PRINT: step:3000/10000 val_loss:4.2269 svd_entropy: attn_qk:H=0.6287,top10E=0.45,eRank=78.5,q75/q25=79.31 attn_vo:H=0.5151,top10E=0.63,eRank=52.9,q75/q25=77.21 mlp_w1:H=0.7065,top10E=0.37,eRank=133.3,q75/q25=10.73 mlp_w2:H=0.9123,top10E=0.09,eRank=447.1,q75/q25=8.26 vo_prod:H=0.4027,top10E=0.78,eRank=25.7,q75/q25=4140.86 train_time:257138ms step_avg:85.71ms +[2025-08-22 11:36:24] [Rank 0] PRINT: step:3000/10000 val_loss:4.2269 svd_entropy: attn_qk:H=0.6287,top10E=0.45,eRank=78.5,q75/q25=79.31 attn_vo:H=0.5151,top10E=0.63,eRank=52.9,q75/q25=77.21 mlp_w1:H=0.7065,top10E=0.37,eRank=133.3,q75/q25=10.73 mlp_w2:H=0.9123,top10E=0.09,eRank=447.1,q75/q25=8.26 vo_prod:H=0.4027,top10E=0.78,eRank=25.7,q75/q25=4140.86 train_time:257138ms step_avg:85.71ms +[2025-08-22 11:36:24] [Rank 0] step:3001/10000 train_time:257151ms step_avg:85.69ms +[2025-08-22 11:36:24] [Rank 0] step:3001/10000 train_time:257151ms step_avg:85.69ms +[2025-08-22 11:36:26] [Rank 0] step:3021/10000 train_time:258738ms step_avg:85.65ms +[2025-08-22 11:36:26] [Rank 0] step:3021/10000 train_time:258738ms step_avg:85.65ms +[2025-08-22 11:36:27] [Rank 0] step:3041/10000 train_time:260490ms step_avg:85.66ms 
+[2025-08-22 11:36:27] [Rank 0] step:3041/10000 train_time:260490ms step_avg:85.66ms +[2025-08-22 11:36:29] [Rank 0] step:3061/10000 train_time:262243ms step_avg:85.67ms +[2025-08-22 11:36:29] [Rank 0] step:3061/10000 train_time:262243ms step_avg:85.67ms +[2025-08-22 11:36:31] [Rank 0] step:3081/10000 train_time:263998ms step_avg:85.69ms +[2025-08-22 11:36:31] [Rank 0] step:3081/10000 train_time:263998ms step_avg:85.69ms +[2025-08-22 11:36:33] [Rank 0] step:3101/10000 train_time:265758ms step_avg:85.70ms +[2025-08-22 11:36:33] [Rank 0] step:3101/10000 train_time:265758ms step_avg:85.70ms +[2025-08-22 11:36:34] [Rank 0] step:3121/10000 train_time:267515ms step_avg:85.71ms +[2025-08-22 11:36:34] [Rank 0] step:3121/10000 train_time:267515ms step_avg:85.71ms +[2025-08-22 11:36:36] [Rank 0] step:3141/10000 train_time:269273ms step_avg:85.73ms +[2025-08-22 11:36:36] [Rank 0] step:3141/10000 train_time:269273ms step_avg:85.73ms +[2025-08-22 11:36:38] [Rank 0] step:3161/10000 train_time:271033ms step_avg:85.74ms +[2025-08-22 11:36:38] [Rank 0] step:3161/10000 train_time:271033ms step_avg:85.74ms +[2025-08-22 11:36:40] [Rank 0] step:3181/10000 train_time:272792ms step_avg:85.76ms +[2025-08-22 11:36:40] [Rank 0] step:3181/10000 train_time:272792ms step_avg:85.76ms +[2025-08-22 11:36:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:36:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:36:55] [Rank 0] PRINT: step:3200/10000 val_loss:4.1950 svd_entropy: attn_qk:H=0.6315,top10E=0.44,eRank=80.2,q75/q25=79.28 attn_vo:H=0.5210,top10E=0.62,eRank=54.9,q75/q25=80.60 mlp_w1:H=0.7105,top10E=0.36,eRank=136.7,q75/q25=10.97 mlp_w2:H=0.9125,top10E=0.09,eRank=448.1,q75/q25=8.26 vo_prod:H=0.4087,top10E=0.78,eRank=26.8,q75/q25=4652.87 train_time:274729ms step_avg:85.85ms +[2025-08-22 11:36:55] [Rank 0] PRINT: step:3200/10000 val_loss:4.1950 svd_entropy: attn_qk:H=0.6315,top10E=0.44,eRank=80.2,q75/q25=79.28 attn_vo:H=0.5210,top10E=0.62,eRank=54.9,q75/q25=80.60 mlp_w1:H=0.7105,top10E=0.36,eRank=136.7,q75/q25=10.97 mlp_w2:H=0.9125,top10E=0.09,eRank=448.1,q75/q25=8.26 vo_prod:H=0.4087,top10E=0.78,eRank=26.8,q75/q25=4652.87 train_time:274729ms step_avg:85.85ms +[2025-08-22 11:36:55] [Rank 0] step:3201/10000 train_time:274742ms step_avg:85.83ms +[2025-08-22 11:36:55] [Rank 0] step:3201/10000 train_time:274742ms step_avg:85.83ms +[2025-08-22 11:36:57] [Rank 0] step:3221/10000 train_time:276327ms step_avg:85.79ms +[2025-08-22 11:36:57] [Rank 0] step:3221/10000 train_time:276327ms step_avg:85.79ms +[2025-08-22 11:36:59] [Rank 0] step:3241/10000 train_time:278083ms step_avg:85.80ms +[2025-08-22 11:36:59] [Rank 0] step:3241/10000 train_time:278083ms step_avg:85.80ms +[2025-08-22 11:37:01] [Rank 0] step:3261/10000 train_time:279840ms step_avg:85.81ms +[2025-08-22 11:37:01] [Rank 0] step:3261/10000 train_time:279840ms step_avg:85.81ms +[2025-08-22 11:37:02] [Rank 0] step:3281/10000 train_time:281600ms step_avg:85.83ms +[2025-08-22 11:37:02] [Rank 0] step:3281/10000 train_time:281600ms step_avg:85.83ms +[2025-08-22 11:37:04] [Rank 0] step:3301/10000 train_time:283359ms step_avg:85.84ms +[2025-08-22 11:37:04] [Rank 0] step:3301/10000 train_time:283359ms step_avg:85.84ms +[2025-08-22 11:37:06] [Rank 0] step:3321/10000 train_time:285118ms step_avg:85.85ms +[2025-08-22 11:37:06] [Rank 0] step:3321/10000 train_time:285118ms step_avg:85.85ms +[2025-08-22 11:37:08] 
[Rank 0] step:3341/10000 train_time:286879ms step_avg:85.87ms +[2025-08-22 11:37:08] [Rank 0] step:3341/10000 train_time:286879ms step_avg:85.87ms +[2025-08-22 11:37:10] [Rank 0] step:3361/10000 train_time:288638ms step_avg:85.88ms +[2025-08-22 11:37:10] [Rank 0] step:3361/10000 train_time:288638ms step_avg:85.88ms +[2025-08-22 11:37:11] [Rank 0] step:3381/10000 train_time:290398ms step_avg:85.89ms +[2025-08-22 11:37:11] [Rank 0] step:3381/10000 train_time:290398ms step_avg:85.89ms +[2025-08-22 11:37:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:37:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:37:27] [Rank 0] PRINT: step:3400/10000 val_loss:4.1677 svd_entropy: attn_qk:H=0.6347,top10E=0.44,eRank=82.0,q75/q25=79.49 attn_vo:H=0.5265,top10E=0.61,eRank=56.9,q75/q25=83.03 mlp_w1:H=0.7148,top10E=0.36,eRank=140.2,q75/q25=11.22 mlp_w2:H=0.9125,top10E=0.09,eRank=448.9,q75/q25=8.28 vo_prod:H=0.4141,top10E=0.77,eRank=27.8,q75/q25=5331.12 train_time:292336ms step_avg:85.98ms +[2025-08-22 11:37:27] [Rank 0] PRINT: step:3400/10000 val_loss:4.1677 svd_entropy: attn_qk:H=0.6347,top10E=0.44,eRank=82.0,q75/q25=79.49 attn_vo:H=0.5265,top10E=0.61,eRank=56.9,q75/q25=83.03 mlp_w1:H=0.7148,top10E=0.36,eRank=140.2,q75/q25=11.22 mlp_w2:H=0.9125,top10E=0.09,eRank=448.9,q75/q25=8.28 vo_prod:H=0.4141,top10E=0.77,eRank=27.8,q75/q25=5331.12 train_time:292336ms step_avg:85.98ms +[2025-08-22 11:37:27] [Rank 0] step:3401/10000 train_time:292349ms step_avg:85.96ms +[2025-08-22 11:37:27] [Rank 0] step:3401/10000 train_time:292349ms step_avg:85.96ms +[2025-08-22 11:37:29] [Rank 0] step:3421/10000 train_time:293942ms step_avg:85.92ms +[2025-08-22 11:37:29] [Rank 0] step:3421/10000 train_time:293942ms step_avg:85.92ms +[2025-08-22 11:37:30] [Rank 0] step:3441/10000 train_time:295694ms step_avg:85.93ms 
+[2025-08-22 11:37:30] [Rank 0] step:3441/10000 train_time:295694ms step_avg:85.93ms +[2025-08-22 11:37:32] [Rank 0] step:3461/10000 train_time:297450ms step_avg:85.94ms +[2025-08-22 11:37:32] [Rank 0] step:3461/10000 train_time:297450ms step_avg:85.94ms +[2025-08-22 11:37:34] [Rank 0] step:3481/10000 train_time:299207ms step_avg:85.95ms +[2025-08-22 11:37:34] [Rank 0] step:3481/10000 train_time:299207ms step_avg:85.95ms +[2025-08-22 11:37:36] [Rank 0] step:3501/10000 train_time:300965ms step_avg:85.97ms +[2025-08-22 11:37:36] [Rank 0] step:3501/10000 train_time:300965ms step_avg:85.97ms +[2025-08-22 11:37:37] [Rank 0] step:3521/10000 train_time:302723ms step_avg:85.98ms +[2025-08-22 11:37:37] [Rank 0] step:3521/10000 train_time:302723ms step_avg:85.98ms +[2025-08-22 11:37:39] [Rank 0] step:3541/10000 train_time:304480ms step_avg:85.99ms +[2025-08-22 11:37:39] [Rank 0] step:3541/10000 train_time:304480ms step_avg:85.99ms +[2025-08-22 11:37:41] [Rank 0] step:3561/10000 train_time:306237ms step_avg:86.00ms +[2025-08-22 11:37:41] [Rank 0] step:3561/10000 train_time:306237ms step_avg:86.00ms +[2025-08-22 11:37:43] [Rank 0] step:3581/10000 train_time:307996ms step_avg:86.01ms +[2025-08-22 11:37:43] [Rank 0] step:3581/10000 train_time:307996ms step_avg:86.01ms +[2025-08-22 11:37:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:37:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:37:58] [Rank 0] PRINT: step:3600/10000 val_loss:4.1576 svd_entropy: attn_qk:H=0.6372,top10E=0.43,eRank=83.5,q75/q25=80.35 attn_vo:H=0.5322,top10E=0.60,eRank=58.8,q75/q25=84.89 mlp_w1:H=0.7185,top10E=0.35,eRank=143.5,q75/q25=11.42 mlp_w2:H=0.9125,top10E=0.09,eRank=449.5,q75/q25=8.30 vo_prod:H=0.4197,top10E=0.76,eRank=28.6,q75/q25=6033.11 train_time:309932ms step_avg:86.09ms +[2025-08-22 11:37:58] [Rank 0] PRINT: step:3600/10000 val_loss:4.1576 svd_entropy: attn_qk:H=0.6372,top10E=0.43,eRank=83.5,q75/q25=80.35 attn_vo:H=0.5322,top10E=0.60,eRank=58.8,q75/q25=84.89 mlp_w1:H=0.7185,top10E=0.35,eRank=143.5,q75/q25=11.42 mlp_w2:H=0.9125,top10E=0.09,eRank=449.5,q75/q25=8.30 vo_prod:H=0.4197,top10E=0.76,eRank=28.6,q75/q25=6033.11 train_time:309932ms step_avg:86.09ms +[2025-08-22 11:37:58] [Rank 0] step:3601/10000 train_time:309944ms step_avg:86.07ms +[2025-08-22 11:37:58] [Rank 0] step:3601/10000 train_time:309944ms step_avg:86.07ms +[2025-08-22 11:38:00] [Rank 0] step:3621/10000 train_time:311543ms step_avg:86.04ms +[2025-08-22 11:38:00] [Rank 0] step:3621/10000 train_time:311543ms step_avg:86.04ms +[2025-08-22 11:38:02] [Rank 0] step:3641/10000 train_time:313298ms step_avg:86.05ms +[2025-08-22 11:38:02] [Rank 0] step:3641/10000 train_time:313298ms step_avg:86.05ms +[2025-08-22 11:38:04] [Rank 0] step:3661/10000 train_time:315054ms step_avg:86.06ms +[2025-08-22 11:38:04] [Rank 0] step:3661/10000 train_time:315054ms step_avg:86.06ms +[2025-08-22 11:38:05] [Rank 0] step:3681/10000 train_time:316810ms step_avg:86.07ms +[2025-08-22 11:38:05] [Rank 0] step:3681/10000 train_time:316810ms step_avg:86.07ms +[2025-08-22 11:38:07] [Rank 0] step:3701/10000 train_time:318567ms step_avg:86.08ms +[2025-08-22 11:38:07] [Rank 0] step:3701/10000 train_time:318567ms step_avg:86.08ms +[2025-08-22 11:38:09] [Rank 0] step:3721/10000 train_time:320349ms step_avg:86.09ms +[2025-08-22 11:38:09] [Rank 0] step:3721/10000 train_time:320349ms step_avg:86.09ms +[2025-08-22 11:38:11] 
[Rank 0] step:3741/10000 train_time:322141ms step_avg:86.11ms +[2025-08-22 11:38:11] [Rank 0] step:3741/10000 train_time:322141ms step_avg:86.11ms +[2025-08-22 11:38:12] [Rank 0] step:3761/10000 train_time:323931ms step_avg:86.13ms +[2025-08-22 11:38:12] [Rank 0] step:3761/10000 train_time:323931ms step_avg:86.13ms +[2025-08-22 11:38:14] [Rank 0] step:3781/10000 train_time:325725ms step_avg:86.15ms +[2025-08-22 11:38:14] [Rank 0] step:3781/10000 train_time:325725ms step_avg:86.15ms +[2025-08-22 11:38:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:38:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:38:30] [Rank 0] PRINT: step:3800/10000 val_loss:4.1139 svd_entropy: attn_qk:H=0.6398,top10E=0.43,eRank=85.1,q75/q25=80.18 attn_vo:H=0.5369,top10E=0.59,eRank=60.6,q75/q25=87.62 mlp_w1:H=0.7222,top10E=0.35,eRank=146.7,q75/q25=11.61 mlp_w2:H=0.9125,top10E=0.09,eRank=450.0,q75/q25=8.36 vo_prod:H=0.4249,top10E=0.76,eRank=29.6,q75/q25=6608.65 train_time:327703ms step_avg:86.24ms +[2025-08-22 11:38:30] [Rank 0] PRINT: step:3800/10000 val_loss:4.1139 svd_entropy: attn_qk:H=0.6398,top10E=0.43,eRank=85.1,q75/q25=80.18 attn_vo:H=0.5369,top10E=0.59,eRank=60.6,q75/q25=87.62 mlp_w1:H=0.7222,top10E=0.35,eRank=146.7,q75/q25=11.61 mlp_w2:H=0.9125,top10E=0.09,eRank=450.0,q75/q25=8.36 vo_prod:H=0.4249,top10E=0.76,eRank=29.6,q75/q25=6608.65 train_time:327703ms step_avg:86.24ms +[2025-08-22 11:38:30] [Rank 0] step:3801/10000 train_time:327715ms step_avg:86.22ms +[2025-08-22 11:38:30] [Rank 0] step:3801/10000 train_time:327715ms step_avg:86.22ms +[2025-08-22 11:38:32] [Rank 0] step:3821/10000 train_time:329335ms step_avg:86.19ms +[2025-08-22 11:38:32] [Rank 0] step:3821/10000 train_time:329335ms step_avg:86.19ms +[2025-08-22 11:38:34] [Rank 0] step:3841/10000 train_time:331131ms step_avg:86.21ms 
+[2025-08-22 11:38:34] [Rank 0] step:3841/10000 train_time:331131ms step_avg:86.21ms +[2025-08-22 11:38:35] [Rank 0] step:3861/10000 train_time:332924ms step_avg:86.23ms +[2025-08-22 11:38:35] [Rank 0] step:3861/10000 train_time:332924ms step_avg:86.23ms +[2025-08-22 11:38:37] [Rank 0] step:3881/10000 train_time:334714ms step_avg:86.24ms +[2025-08-22 11:38:37] [Rank 0] step:3881/10000 train_time:334714ms step_avg:86.24ms +[2025-08-22 11:38:39] [Rank 0] step:3901/10000 train_time:336508ms step_avg:86.26ms +[2025-08-22 11:38:39] [Rank 0] step:3901/10000 train_time:336508ms step_avg:86.26ms +[2025-08-22 11:38:41] [Rank 0] step:3921/10000 train_time:338301ms step_avg:86.28ms +[2025-08-22 11:38:41] [Rank 0] step:3921/10000 train_time:338301ms step_avg:86.28ms +[2025-08-22 11:38:42] [Rank 0] step:3941/10000 train_time:340096ms step_avg:86.30ms +[2025-08-22 11:38:42] [Rank 0] step:3941/10000 train_time:340096ms step_avg:86.30ms +[2025-08-22 11:38:44] [Rank 0] step:3961/10000 train_time:341889ms step_avg:86.31ms +[2025-08-22 11:38:44] [Rank 0] step:3961/10000 train_time:341889ms step_avg:86.31ms +[2025-08-22 11:38:46] [Rank 0] step:3981/10000 train_time:343683ms step_avg:86.33ms +[2025-08-22 11:38:46] [Rank 0] step:3981/10000 train_time:343683ms step_avg:86.33ms +[2025-08-22 11:38:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:38:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:39:01] [Rank 0] PRINT: step:4000/10000 val_loss:4.0894 svd_entropy: attn_qk:H=0.6422,top10E=0.42,eRank=86.6,q75/q25=80.49 attn_vo:H=0.5415,top10E=0.59,eRank=62.3,q75/q25=88.39 mlp_w1:H=0.7252,top10E=0.34,eRank=149.6,q75/q25=11.74 mlp_w2:H=0.9125,top10E=0.09,eRank=450.5,q75/q25=8.37 vo_prod:H=0.4293,top10E=0.75,eRank=30.5,q75/q25=7081.44 train_time:345669ms step_avg:86.42ms +[2025-08-22 11:39:01] [Rank 0] PRINT: step:4000/10000 val_loss:4.0894 svd_entropy: attn_qk:H=0.6422,top10E=0.42,eRank=86.6,q75/q25=80.49 attn_vo:H=0.5415,top10E=0.59,eRank=62.3,q75/q25=88.39 mlp_w1:H=0.7252,top10E=0.34,eRank=149.6,q75/q25=11.74 mlp_w2:H=0.9125,top10E=0.09,eRank=450.5,q75/q25=8.37 vo_prod:H=0.4293,top10E=0.75,eRank=30.5,q75/q25=7081.44 train_time:345669ms step_avg:86.42ms +[2025-08-22 11:39:02] [Rank 0] step:4001/10000 train_time:345681ms step_avg:86.40ms +[2025-08-22 11:39:02] [Rank 0] step:4001/10000 train_time:345681ms step_avg:86.40ms +[2025-08-22 11:39:03] [Rank 0] step:4021/10000 train_time:347313ms step_avg:86.37ms +[2025-08-22 11:39:03] [Rank 0] step:4021/10000 train_time:347313ms step_avg:86.37ms +[2025-08-22 11:39:05] [Rank 0] step:4041/10000 train_time:349108ms step_avg:86.39ms +[2025-08-22 11:39:05] [Rank 0] step:4041/10000 train_time:349108ms step_avg:86.39ms +[2025-08-22 11:39:07] [Rank 0] step:4061/10000 train_time:350903ms step_avg:86.41ms +[2025-08-22 11:39:07] [Rank 0] step:4061/10000 train_time:350903ms step_avg:86.41ms +[2025-08-22 11:39:09] [Rank 0] step:4081/10000 train_time:353208ms step_avg:86.55ms +[2025-08-22 11:39:09] [Rank 0] step:4081/10000 train_time:353208ms step_avg:86.55ms +[2025-08-22 11:39:11] [Rank 0] step:4101/10000 train_time:355005ms step_avg:86.57ms +[2025-08-22 11:39:11] [Rank 0] step:4101/10000 train_time:355005ms step_avg:86.57ms +[2025-08-22 11:39:13] [Rank 0] step:4121/10000 train_time:356802ms step_avg:86.58ms +[2025-08-22 11:39:13] [Rank 0] step:4121/10000 train_time:356802ms step_avg:86.58ms +[2025-08-22 11:39:15] 
[Rank 0] step:4141/10000 train_time:358599ms step_avg:86.60ms +[2025-08-22 11:39:15] [Rank 0] step:4141/10000 train_time:358599ms step_avg:86.60ms +[2025-08-22 11:39:16] [Rank 0] step:4161/10000 train_time:360397ms step_avg:86.61ms +[2025-08-22 11:39:16] [Rank 0] step:4161/10000 train_time:360397ms step_avg:86.61ms +[2025-08-22 11:39:18] [Rank 0] step:4181/10000 train_time:362197ms step_avg:86.63ms +[2025-08-22 11:39:18] [Rank 0] step:4181/10000 train_time:362197ms step_avg:86.63ms +[2025-08-22 11:39:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:39:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:39:34] [Rank 0] PRINT: step:4200/10000 val_loss:4.0784 svd_entropy: attn_qk:H=0.6447,top10E=0.42,eRank=88.0,q75/q25=80.50 attn_vo:H=0.5459,top10E=0.58,eRank=64.0,q75/q25=90.95 mlp_w1:H=0.7276,top10E=0.34,eRank=152.4,q75/q25=11.90 mlp_w2:H=0.9125,top10E=0.09,eRank=450.9,q75/q25=8.41 vo_prod:H=0.4348,top10E=0.74,eRank=31.3,q75/q25=7499.07 train_time:364176ms step_avg:86.71ms +[2025-08-22 11:39:34] [Rank 0] PRINT: step:4200/10000 val_loss:4.0784 svd_entropy: attn_qk:H=0.6447,top10E=0.42,eRank=88.0,q75/q25=80.50 attn_vo:H=0.5459,top10E=0.58,eRank=64.0,q75/q25=90.95 mlp_w1:H=0.7276,top10E=0.34,eRank=152.4,q75/q25=11.90 mlp_w2:H=0.9125,top10E=0.09,eRank=450.9,q75/q25=8.41 vo_prod:H=0.4348,top10E=0.74,eRank=31.3,q75/q25=7499.07 train_time:364176ms step_avg:86.71ms +[2025-08-22 11:39:34] [Rank 0] step:4201/10000 train_time:364189ms step_avg:86.69ms +[2025-08-22 11:39:34] [Rank 0] step:4201/10000 train_time:364189ms step_avg:86.69ms +[2025-08-22 11:39:36] [Rank 0] step:4221/10000 train_time:365808ms step_avg:86.66ms +[2025-08-22 11:39:36] [Rank 0] step:4221/10000 train_time:365808ms step_avg:86.66ms +[2025-08-22 11:39:37] [Rank 0] step:4241/10000 train_time:367603ms step_avg:86.68ms 
+[2025-08-22 11:39:37] [Rank 0] step:4241/10000 train_time:367603ms step_avg:86.68ms +[2025-08-22 11:39:39] [Rank 0] step:4261/10000 train_time:369397ms step_avg:86.69ms +[2025-08-22 11:39:39] [Rank 0] step:4261/10000 train_time:369397ms step_avg:86.69ms +[2025-08-22 11:39:41] [Rank 0] step:4281/10000 train_time:371192ms step_avg:86.71ms +[2025-08-22 11:39:41] [Rank 0] step:4281/10000 train_time:371192ms step_avg:86.71ms +[2025-08-22 11:39:43] [Rank 0] step:4301/10000 train_time:372986ms step_avg:86.72ms +[2025-08-22 11:39:43] [Rank 0] step:4301/10000 train_time:372986ms step_avg:86.72ms +[2025-08-22 11:39:45] [Rank 0] step:4321/10000 train_time:374781ms step_avg:86.73ms +[2025-08-22 11:39:45] [Rank 0] step:4321/10000 train_time:374781ms step_avg:86.73ms +[2025-08-22 11:39:46] [Rank 0] step:4341/10000 train_time:376573ms step_avg:86.75ms +[2025-08-22 11:39:46] [Rank 0] step:4341/10000 train_time:376573ms step_avg:86.75ms +[2025-08-22 11:39:48] [Rank 0] step:4361/10000 train_time:378390ms step_avg:86.77ms +[2025-08-22 11:39:48] [Rank 0] step:4361/10000 train_time:378390ms step_avg:86.77ms +[2025-08-22 11:39:50] [Rank 0] step:4381/10000 train_time:380208ms step_avg:86.79ms +[2025-08-22 11:39:50] [Rank 0] step:4381/10000 train_time:380208ms step_avg:86.79ms +[2025-08-22 11:39:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:39:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:40:06] [Rank 0] PRINT: step:4400/10000 val_loss:4.0746 svd_entropy: attn_qk:H=0.6468,top10E=0.42,eRank=89.4,q75/q25=80.09 attn_vo:H=0.5499,top10E=0.57,eRank=65.5,q75/q25=92.27 mlp_w1:H=0.7306,top10E=0.33,eRank=155.2,q75/q25=12.04 mlp_w2:H=0.9124,top10E=0.09,eRank=451.2,q75/q25=8.49 vo_prod:H=0.4378,top10E=0.74,eRank=32.1,q75/q25=7819.21 train_time:382183ms step_avg:86.86ms +[2025-08-22 11:40:06] [Rank 0] PRINT: step:4400/10000 val_loss:4.0746 svd_entropy: attn_qk:H=0.6468,top10E=0.42,eRank=89.4,q75/q25=80.09 attn_vo:H=0.5499,top10E=0.57,eRank=65.5,q75/q25=92.27 mlp_w1:H=0.7306,top10E=0.33,eRank=155.2,q75/q25=12.04 mlp_w2:H=0.9124,top10E=0.09,eRank=451.2,q75/q25=8.49 vo_prod:H=0.4378,top10E=0.74,eRank=32.1,q75/q25=7819.21 train_time:382183ms step_avg:86.86ms +[2025-08-22 11:40:06] [Rank 0] step:4401/10000 train_time:382196ms step_avg:86.84ms +[2025-08-22 11:40:06] [Rank 0] step:4401/10000 train_time:382196ms step_avg:86.84ms +[2025-08-22 11:40:07] [Rank 0] step:4421/10000 train_time:383828ms step_avg:86.82ms +[2025-08-22 11:40:07] [Rank 0] step:4421/10000 train_time:383828ms step_avg:86.82ms +[2025-08-22 11:40:09] [Rank 0] step:4441/10000 train_time:385617ms step_avg:86.83ms +[2025-08-22 11:40:09] [Rank 0] step:4441/10000 train_time:385617ms step_avg:86.83ms +[2025-08-22 11:40:11] [Rank 0] step:4461/10000 train_time:387412ms step_avg:86.84ms +[2025-08-22 11:40:11] [Rank 0] step:4461/10000 train_time:387412ms step_avg:86.84ms +[2025-08-22 11:40:13] [Rank 0] step:4481/10000 train_time:389210ms step_avg:86.86ms +[2025-08-22 11:40:13] [Rank 0] step:4481/10000 train_time:389210ms step_avg:86.86ms +[2025-08-22 11:40:15] [Rank 0] step:4501/10000 train_time:391010ms step_avg:86.87ms +[2025-08-22 11:40:15] [Rank 0] step:4501/10000 train_time:391010ms step_avg:86.87ms +[2025-08-22 11:40:16] [Rank 0] step:4521/10000 train_time:392809ms step_avg:86.89ms +[2025-08-22 11:40:16] [Rank 0] step:4521/10000 train_time:392809ms step_avg:86.89ms +[2025-08-22 11:40:18] 
[Rank 0] step:4541/10000 train_time:394610ms step_avg:86.90ms +[2025-08-22 11:40:18] [Rank 0] step:4541/10000 train_time:394610ms step_avg:86.90ms +[2025-08-22 11:40:20] [Rank 0] step:4561/10000 train_time:396411ms step_avg:86.91ms +[2025-08-22 11:40:20] [Rank 0] step:4561/10000 train_time:396411ms step_avg:86.91ms +[2025-08-22 11:40:22] [Rank 0] step:4581/10000 train_time:398215ms step_avg:86.93ms +[2025-08-22 11:40:22] [Rank 0] step:4581/10000 train_time:398215ms step_avg:86.93ms +[2025-08-22 11:40:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:40:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:40:37] [Rank 0] PRINT: step:4600/10000 val_loss:4.0355 svd_entropy: attn_qk:H=0.6492,top10E=0.41,eRank=90.8,q75/q25=80.86 attn_vo:H=0.5540,top10E=0.56,eRank=67.1,q75/q25=94.61 mlp_w1:H=0.7327,top10E=0.33,eRank=157.8,q75/q25=12.18 mlp_w2:H=0.9123,top10E=0.09,eRank=451.4,q75/q25=8.52 vo_prod:H=0.4415,top10E=0.73,eRank=32.9,q75/q25=8062.33 train_time:400200ms step_avg:87.00ms +[2025-08-22 11:40:37] [Rank 0] PRINT: step:4600/10000 val_loss:4.0355 svd_entropy: attn_qk:H=0.6492,top10E=0.41,eRank=90.8,q75/q25=80.86 attn_vo:H=0.5540,top10E=0.56,eRank=67.1,q75/q25=94.61 mlp_w1:H=0.7327,top10E=0.33,eRank=157.8,q75/q25=12.18 mlp_w2:H=0.9123,top10E=0.09,eRank=451.4,q75/q25=8.52 vo_prod:H=0.4415,top10E=0.73,eRank=32.9,q75/q25=8062.33 train_time:400200ms step_avg:87.00ms +[2025-08-22 11:40:37] [Rank 0] step:4601/10000 train_time:400212ms step_avg:86.98ms +[2025-08-22 11:40:37] [Rank 0] step:4601/10000 train_time:400212ms step_avg:86.98ms +[2025-08-22 11:40:39] [Rank 0] step:4621/10000 train_time:401834ms step_avg:86.96ms +[2025-08-22 11:40:39] [Rank 0] step:4621/10000 train_time:401834ms step_avg:86.96ms +[2025-08-22 11:40:41] [Rank 0] step:4641/10000 train_time:403635ms step_avg:86.97ms 
+[2025-08-22 11:40:41] [Rank 0] step:4641/10000 train_time:403635ms step_avg:86.97ms +[2025-08-22 11:40:43] [Rank 0] step:4661/10000 train_time:405433ms step_avg:86.98ms +[2025-08-22 11:40:43] [Rank 0] step:4661/10000 train_time:405433ms step_avg:86.98ms +[2025-08-22 11:40:45] [Rank 0] step:4681/10000 train_time:407234ms step_avg:87.00ms +[2025-08-22 11:40:45] [Rank 0] step:4681/10000 train_time:407234ms step_avg:87.00ms +[2025-08-22 11:40:46] [Rank 0] step:4701/10000 train_time:409036ms step_avg:87.01ms +[2025-08-22 11:40:46] [Rank 0] step:4701/10000 train_time:409036ms step_avg:87.01ms +[2025-08-22 11:40:48] [Rank 0] step:4721/10000 train_time:410836ms step_avg:87.02ms +[2025-08-22 11:40:48] [Rank 0] step:4721/10000 train_time:410836ms step_avg:87.02ms +[2025-08-22 11:40:50] [Rank 0] step:4741/10000 train_time:412777ms step_avg:87.07ms +[2025-08-22 11:40:50] [Rank 0] step:4741/10000 train_time:412777ms step_avg:87.07ms +[2025-08-22 11:40:52] [Rank 0] step:4761/10000 train_time:414439ms step_avg:87.05ms +[2025-08-22 11:40:52] [Rank 0] step:4761/10000 train_time:414439ms step_avg:87.05ms +[2025-08-22 11:40:54] [Rank 0] step:4781/10000 train_time:416301ms step_avg:87.07ms +[2025-08-22 11:40:54] [Rank 0] step:4781/10000 train_time:416301ms step_avg:87.07ms +[2025-08-22 11:40:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:40:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:41:09] [Rank 0] PRINT: step:4800/10000 val_loss:4.0290 svd_entropy: attn_qk:H=0.6513,top10E=0.41,eRank=92.2,q75/q25=81.24 attn_vo:H=0.5579,top10E=0.55,eRank=68.7,q75/q25=95.35 mlp_w1:H=0.7348,top10E=0.33,eRank=160.3,q75/q25=12.31 mlp_w2:H=0.9121,top10E=0.09,eRank=451.5,q75/q25=8.55 vo_prod:H=0.4457,top10E=0.73,eRank=33.6,q75/q25=8618.16 train_time:418283ms step_avg:87.14ms +[2025-08-22 11:41:09] [Rank 0] PRINT: step:4800/10000 val_loss:4.0290 svd_entropy: attn_qk:H=0.6513,top10E=0.41,eRank=92.2,q75/q25=81.24 attn_vo:H=0.5579,top10E=0.55,eRank=68.7,q75/q25=95.35 mlp_w1:H=0.7348,top10E=0.33,eRank=160.3,q75/q25=12.31 mlp_w2:H=0.9121,top10E=0.09,eRank=451.5,q75/q25=8.55 vo_prod:H=0.4457,top10E=0.73,eRank=33.6,q75/q25=8618.16 train_time:418283ms step_avg:87.14ms +[2025-08-22 11:41:09] [Rank 0] step:4801/10000 train_time:418296ms step_avg:87.13ms +[2025-08-22 11:41:09] [Rank 0] step:4801/10000 train_time:418296ms step_avg:87.13ms +[2025-08-22 11:41:11] [Rank 0] step:4821/10000 train_time:419923ms step_avg:87.10ms +[2025-08-22 11:41:11] [Rank 0] step:4821/10000 train_time:419923ms step_avg:87.10ms +[2025-08-22 11:41:13] [Rank 0] step:4841/10000 train_time:421719ms step_avg:87.11ms +[2025-08-22 11:41:13] [Rank 0] step:4841/10000 train_time:421719ms step_avg:87.11ms +[2025-08-22 11:41:15] [Rank 0] step:4861/10000 train_time:423518ms step_avg:87.13ms +[2025-08-22 11:41:15] [Rank 0] step:4861/10000 train_time:423518ms step_avg:87.13ms +[2025-08-22 11:41:16] [Rank 0] step:4881/10000 train_time:425315ms step_avg:87.14ms +[2025-08-22 11:41:16] [Rank 0] step:4881/10000 train_time:425315ms step_avg:87.14ms +[2025-08-22 11:41:18] [Rank 0] step:4901/10000 train_time:427112ms step_avg:87.15ms +[2025-08-22 11:41:18] [Rank 0] step:4901/10000 train_time:427112ms step_avg:87.15ms +[2025-08-22 11:41:20] [Rank 0] step:4921/10000 train_time:428912ms step_avg:87.16ms +[2025-08-22 11:41:20] [Rank 0] step:4921/10000 train_time:428912ms step_avg:87.16ms +[2025-08-22 11:41:22] 
[Rank 0] step:4941/10000 train_time:430714ms step_avg:87.17ms +[2025-08-22 11:41:22] [Rank 0] step:4941/10000 train_time:430714ms step_avg:87.17ms +[2025-08-22 11:41:24] [Rank 0] step:4961/10000 train_time:432512ms step_avg:87.18ms +[2025-08-22 11:41:24] [Rank 0] step:4961/10000 train_time:432512ms step_avg:87.18ms +[2025-08-22 11:41:25] [Rank 0] step:4981/10000 train_time:434317ms step_avg:87.19ms +[2025-08-22 11:41:25] [Rank 0] step:4981/10000 train_time:434317ms step_avg:87.19ms +[2025-08-22 11:41:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:41:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:41:41] [Rank 0] PRINT: step:5000/10000 val_loss:4.0129 svd_entropy: attn_qk:H=0.6533,top10E=0.41,eRank=93.5,q75/q25=81.44 attn_vo:H=0.5617,top10E=0.55,eRank=70.2,q75/q25=96.52 mlp_w1:H=0.7366,top10E=0.32,eRank=162.7,q75/q25=12.36 mlp_w2:H=0.9120,top10E=0.09,eRank=451.5,q75/q25=8.64 vo_prod:H=0.4498,top10E=0.72,eRank=34.4,q75/q25=8731.23 train_time:436303ms step_avg:87.26ms +[2025-08-22 11:41:41] [Rank 0] PRINT: step:5000/10000 val_loss:4.0129 svd_entropy: attn_qk:H=0.6533,top10E=0.41,eRank=93.5,q75/q25=81.44 attn_vo:H=0.5617,top10E=0.55,eRank=70.2,q75/q25=96.52 mlp_w1:H=0.7366,top10E=0.32,eRank=162.7,q75/q25=12.36 mlp_w2:H=0.9120,top10E=0.09,eRank=451.5,q75/q25=8.64 vo_prod:H=0.4498,top10E=0.72,eRank=34.4,q75/q25=8731.23 train_time:436303ms step_avg:87.26ms +[2025-08-22 11:41:41] [Rank 0] step:5001/10000 train_time:436316ms step_avg:87.25ms +[2025-08-22 11:41:41] [Rank 0] step:5001/10000 train_time:436316ms step_avg:87.25ms +[2025-08-22 11:41:43] [Rank 0] step:5021/10000 train_time:437954ms step_avg:87.22ms +[2025-08-22 11:41:43] [Rank 0] step:5021/10000 train_time:437954ms step_avg:87.22ms +[2025-08-22 11:41:45] [Rank 0] step:5041/10000 train_time:439756ms step_avg:87.24ms 
+[2025-08-22 11:41:45] [Rank 0] step:5041/10000 train_time:439756ms step_avg:87.24ms +[2025-08-22 11:41:46] [Rank 0] step:5061/10000 train_time:441550ms step_avg:87.25ms +[2025-08-22 11:41:46] [Rank 0] step:5061/10000 train_time:441550ms step_avg:87.25ms +[2025-08-22 11:41:48] [Rank 0] step:5081/10000 train_time:443350ms step_avg:87.26ms +[2025-08-22 11:41:48] [Rank 0] step:5081/10000 train_time:443350ms step_avg:87.26ms +[2025-08-22 11:41:50] [Rank 0] step:5101/10000 train_time:445148ms step_avg:87.27ms +[2025-08-22 11:41:50] [Rank 0] step:5101/10000 train_time:445148ms step_avg:87.27ms +[2025-08-22 11:41:52] [Rank 0] step:5121/10000 train_time:446949ms step_avg:87.28ms +[2025-08-22 11:41:52] [Rank 0] step:5121/10000 train_time:446949ms step_avg:87.28ms +[2025-08-22 11:41:54] [Rank 0] step:5141/10000 train_time:448753ms step_avg:87.29ms +[2025-08-22 11:41:54] [Rank 0] step:5141/10000 train_time:448753ms step_avg:87.29ms +[2025-08-22 11:41:55] [Rank 0] step:5161/10000 train_time:450551ms step_avg:87.30ms +[2025-08-22 11:41:55] [Rank 0] step:5161/10000 train_time:450551ms step_avg:87.30ms +[2025-08-22 11:41:57] [Rank 0] step:5181/10000 train_time:452353ms step_avg:87.31ms +[2025-08-22 11:41:57] [Rank 0] step:5181/10000 train_time:452353ms step_avg:87.31ms +[2025-08-22 11:41:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:41:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:42:13] [Rank 0] PRINT: step:5200/10000 val_loss:3.9965 svd_entropy: attn_qk:H=0.6552,top10E=0.40,eRank=94.8,q75/q25=81.44 attn_vo:H=0.5650,top10E=0.54,eRank=71.6,q75/q25=97.24 mlp_w1:H=0.7385,top10E=0.32,eRank=165.0,q75/q25=12.41 mlp_w2:H=0.9118,top10E=0.09,eRank=451.6,q75/q25=8.69 vo_prod:H=0.4520,top10E=0.71,eRank=35.0,q75/q25=8734.60 train_time:454357ms step_avg:87.38ms +[2025-08-22 11:42:13] [Rank 0] PRINT: step:5200/10000 val_loss:3.9965 svd_entropy: attn_qk:H=0.6552,top10E=0.40,eRank=94.8,q75/q25=81.44 attn_vo:H=0.5650,top10E=0.54,eRank=71.6,q75/q25=97.24 mlp_w1:H=0.7385,top10E=0.32,eRank=165.0,q75/q25=12.41 mlp_w2:H=0.9118,top10E=0.09,eRank=451.6,q75/q25=8.69 vo_prod:H=0.4520,top10E=0.71,eRank=35.0,q75/q25=8734.60 train_time:454357ms step_avg:87.38ms +[2025-08-22 11:42:13] [Rank 0] step:5201/10000 train_time:454369ms step_avg:87.36ms +[2025-08-22 11:42:13] [Rank 0] step:5201/10000 train_time:454369ms step_avg:87.36ms +[2025-08-22 11:42:15] [Rank 0] step:5221/10000 train_time:456029ms step_avg:87.35ms +[2025-08-22 11:42:15] [Rank 0] step:5221/10000 train_time:456029ms step_avg:87.35ms +[2025-08-22 11:42:16] [Rank 0] step:5241/10000 train_time:457853ms step_avg:87.36ms +[2025-08-22 11:42:16] [Rank 0] step:5241/10000 train_time:457853ms step_avg:87.36ms +[2025-08-22 11:42:18] [Rank 0] step:5261/10000 train_time:459682ms step_avg:87.38ms +[2025-08-22 11:42:18] [Rank 0] step:5261/10000 train_time:459682ms step_avg:87.38ms +[2025-08-22 11:42:20] [Rank 0] step:5281/10000 train_time:461510ms step_avg:87.39ms +[2025-08-22 11:42:20] [Rank 0] step:5281/10000 train_time:461510ms step_avg:87.39ms +[2025-08-22 11:42:22] [Rank 0] step:5301/10000 train_time:463350ms step_avg:87.41ms +[2025-08-22 11:42:22] [Rank 0] step:5301/10000 train_time:463350ms step_avg:87.41ms +[2025-08-22 11:42:24] [Rank 0] step:5321/10000 train_time:465178ms step_avg:87.42ms +[2025-08-22 11:42:24] [Rank 0] step:5321/10000 train_time:465178ms step_avg:87.42ms +[2025-08-22 11:42:26] 
[Rank 0] step:5341/10000 train_time:467009ms step_avg:87.44ms +[2025-08-22 11:42:26] [Rank 0] step:5341/10000 train_time:467009ms step_avg:87.44ms +[2025-08-22 11:42:27] [Rank 0] step:5361/10000 train_time:468844ms step_avg:87.45ms +[2025-08-22 11:42:27] [Rank 0] step:5361/10000 train_time:468844ms step_avg:87.45ms +[2025-08-22 11:42:29] [Rank 0] step:5381/10000 train_time:470677ms step_avg:87.47ms +[2025-08-22 11:42:29] [Rank 0] step:5381/10000 train_time:470677ms step_avg:87.47ms +[2025-08-22 11:42:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:42:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:42:45] [Rank 0] PRINT: step:5400/10000 val_loss:3.9806 svd_entropy: attn_qk:H=0.6571,top10E=0.40,eRank=96.1,q75/q25=81.22 attn_vo:H=0.5683,top10E=0.53,eRank=73.0,q75/q25=97.46 mlp_w1:H=0.7409,top10E=0.32,eRank=167.5,q75/q25=12.39 mlp_w2:H=0.9117,top10E=0.09,eRank=451.7,q75/q25=8.76 vo_prod:H=0.4549,top10E=0.71,eRank=35.6,q75/q25=8972.53 train_time:472690ms step_avg:87.54ms +[2025-08-22 11:42:45] [Rank 0] PRINT: step:5400/10000 val_loss:3.9806 svd_entropy: attn_qk:H=0.6571,top10E=0.40,eRank=96.1,q75/q25=81.22 attn_vo:H=0.5683,top10E=0.53,eRank=73.0,q75/q25=97.46 mlp_w1:H=0.7409,top10E=0.32,eRank=167.5,q75/q25=12.39 mlp_w2:H=0.9117,top10E=0.09,eRank=451.7,q75/q25=8.76 vo_prod:H=0.4549,top10E=0.71,eRank=35.6,q75/q25=8972.53 train_time:472690ms step_avg:87.54ms +[2025-08-22 11:42:45] [Rank 0] step:5401/10000 train_time:472703ms step_avg:87.52ms +[2025-08-22 11:42:45] [Rank 0] step:5401/10000 train_time:472703ms step_avg:87.52ms +[2025-08-22 11:42:47] [Rank 0] step:5421/10000 train_time:474357ms step_avg:87.50ms +[2025-08-22 11:42:47] [Rank 0] step:5421/10000 train_time:474357ms step_avg:87.50ms +[2025-08-22 11:42:49] [Rank 0] step:5441/10000 train_time:476186ms step_avg:87.52ms 
+[2025-08-22 11:42:49] [Rank 0] step:5441/10000 train_time:476186ms step_avg:87.52ms +[2025-08-22 11:42:50] [Rank 0] step:5461/10000 train_time:478022ms step_avg:87.53ms +[2025-08-22 11:42:50] [Rank 0] step:5461/10000 train_time:478022ms step_avg:87.53ms +[2025-08-22 11:42:52] [Rank 0] step:5481/10000 train_time:479854ms step_avg:87.55ms +[2025-08-22 11:42:52] [Rank 0] step:5481/10000 train_time:479854ms step_avg:87.55ms +[2025-08-22 11:42:54] [Rank 0] step:5501/10000 train_time:481693ms step_avg:87.56ms +[2025-08-22 11:42:54] [Rank 0] step:5501/10000 train_time:481693ms step_avg:87.56ms +[2025-08-22 11:42:56] [Rank 0] step:5521/10000 train_time:483534ms step_avg:87.58ms +[2025-08-22 11:42:56] [Rank 0] step:5521/10000 train_time:483534ms step_avg:87.58ms +[2025-08-22 11:42:58] [Rank 0] step:5541/10000 train_time:485411ms step_avg:87.60ms +[2025-08-22 11:42:58] [Rank 0] step:5541/10000 train_time:485411ms step_avg:87.60ms +[2025-08-22 11:43:00] [Rank 0] step:5561/10000 train_time:487249ms step_avg:87.62ms +[2025-08-22 11:43:00] [Rank 0] step:5561/10000 train_time:487249ms step_avg:87.62ms +[2025-08-22 11:43:02] [Rank 0] step:5581/10000 train_time:489093ms step_avg:87.64ms +[2025-08-22 11:43:02] [Rank 0] step:5581/10000 train_time:489093ms step_avg:87.64ms +[2025-08-22 11:43:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:43:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:43:17] [Rank 0] PRINT: step:5600/10000 val_loss:3.9726 svd_entropy: attn_qk:H=0.6587,top10E=0.40,eRank=97.2,q75/q25=81.56 attn_vo:H=0.5716,top10E=0.53,eRank=74.4,q75/q25=98.49 mlp_w1:H=0.7421,top10E=0.32,eRank=169.5,q75/q25=12.41 mlp_w2:H=0.9116,top10E=0.09,eRank=451.7,q75/q25=8.79 vo_prod:H=0.4577,top10E=0.71,eRank=36.2,q75/q25=9286.15 train_time:491122ms step_avg:87.70ms +[2025-08-22 11:43:17] [Rank 0] PRINT: step:5600/10000 val_loss:3.9726 svd_entropy: attn_qk:H=0.6587,top10E=0.40,eRank=97.2,q75/q25=81.56 attn_vo:H=0.5716,top10E=0.53,eRank=74.4,q75/q25=98.49 mlp_w1:H=0.7421,top10E=0.32,eRank=169.5,q75/q25=12.41 mlp_w2:H=0.9116,top10E=0.09,eRank=451.7,q75/q25=8.79 vo_prod:H=0.4577,top10E=0.71,eRank=36.2,q75/q25=9286.15 train_time:491122ms step_avg:87.70ms +[2025-08-22 11:43:17] [Rank 0] step:5601/10000 train_time:491134ms step_avg:87.69ms +[2025-08-22 11:43:17] [Rank 0] step:5601/10000 train_time:491134ms step_avg:87.69ms +[2025-08-22 11:43:19] [Rank 0] step:5621/10000 train_time:492786ms step_avg:87.67ms +[2025-08-22 11:43:19] [Rank 0] step:5621/10000 train_time:492786ms step_avg:87.67ms +[2025-08-22 11:43:21] [Rank 0] step:5641/10000 train_time:494616ms step_avg:87.68ms +[2025-08-22 11:43:21] [Rank 0] step:5641/10000 train_time:494616ms step_avg:87.68ms +[2025-08-22 11:43:23] [Rank 0] step:5661/10000 train_time:496442ms step_avg:87.70ms +[2025-08-22 11:43:23] [Rank 0] step:5661/10000 train_time:496442ms step_avg:87.70ms +[2025-08-22 11:43:25] [Rank 0] step:5681/10000 train_time:498274ms step_avg:87.71ms +[2025-08-22 11:43:25] [Rank 0] step:5681/10000 train_time:498274ms step_avg:87.71ms +[2025-08-22 11:43:26] [Rank 0] step:5701/10000 train_time:500103ms step_avg:87.72ms +[2025-08-22 11:43:26] [Rank 0] step:5701/10000 train_time:500103ms step_avg:87.72ms +[2025-08-22 11:43:28] [Rank 0] step:5721/10000 train_time:501939ms step_avg:87.74ms +[2025-08-22 11:43:28] [Rank 0] step:5721/10000 train_time:501939ms step_avg:87.74ms +[2025-08-22 11:43:30] 
[Rank 0] step:5741/10000 train_time:503774ms step_avg:87.75ms +[2025-08-22 11:43:30] [Rank 0] step:5741/10000 train_time:503774ms step_avg:87.75ms +[2025-08-22 11:43:32] [Rank 0] step:5761/10000 train_time:505607ms step_avg:87.76ms +[2025-08-22 11:43:32] [Rank 0] step:5761/10000 train_time:505607ms step_avg:87.76ms +[2025-08-22 11:43:34] [Rank 0] step:5781/10000 train_time:507442ms step_avg:87.78ms +[2025-08-22 11:43:34] [Rank 0] step:5781/10000 train_time:507442ms step_avg:87.78ms +[2025-08-22 11:43:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:43:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:43:49] [Rank 0] PRINT: step:5800/10000 val_loss:3.9688 svd_entropy: attn_qk:H=0.6607,top10E=0.40,eRank=98.4,q75/q25=81.47 attn_vo:H=0.5746,top10E=0.52,eRank=75.7,q75/q25=99.50 mlp_w1:H=0.7432,top10E=0.31,eRank=171.5,q75/q25=12.43 mlp_w2:H=0.9114,top10E=0.09,eRank=451.7,q75/q25=8.83 vo_prod:H=0.4604,top10E=0.70,eRank=36.8,q75/q25=9553.85 train_time:509462ms step_avg:87.84ms +[2025-08-22 11:43:49] [Rank 0] PRINT: step:5800/10000 val_loss:3.9688 svd_entropy: attn_qk:H=0.6607,top10E=0.40,eRank=98.4,q75/q25=81.47 attn_vo:H=0.5746,top10E=0.52,eRank=75.7,q75/q25=99.50 mlp_w1:H=0.7432,top10E=0.31,eRank=171.5,q75/q25=12.43 mlp_w2:H=0.9114,top10E=0.09,eRank=451.7,q75/q25=8.83 vo_prod:H=0.4604,top10E=0.70,eRank=36.8,q75/q25=9553.85 train_time:509462ms step_avg:87.84ms +[2025-08-22 11:43:49] [Rank 0] step:5801/10000 train_time:509474ms step_avg:87.83ms +[2025-08-22 11:43:49] [Rank 0] step:5801/10000 train_time:509474ms step_avg:87.83ms +[2025-08-22 11:43:51] [Rank 0] step:5821/10000 train_time:511139ms step_avg:87.81ms +[2025-08-22 11:43:51] [Rank 0] step:5821/10000 train_time:511139ms step_avg:87.81ms +[2025-08-22 11:43:53] [Rank 0] step:5841/10000 train_time:512965ms step_avg:87.82ms 
+[2025-08-22 11:43:53] [Rank 0] step:5841/10000 train_time:512965ms step_avg:87.82ms +[2025-08-22 11:43:55] [Rank 0] step:5861/10000 train_time:514799ms step_avg:87.83ms +[2025-08-22 11:43:55] [Rank 0] step:5861/10000 train_time:514799ms step_avg:87.83ms +[2025-08-22 11:43:57] [Rank 0] step:5881/10000 train_time:516632ms step_avg:87.85ms +[2025-08-22 11:43:57] [Rank 0] step:5881/10000 train_time:516632ms step_avg:87.85ms +[2025-08-22 11:43:59] [Rank 0] step:5901/10000 train_time:518462ms step_avg:87.86ms +[2025-08-22 11:43:59] [Rank 0] step:5901/10000 train_time:518462ms step_avg:87.86ms +[2025-08-22 11:44:00] [Rank 0] step:5921/10000 train_time:520295ms step_avg:87.87ms +[2025-08-22 11:44:00] [Rank 0] step:5921/10000 train_time:520295ms step_avg:87.87ms +[2025-08-22 11:44:02] [Rank 0] step:5941/10000 train_time:522133ms step_avg:87.89ms +[2025-08-22 11:44:02] [Rank 0] step:5941/10000 train_time:522133ms step_avg:87.89ms +[2025-08-22 11:44:04] [Rank 0] step:5961/10000 train_time:523970ms step_avg:87.90ms +[2025-08-22 11:44:04] [Rank 0] step:5961/10000 train_time:523970ms step_avg:87.90ms +[2025-08-22 11:44:06] [Rank 0] step:5981/10000 train_time:525806ms step_avg:87.91ms +[2025-08-22 11:44:06] [Rank 0] step:5981/10000 train_time:525806ms step_avg:87.91ms +[2025-08-22 11:44:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:44:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:44:22] [Rank 0] PRINT: step:6000/10000 val_loss:3.9446 svd_entropy: attn_qk:H=0.6624,top10E=0.39,eRank=99.6,q75/q25=81.93 attn_vo:H=0.5776,top10E=0.52,eRank=77.0,q75/q25=99.08 mlp_w1:H=0.7447,top10E=0.31,eRank=173.6,q75/q25=12.38 mlp_w2:H=0.9113,top10E=0.09,eRank=451.7,q75/q25=8.93 vo_prod:H=0.4638,top10E=0.70,eRank=37.5,q75/q25=9584.62 train_time:527823ms step_avg:87.97ms +[2025-08-22 11:44:22] [Rank 0] PRINT: step:6000/10000 val_loss:3.9446 svd_entropy: attn_qk:H=0.6624,top10E=0.39,eRank=99.6,q75/q25=81.93 attn_vo:H=0.5776,top10E=0.52,eRank=77.0,q75/q25=99.08 mlp_w1:H=0.7447,top10E=0.31,eRank=173.6,q75/q25=12.38 mlp_w2:H=0.9113,top10E=0.09,eRank=451.7,q75/q25=8.93 vo_prod:H=0.4638,top10E=0.70,eRank=37.5,q75/q25=9584.62 train_time:527823ms step_avg:87.97ms +[2025-08-22 11:44:22] [Rank 0] step:6001/10000 train_time:527835ms step_avg:87.96ms +[2025-08-22 11:44:22] [Rank 0] step:6001/10000 train_time:527835ms step_avg:87.96ms +[2025-08-22 11:44:24] [Rank 0] step:6021/10000 train_time:529512ms step_avg:87.94ms +[2025-08-22 11:44:24] [Rank 0] step:6021/10000 train_time:529512ms step_avg:87.94ms +[2025-08-22 11:44:25] [Rank 0] step:6041/10000 train_time:531344ms step_avg:87.96ms +[2025-08-22 11:44:25] [Rank 0] step:6041/10000 train_time:531344ms step_avg:87.96ms +[2025-08-22 11:44:27] [Rank 0] step:6061/10000 train_time:533187ms step_avg:87.97ms +[2025-08-22 11:44:27] [Rank 0] step:6061/10000 train_time:533187ms step_avg:87.97ms +[2025-08-22 11:44:29] [Rank 0] step:6081/10000 train_time:535022ms step_avg:87.98ms +[2025-08-22 11:44:29] [Rank 0] step:6081/10000 train_time:535022ms step_avg:87.98ms +[2025-08-22 11:44:31] [Rank 0] step:6101/10000 train_time:536864ms step_avg:88.00ms +[2025-08-22 11:44:31] [Rank 0] step:6101/10000 train_time:536864ms step_avg:88.00ms +[2025-08-22 11:44:33] [Rank 0] step:6121/10000 train_time:538974ms step_avg:88.05ms +[2025-08-22 11:44:33] [Rank 0] step:6121/10000 train_time:538974ms step_avg:88.05ms +[2025-08-22 11:44:35] 
[Rank 0] step:6141/10000 train_time:540823ms step_avg:88.07ms +[2025-08-22 11:44:35] [Rank 0] step:6141/10000 train_time:540823ms step_avg:88.07ms +[2025-08-22 11:44:37] [Rank 0] step:6161/10000 train_time:542665ms step_avg:88.08ms +[2025-08-22 11:44:37] [Rank 0] step:6161/10000 train_time:542665ms step_avg:88.08ms +[2025-08-22 11:44:39] [Rank 0] step:6181/10000 train_time:544501ms step_avg:88.09ms +[2025-08-22 11:44:39] [Rank 0] step:6181/10000 train_time:544501ms step_avg:88.09ms +[2025-08-22 11:44:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:44:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:44:54] [Rank 0] PRINT: step:6200/10000 val_loss:3.9303 svd_entropy: attn_qk:H=0.6639,top10E=0.39,eRank=100.6,q75/q25=82.32 attn_vo:H=0.5805,top10E=0.51,eRank=78.2,q75/q25=99.88 mlp_w1:H=0.7457,top10E=0.31,eRank=175.5,q75/q25=12.37 mlp_w2:H=0.9111,top10E=0.09,eRank=451.6,q75/q25=8.99 vo_prod:H=0.4662,top10E=0.69,eRank=38.0,q75/q25=9546.24 train_time:546526ms step_avg:88.15ms +[2025-08-22 11:44:54] [Rank 0] PRINT: step:6200/10000 val_loss:3.9303 svd_entropy: attn_qk:H=0.6639,top10E=0.39,eRank=100.6,q75/q25=82.32 attn_vo:H=0.5805,top10E=0.51,eRank=78.2,q75/q25=99.88 mlp_w1:H=0.7457,top10E=0.31,eRank=175.5,q75/q25=12.37 mlp_w2:H=0.9111,top10E=0.09,eRank=451.6,q75/q25=8.99 vo_prod:H=0.4662,top10E=0.69,eRank=38.0,q75/q25=9546.24 train_time:546526ms step_avg:88.15ms +[2025-08-22 11:44:54] [Rank 0] step:6201/10000 train_time:546539ms step_avg:88.14ms +[2025-08-22 11:44:54] [Rank 0] step:6201/10000 train_time:546539ms step_avg:88.14ms +[2025-08-22 11:44:56] [Rank 0] step:6221/10000 train_time:548200ms step_avg:88.12ms +[2025-08-22 11:44:56] [Rank 0] step:6221/10000 train_time:548200ms step_avg:88.12ms +[2025-08-22 11:44:58] [Rank 0] step:6241/10000 train_time:550031ms step_avg:88.13ms 
+[2025-08-22 11:44:58] [Rank 0] step:6241/10000 train_time:550031ms step_avg:88.13ms +[2025-08-22 11:45:00] [Rank 0] step:6261/10000 train_time:551864ms step_avg:88.14ms +[2025-08-22 11:45:00] [Rank 0] step:6261/10000 train_time:551864ms step_avg:88.14ms +[2025-08-22 11:45:02] [Rank 0] step:6281/10000 train_time:553703ms step_avg:88.16ms +[2025-08-22 11:45:02] [Rank 0] step:6281/10000 train_time:553703ms step_avg:88.16ms +[2025-08-22 11:45:04] [Rank 0] step:6301/10000 train_time:555540ms step_avg:88.17ms +[2025-08-22 11:45:04] [Rank 0] step:6301/10000 train_time:555540ms step_avg:88.17ms +[2025-08-22 11:45:05] [Rank 0] step:6321/10000 train_time:557378ms step_avg:88.18ms +[2025-08-22 11:45:05] [Rank 0] step:6321/10000 train_time:557378ms step_avg:88.18ms +[2025-08-22 11:45:07] [Rank 0] step:6341/10000 train_time:559219ms step_avg:88.19ms +[2025-08-22 11:45:07] [Rank 0] step:6341/10000 train_time:559219ms step_avg:88.19ms +[2025-08-22 11:45:09] [Rank 0] step:6361/10000 train_time:561061ms step_avg:88.20ms +[2025-08-22 11:45:09] [Rank 0] step:6361/10000 train_time:561061ms step_avg:88.20ms +[2025-08-22 11:45:11] [Rank 0] step:6381/10000 train_time:562903ms step_avg:88.22ms +[2025-08-22 11:45:11] [Rank 0] step:6381/10000 train_time:562903ms step_avg:88.22ms +[2025-08-22 11:45:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:45:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:45:26] [Rank 0] PRINT: step:6400/10000 val_loss:3.9174 svd_entropy: attn_qk:H=0.6653,top10E=0.39,eRank=101.6,q75/q25=82.74 attn_vo:H=0.5831,top10E=0.51,eRank=79.4,q75/q25=100.53 mlp_w1:H=0.7473,top10E=0.31,eRank=177.5,q75/q25=12.36 mlp_w2:H=0.9110,top10E=0.09,eRank=451.5,q75/q25=9.00 vo_prod:H=0.4682,top10E=0.69,eRank=38.6,q75/q25=9890.27 train_time:564923ms step_avg:88.27ms +[2025-08-22 11:45:26] [Rank 0] PRINT: step:6400/10000 val_loss:3.9174 svd_entropy: attn_qk:H=0.6653,top10E=0.39,eRank=101.6,q75/q25=82.74 attn_vo:H=0.5831,top10E=0.51,eRank=79.4,q75/q25=100.53 mlp_w1:H=0.7473,top10E=0.31,eRank=177.5,q75/q25=12.36 mlp_w2:H=0.9110,top10E=0.09,eRank=451.5,q75/q25=9.00 vo_prod:H=0.4682,top10E=0.69,eRank=38.6,q75/q25=9890.27 train_time:564923ms step_avg:88.27ms +[2025-08-22 11:45:27] [Rank 0] step:6401/10000 train_time:564937ms step_avg:88.26ms +[2025-08-22 11:45:27] [Rank 0] step:6401/10000 train_time:564937ms step_avg:88.26ms +[2025-08-22 11:45:28] [Rank 0] step:6421/10000 train_time:566598ms step_avg:88.24ms +[2025-08-22 11:45:28] [Rank 0] step:6421/10000 train_time:566598ms step_avg:88.24ms +[2025-08-22 11:45:30] [Rank 0] step:6441/10000 train_time:568432ms step_avg:88.25ms +[2025-08-22 11:45:30] [Rank 0] step:6441/10000 train_time:568432ms step_avg:88.25ms +[2025-08-22 11:45:32] [Rank 0] step:6461/10000 train_time:570272ms step_avg:88.26ms +[2025-08-22 11:45:32] [Rank 0] step:6461/10000 train_time:570272ms step_avg:88.26ms +[2025-08-22 11:45:34] [Rank 0] step:6481/10000 train_time:572114ms step_avg:88.28ms +[2025-08-22 11:45:34] [Rank 0] step:6481/10000 train_time:572114ms step_avg:88.28ms +[2025-08-22 11:45:36] [Rank 0] step:6501/10000 train_time:573947ms step_avg:88.29ms +[2025-08-22 11:45:36] [Rank 0] step:6501/10000 train_time:573947ms step_avg:88.29ms +[2025-08-22 11:45:38] [Rank 0] step:6521/10000 train_time:575779ms step_avg:88.30ms +[2025-08-22 11:45:38] [Rank 0] step:6521/10000 train_time:575779ms step_avg:88.30ms +[2025-08-22 
11:45:39] [Rank 0] step:6541/10000 train_time:577616ms step_avg:88.31ms +[2025-08-22 11:45:39] [Rank 0] step:6541/10000 train_time:577616ms step_avg:88.31ms +[2025-08-22 11:45:41] [Rank 0] step:6561/10000 train_time:579456ms step_avg:88.32ms +[2025-08-22 11:45:41] [Rank 0] step:6561/10000 train_time:579456ms step_avg:88.32ms +[2025-08-22 11:45:43] [Rank 0] step:6581/10000 train_time:581289ms step_avg:88.33ms +[2025-08-22 11:45:43] [Rank 0] step:6581/10000 train_time:581289ms step_avg:88.33ms +[2025-08-22 11:45:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:45:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:45:58] [Rank 0] PRINT: step:6600/10000 val_loss:3.9035 svd_entropy: attn_qk:H=0.6667,top10E=0.39,eRank=102.6,q75/q25=83.14 attn_vo:H=0.5855,top10E=0.50,eRank=80.5,q75/q25=100.83 mlp_w1:H=0.7477,top10E=0.31,eRank=179.0,q75/q25=12.31 mlp_w2:H=0.9108,top10E=0.09,eRank=451.3,q75/q25=9.12 vo_prod:H=0.4710,top10E=0.68,eRank=39.2,q75/q25=9949.77 train_time:583311ms step_avg:88.38ms +[2025-08-22 11:45:58] [Rank 0] PRINT: step:6600/10000 val_loss:3.9035 svd_entropy: attn_qk:H=0.6667,top10E=0.39,eRank=102.6,q75/q25=83.14 attn_vo:H=0.5855,top10E=0.50,eRank=80.5,q75/q25=100.83 mlp_w1:H=0.7477,top10E=0.31,eRank=179.0,q75/q25=12.31 mlp_w2:H=0.9108,top10E=0.09,eRank=451.3,q75/q25=9.12 vo_prod:H=0.4710,top10E=0.68,eRank=39.2,q75/q25=9949.77 train_time:583311ms step_avg:88.38ms +[2025-08-22 11:45:59] [Rank 0] step:6601/10000 train_time:583324ms step_avg:88.37ms +[2025-08-22 11:45:59] [Rank 0] step:6601/10000 train_time:583324ms step_avg:88.37ms +[2025-08-22 11:46:00] [Rank 0] step:6621/10000 train_time:584998ms step_avg:88.35ms +[2025-08-22 11:46:00] [Rank 0] step:6621/10000 train_time:584998ms step_avg:88.35ms +[2025-08-22 11:46:02] [Rank 0] step:6641/10000 train_time:586836ms 
step_avg:88.37ms +[2025-08-22 11:46:02] [Rank 0] step:6641/10000 train_time:586836ms step_avg:88.37ms +[2025-08-22 11:46:04] [Rank 0] step:6661/10000 train_time:588670ms step_avg:88.38ms +[2025-08-22 11:46:04] [Rank 0] step:6661/10000 train_time:588670ms step_avg:88.38ms +[2025-08-22 11:46:06] [Rank 0] step:6681/10000 train_time:590521ms step_avg:88.39ms +[2025-08-22 11:46:06] [Rank 0] step:6681/10000 train_time:590521ms step_avg:88.39ms +[2025-08-22 11:46:08] [Rank 0] step:6701/10000 train_time:592449ms step_avg:88.41ms +[2025-08-22 11:46:08] [Rank 0] step:6701/10000 train_time:592449ms step_avg:88.41ms +[2025-08-22 11:46:10] [Rank 0] step:6721/10000 train_time:594325ms step_avg:88.43ms +[2025-08-22 11:46:10] [Rank 0] step:6721/10000 train_time:594325ms step_avg:88.43ms +[2025-08-22 11:46:12] [Rank 0] step:6741/10000 train_time:596187ms step_avg:88.44ms +[2025-08-22 11:46:12] [Rank 0] step:6741/10000 train_time:596187ms step_avg:88.44ms +[2025-08-22 11:46:13] [Rank 0] step:6761/10000 train_time:598052ms step_avg:88.46ms +[2025-08-22 11:46:13] [Rank 0] step:6761/10000 train_time:598052ms step_avg:88.46ms +[2025-08-22 11:46:15] [Rank 0] step:6781/10000 train_time:599919ms step_avg:88.47ms +[2025-08-22 11:46:15] [Rank 0] step:6781/10000 train_time:599919ms step_avg:88.47ms +[2025-08-22 11:46:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:46:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:46:31] [Rank 0] PRINT: step:6800/10000 val_loss:3.8873 svd_entropy: attn_qk:H=0.6677,top10E=0.39,eRank=103.4,q75/q25=83.27 attn_vo:H=0.5877,top10E=0.50,eRank=81.5,q75/q25=101.35 mlp_w1:H=0.7486,top10E=0.31,eRank=180.5,q75/q25=12.29 mlp_w2:H=0.9106,top10E=0.09,eRank=451.3,q75/q25=9.15 vo_prod:H=0.4727,top10E=0.68,eRank=39.6,q75/q25=10003.77 train_time:601977ms step_avg:88.53ms +[2025-08-22 11:46:31] [Rank 0] PRINT: step:6800/10000 val_loss:3.8873 svd_entropy: attn_qk:H=0.6677,top10E=0.39,eRank=103.4,q75/q25=83.27 attn_vo:H=0.5877,top10E=0.50,eRank=81.5,q75/q25=101.35 mlp_w1:H=0.7486,top10E=0.31,eRank=180.5,q75/q25=12.29 mlp_w2:H=0.9106,top10E=0.09,eRank=451.3,q75/q25=9.15 vo_prod:H=0.4727,top10E=0.68,eRank=39.6,q75/q25=10003.77 train_time:601977ms step_avg:88.53ms +[2025-08-22 11:46:31] [Rank 0] step:6801/10000 train_time:601990ms step_avg:88.51ms +[2025-08-22 11:46:31] [Rank 0] step:6801/10000 train_time:601990ms step_avg:88.51ms +[2025-08-22 11:46:33] [Rank 0] step:6821/10000 train_time:603685ms step_avg:88.50ms +[2025-08-22 11:46:33] [Rank 0] step:6821/10000 train_time:603685ms step_avg:88.50ms +[2025-08-22 11:46:35] [Rank 0] step:6841/10000 train_time:605546ms step_avg:88.52ms +[2025-08-22 11:46:35] [Rank 0] step:6841/10000 train_time:605546ms step_avg:88.52ms +[2025-08-22 11:46:37] [Rank 0] step:6861/10000 train_time:607413ms step_avg:88.53ms +[2025-08-22 11:46:37] [Rank 0] step:6861/10000 train_time:607413ms step_avg:88.53ms +[2025-08-22 11:46:38] [Rank 0] step:6881/10000 train_time:609278ms step_avg:88.55ms +[2025-08-22 11:46:38] [Rank 0] step:6881/10000 train_time:609278ms step_avg:88.55ms +[2025-08-22 11:46:40] [Rank 0] step:6901/10000 train_time:611144ms step_avg:88.56ms +[2025-08-22 11:46:40] [Rank 0] step:6901/10000 train_time:611144ms step_avg:88.56ms +[2025-08-22 11:46:42] [Rank 0] step:6921/10000 train_time:613005ms step_avg:88.57ms +[2025-08-22 11:46:42] [Rank 0] step:6921/10000 train_time:613005ms step_avg:88.57ms +[2025-08-22 
11:46:44] [Rank 0] step:6941/10000 train_time:614881ms step_avg:88.59ms +[2025-08-22 11:46:44] [Rank 0] step:6941/10000 train_time:614881ms step_avg:88.59ms +[2025-08-22 11:46:46] [Rank 0] step:6961/10000 train_time:616765ms step_avg:88.60ms +[2025-08-22 11:46:46] [Rank 0] step:6961/10000 train_time:616765ms step_avg:88.60ms +[2025-08-22 11:46:48] [Rank 0] step:6981/10000 train_time:618639ms step_avg:88.62ms +[2025-08-22 11:46:48] [Rank 0] step:6981/10000 train_time:618639ms step_avg:88.62ms +[2025-08-22 11:46:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:46:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:47:03] [Rank 0] PRINT: step:7000/10000 val_loss:3.8715 svd_entropy: attn_qk:H=0.6689,top10E=0.38,eRank=104.3,q75/q25=83.07 attn_vo:H=0.5896,top10E=0.49,eRank=82.4,q75/q25=101.60 mlp_w1:H=0.7491,top10E=0.31,eRank=181.8,q75/q25=12.26 mlp_w2:H=0.9105,top10E=0.09,eRank=451.2,q75/q25=9.17 vo_prod:H=0.4745,top10E=0.68,eRank=40.1,q75/q25=10117.23 train_time:620700ms step_avg:88.67ms +[2025-08-22 11:47:03] [Rank 0] PRINT: step:7000/10000 val_loss:3.8715 svd_entropy: attn_qk:H=0.6689,top10E=0.38,eRank=104.3,q75/q25=83.07 attn_vo:H=0.5896,top10E=0.49,eRank=82.4,q75/q25=101.60 mlp_w1:H=0.7491,top10E=0.31,eRank=181.8,q75/q25=12.26 mlp_w2:H=0.9105,top10E=0.09,eRank=451.2,q75/q25=9.17 vo_prod:H=0.4745,top10E=0.68,eRank=40.1,q75/q25=10117.23 train_time:620700ms step_avg:88.67ms +[2025-08-22 11:47:03] [Rank 0] step:7001/10000 train_time:620713ms step_avg:88.66ms +[2025-08-22 11:47:03] [Rank 0] step:7001/10000 train_time:620713ms step_avg:88.66ms +[2025-08-22 11:47:05] [Rank 0] step:7021/10000 train_time:622402ms step_avg:88.65ms +[2025-08-22 11:47:05] [Rank 0] step:7021/10000 train_time:622402ms step_avg:88.65ms +[2025-08-22 11:47:07] [Rank 0] step:7041/10000 train_time:624268ms 
step_avg:88.66ms +[2025-08-22 11:47:07] [Rank 0] step:7041/10000 train_time:624268ms step_avg:88.66ms +[2025-08-22 11:47:09] [Rank 0] step:7061/10000 train_time:626131ms step_avg:88.67ms +[2025-08-22 11:47:09] [Rank 0] step:7061/10000 train_time:626131ms step_avg:88.67ms +[2025-08-22 11:47:11] [Rank 0] step:7081/10000 train_time:627997ms step_avg:88.69ms +[2025-08-22 11:47:11] [Rank 0] step:7081/10000 train_time:627997ms step_avg:88.69ms +[2025-08-22 11:47:13] [Rank 0] step:7101/10000 train_time:629864ms step_avg:88.70ms +[2025-08-22 11:47:13] [Rank 0] step:7101/10000 train_time:629864ms step_avg:88.70ms +[2025-08-22 11:47:14] [Rank 0] step:7121/10000 train_time:631729ms step_avg:88.71ms +[2025-08-22 11:47:14] [Rank 0] step:7121/10000 train_time:631729ms step_avg:88.71ms +[2025-08-22 11:47:16] [Rank 0] step:7141/10000 train_time:633593ms step_avg:88.73ms +[2025-08-22 11:47:16] [Rank 0] step:7141/10000 train_time:633593ms step_avg:88.73ms +[2025-08-22 11:47:18] [Rank 0] step:7161/10000 train_time:635461ms step_avg:88.74ms +[2025-08-22 11:47:18] [Rank 0] step:7161/10000 train_time:635461ms step_avg:88.74ms +[2025-08-22 11:47:20] [Rank 0] step:7181/10000 train_time:637326ms step_avg:88.75ms +[2025-08-22 11:47:20] [Rank 0] step:7181/10000 train_time:637326ms step_avg:88.75ms +[2025-08-22 11:47:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:47:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:47:35] [Rank 0] PRINT: step:7200/10000 val_loss:3.8623 svd_entropy: attn_qk:H=0.6699,top10E=0.38,eRank=105.0,q75/q25=83.53 attn_vo:H=0.5915,top10E=0.49,eRank=83.3,q75/q25=101.77 mlp_w1:H=0.7500,top10E=0.30,eRank=183.2,q75/q25=12.26 mlp_w2:H=0.9104,top10E=0.09,eRank=451.2,q75/q25=9.24 vo_prod:H=0.4766,top10E=0.67,eRank=40.5,q75/q25=10198.22 train_time:639387ms step_avg:88.80ms +[2025-08-22 11:47:35] [Rank 0] PRINT: step:7200/10000 val_loss:3.8623 svd_entropy: attn_qk:H=0.6699,top10E=0.38,eRank=105.0,q75/q25=83.53 attn_vo:H=0.5915,top10E=0.49,eRank=83.3,q75/q25=101.77 mlp_w1:H=0.7500,top10E=0.30,eRank=183.2,q75/q25=12.26 mlp_w2:H=0.9104,top10E=0.09,eRank=451.2,q75/q25=9.24 vo_prod:H=0.4766,top10E=0.67,eRank=40.5,q75/q25=10198.22 train_time:639387ms step_avg:88.80ms +[2025-08-22 11:47:36] [Rank 0] step:7201/10000 train_time:639400ms step_avg:88.79ms +[2025-08-22 11:47:36] [Rank 0] step:7201/10000 train_time:639400ms step_avg:88.79ms +[2025-08-22 11:47:37] [Rank 0] step:7221/10000 train_time:641098ms step_avg:88.78ms +[2025-08-22 11:47:37] [Rank 0] step:7221/10000 train_time:641098ms step_avg:88.78ms +[2025-08-22 11:47:39] [Rank 0] step:7241/10000 train_time:642957ms step_avg:88.79ms +[2025-08-22 11:47:39] [Rank 0] step:7241/10000 train_time:642957ms step_avg:88.79ms +[2025-08-22 11:47:41] [Rank 0] step:7261/10000 train_time:644819ms step_avg:88.81ms +[2025-08-22 11:47:41] [Rank 0] step:7261/10000 train_time:644819ms step_avg:88.81ms +[2025-08-22 11:47:43] [Rank 0] step:7281/10000 train_time:646690ms step_avg:88.82ms +[2025-08-22 11:47:43] [Rank 0] step:7281/10000 train_time:646690ms step_avg:88.82ms +[2025-08-22 11:47:45] [Rank 0] step:7301/10000 train_time:648555ms step_avg:88.83ms +[2025-08-22 11:47:45] [Rank 0] step:7301/10000 train_time:648555ms step_avg:88.83ms +[2025-08-22 11:47:47] [Rank 0] step:7321/10000 train_time:650431ms step_avg:88.84ms +[2025-08-22 11:47:47] [Rank 0] step:7321/10000 train_time:650431ms step_avg:88.84ms +[2025-08-22 
11:47:49] [Rank 0] step:7341/10000 train_time:652299ms step_avg:88.86ms +[2025-08-22 11:47:49] [Rank 0] step:7341/10000 train_time:652299ms step_avg:88.86ms +[2025-08-22 11:47:51] [Rank 0] step:7361/10000 train_time:654173ms step_avg:88.87ms +[2025-08-22 11:47:51] [Rank 0] step:7361/10000 train_time:654173ms step_avg:88.87ms +[2025-08-22 11:47:52] [Rank 0] step:7381/10000 train_time:656049ms step_avg:88.88ms +[2025-08-22 11:47:52] [Rank 0] step:7381/10000 train_time:656049ms step_avg:88.88ms +[2025-08-22 11:47:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:47:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:48:08] [Rank 0] PRINT: step:7400/10000 val_loss:3.8401 svd_entropy: attn_qk:H=0.6710,top10E=0.38,eRank=105.7,q75/q25=83.43 attn_vo:H=0.5931,top10E=0.49,eRank=84.1,q75/q25=100.80 mlp_w1:H=0.7507,top10E=0.30,eRank=184.4,q75/q25=12.19 mlp_w2:H=0.9103,top10E=0.09,eRank=451.1,q75/q25=9.26 vo_prod:H=0.4783,top10E=0.67,eRank=40.9,q75/q25=9838.10 train_time:658089ms step_avg:88.93ms +[2025-08-22 11:48:08] [Rank 0] PRINT: step:7400/10000 val_loss:3.8401 svd_entropy: attn_qk:H=0.6710,top10E=0.38,eRank=105.7,q75/q25=83.43 attn_vo:H=0.5931,top10E=0.49,eRank=84.1,q75/q25=100.80 mlp_w1:H=0.7507,top10E=0.30,eRank=184.4,q75/q25=12.19 mlp_w2:H=0.9103,top10E=0.09,eRank=451.1,q75/q25=9.26 vo_prod:H=0.4783,top10E=0.67,eRank=40.9,q75/q25=9838.10 train_time:658089ms step_avg:88.93ms +[2025-08-22 11:48:08] [Rank 0] step:7401/10000 train_time:658101ms step_avg:88.92ms +[2025-08-22 11:48:08] [Rank 0] step:7401/10000 train_time:658101ms step_avg:88.92ms +[2025-08-22 11:48:10] [Rank 0] step:7421/10000 train_time:659788ms step_avg:88.91ms +[2025-08-22 11:48:10] [Rank 0] step:7421/10000 train_time:659788ms step_avg:88.91ms +[2025-08-22 11:48:12] [Rank 0] step:7441/10000 train_time:661654ms 
step_avg:88.92ms +[2025-08-22 11:48:12] [Rank 0] step:7441/10000 train_time:661654ms step_avg:88.92ms +[2025-08-22 11:48:13] [Rank 0] step:7461/10000 train_time:663518ms step_avg:88.93ms +[2025-08-22 11:48:13] [Rank 0] step:7461/10000 train_time:663518ms step_avg:88.93ms +[2025-08-22 11:48:15] [Rank 0] step:7481/10000 train_time:665393ms step_avg:88.94ms +[2025-08-22 11:48:15] [Rank 0] step:7481/10000 train_time:665393ms step_avg:88.94ms +[2025-08-22 11:48:17] [Rank 0] step:7501/10000 train_time:667267ms step_avg:88.96ms +[2025-08-22 11:48:17] [Rank 0] step:7501/10000 train_time:667267ms step_avg:88.96ms +[2025-08-22 11:48:19] [Rank 0] step:7521/10000 train_time:669140ms step_avg:88.97ms +[2025-08-22 11:48:19] [Rank 0] step:7521/10000 train_time:669140ms step_avg:88.97ms +[2025-08-22 11:48:21] [Rank 0] step:7541/10000 train_time:671025ms step_avg:88.98ms +[2025-08-22 11:48:21] [Rank 0] step:7541/10000 train_time:671025ms step_avg:88.98ms +[2025-08-22 11:48:23] [Rank 0] step:7561/10000 train_time:672886ms step_avg:88.99ms +[2025-08-22 11:48:23] [Rank 0] step:7561/10000 train_time:672886ms step_avg:88.99ms +[2025-08-22 11:48:25] [Rank 0] step:7581/10000 train_time:674769ms step_avg:89.01ms +[2025-08-22 11:48:25] [Rank 0] step:7581/10000 train_time:674769ms step_avg:89.01ms +[2025-08-22 11:48:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:48:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:48:40] [Rank 0] PRINT: step:7600/10000 val_loss:3.8330 svd_entropy: attn_qk:H=0.6719,top10E=0.38,eRank=106.4,q75/q25=83.13 attn_vo:H=0.5947,top10E=0.48,eRank=84.8,q75/q25=100.18 mlp_w1:H=0.7509,top10E=0.30,eRank=185.4,q75/q25=12.15 mlp_w2:H=0.9102,top10E=0.09,eRank=451.1,q75/q25=9.31 vo_prod:H=0.4797,top10E=0.67,eRank=41.3,q75/q25=9760.77 train_time:676839ms step_avg:89.06ms +[2025-08-22 11:48:40] [Rank 0] PRINT: step:7600/10000 val_loss:3.8330 svd_entropy: attn_qk:H=0.6719,top10E=0.38,eRank=106.4,q75/q25=83.13 attn_vo:H=0.5947,top10E=0.48,eRank=84.8,q75/q25=100.18 mlp_w1:H=0.7509,top10E=0.30,eRank=185.4,q75/q25=12.15 mlp_w2:H=0.9102,top10E=0.09,eRank=451.1,q75/q25=9.31 vo_prod:H=0.4797,top10E=0.67,eRank=41.3,q75/q25=9760.77 train_time:676839ms step_avg:89.06ms +[2025-08-22 11:48:40] [Rank 0] step:7601/10000 train_time:676851ms step_avg:89.05ms +[2025-08-22 11:48:40] [Rank 0] step:7601/10000 train_time:676851ms step_avg:89.05ms +[2025-08-22 11:48:42] [Rank 0] step:7621/10000 train_time:678558ms step_avg:89.04ms +[2025-08-22 11:48:42] [Rank 0] step:7621/10000 train_time:678558ms step_avg:89.04ms +[2025-08-22 11:48:44] [Rank 0] step:7641/10000 train_time:680427ms step_avg:89.05ms +[2025-08-22 11:48:44] [Rank 0] step:7641/10000 train_time:680427ms step_avg:89.05ms +[2025-08-22 11:48:46] [Rank 0] step:7661/10000 train_time:682301ms step_avg:89.06ms +[2025-08-22 11:48:46] [Rank 0] step:7661/10000 train_time:682301ms step_avg:89.06ms +[2025-08-22 11:48:48] [Rank 0] step:7681/10000 train_time:684171ms step_avg:89.07ms +[2025-08-22 11:48:48] [Rank 0] step:7681/10000 train_time:684171ms step_avg:89.07ms +[2025-08-22 11:48:50] [Rank 0] step:7701/10000 train_time:686043ms step_avg:89.08ms +[2025-08-22 11:48:50] [Rank 0] step:7701/10000 train_time:686043ms step_avg:89.08ms +[2025-08-22 11:48:52] [Rank 0] step:7721/10000 train_time:687933ms step_avg:89.10ms +[2025-08-22 11:48:52] [Rank 0] step:7721/10000 train_time:687933ms step_avg:89.10ms +[2025-08-22 
11:48:53] [Rank 0] step:7741/10000 train_time:689810ms step_avg:89.11ms +[2025-08-22 11:48:53] [Rank 0] step:7741/10000 train_time:689810ms step_avg:89.11ms +[2025-08-22 11:48:55] [Rank 0] step:7761/10000 train_time:691692ms step_avg:89.12ms +[2025-08-22 11:48:55] [Rank 0] step:7761/10000 train_time:691692ms step_avg:89.12ms +[2025-08-22 11:48:57] [Rank 0] step:7781/10000 train_time:693577ms step_avg:89.14ms +[2025-08-22 11:48:57] [Rank 0] step:7781/10000 train_time:693577ms step_avg:89.14ms +[2025-08-22 11:48:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:48:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:49:13] [Rank 0] PRINT: step:7800/10000 val_loss:3.8206 svd_entropy: attn_qk:H=0.6727,top10E=0.38,eRank=106.9,q75/q25=83.42 attn_vo:H=0.5961,top10E=0.48,eRank=85.5,q75/q25=100.73 mlp_w1:H=0.7513,top10E=0.30,eRank=186.3,q75/q25=12.11 mlp_w2:H=0.9101,top10E=0.09,eRank=451.0,q75/q25=9.35 vo_prod:H=0.4810,top10E=0.66,eRank=41.6,q75/q25=9818.18 train_time:695653ms step_avg:89.19ms +[2025-08-22 11:49:13] [Rank 0] PRINT: step:7800/10000 val_loss:3.8206 svd_entropy: attn_qk:H=0.6727,top10E=0.38,eRank=106.9,q75/q25=83.42 attn_vo:H=0.5961,top10E=0.48,eRank=85.5,q75/q25=100.73 mlp_w1:H=0.7513,top10E=0.30,eRank=186.3,q75/q25=12.11 mlp_w2:H=0.9101,top10E=0.09,eRank=451.0,q75/q25=9.35 vo_prod:H=0.4810,top10E=0.66,eRank=41.6,q75/q25=9818.18 train_time:695653ms step_avg:89.19ms +[2025-08-22 11:49:13] [Rank 0] step:7801/10000 train_time:695666ms step_avg:89.18ms +[2025-08-22 11:49:13] [Rank 0] step:7801/10000 train_time:695666ms step_avg:89.18ms +[2025-08-22 11:49:15] [Rank 0] step:7821/10000 train_time:697345ms step_avg:89.16ms +[2025-08-22 11:49:15] [Rank 0] step:7821/10000 train_time:697345ms step_avg:89.16ms +[2025-08-22 11:49:16] [Rank 0] step:7841/10000 train_time:699213ms 
step_avg:89.17ms +[2025-08-22 11:49:16] [Rank 0] step:7841/10000 train_time:699213ms step_avg:89.17ms +[2025-08-22 11:49:18] [Rank 0] step:7861/10000 train_time:701087ms step_avg:89.19ms +[2025-08-22 11:49:18] [Rank 0] step:7861/10000 train_time:701087ms step_avg:89.19ms +[2025-08-22 11:49:20] [Rank 0] step:7881/10000 train_time:702963ms step_avg:89.20ms +[2025-08-22 11:49:20] [Rank 0] step:7881/10000 train_time:702963ms step_avg:89.20ms +[2025-08-22 11:49:22] [Rank 0] step:7901/10000 train_time:704832ms step_avg:89.21ms +[2025-08-22 11:49:22] [Rank 0] step:7901/10000 train_time:704832ms step_avg:89.21ms +[2025-08-22 11:49:24] [Rank 0] step:7921/10000 train_time:706706ms step_avg:89.22ms +[2025-08-22 11:49:24] [Rank 0] step:7921/10000 train_time:706706ms step_avg:89.22ms +[2025-08-22 11:49:26] [Rank 0] step:7941/10000 train_time:708585ms step_avg:89.23ms +[2025-08-22 11:49:26] [Rank 0] step:7941/10000 train_time:708585ms step_avg:89.23ms +[2025-08-22 11:49:28] [Rank 0] step:7961/10000 train_time:710462ms step_avg:89.24ms +[2025-08-22 11:49:28] [Rank 0] step:7961/10000 train_time:710462ms step_avg:89.24ms +[2025-08-22 11:49:30] [Rank 0] step:7981/10000 train_time:712335ms step_avg:89.25ms +[2025-08-22 11:49:30] [Rank 0] step:7981/10000 train_time:712335ms step_avg:89.25ms +[2025-08-22 11:49:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:49:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:49:45] [Rank 0] PRINT: step:8000/10000 val_loss:3.8010 svd_entropy: attn_qk:H=0.6734,top10E=0.38,eRank=107.5,q75/q25=83.58 attn_vo:H=0.5974,top10E=0.48,eRank=86.1,q75/q25=99.77 mlp_w1:H=0.7516,top10E=0.30,eRank=187.2,q75/q25=12.08 mlp_w2:H=0.9101,top10E=0.09,eRank=451.0,q75/q25=9.39 vo_prod:H=0.4820,top10E=0.66,eRank=41.9,q75/q25=9910.87 train_time:714396ms step_avg:89.30ms +[2025-08-22 11:49:45] [Rank 0] PRINT: step:8000/10000 val_loss:3.8010 svd_entropy: attn_qk:H=0.6734,top10E=0.38,eRank=107.5,q75/q25=83.58 attn_vo:H=0.5974,top10E=0.48,eRank=86.1,q75/q25=99.77 mlp_w1:H=0.7516,top10E=0.30,eRank=187.2,q75/q25=12.08 mlp_w2:H=0.9101,top10E=0.09,eRank=451.0,q75/q25=9.39 vo_prod:H=0.4820,top10E=0.66,eRank=41.9,q75/q25=9910.87 train_time:714396ms step_avg:89.30ms +[2025-08-22 11:49:45] [Rank 0] step:8001/10000 train_time:714408ms step_avg:89.29ms +[2025-08-22 11:49:45] [Rank 0] step:8001/10000 train_time:714408ms step_avg:89.29ms +[2025-08-22 11:49:47] [Rank 0] step:8021/10000 train_time:716105ms step_avg:89.28ms +[2025-08-22 11:49:47] [Rank 0] step:8021/10000 train_time:716105ms step_avg:89.28ms +[2025-08-22 11:49:49] [Rank 0] step:8041/10000 train_time:717983ms step_avg:89.29ms +[2025-08-22 11:49:49] [Rank 0] step:8041/10000 train_time:717983ms step_avg:89.29ms +[2025-08-22 11:49:51] [Rank 0] step:8061/10000 train_time:719854ms step_avg:89.30ms +[2025-08-22 11:49:51] [Rank 0] step:8061/10000 train_time:719854ms step_avg:89.30ms +[2025-08-22 11:49:53] [Rank 0] step:8081/10000 train_time:721717ms step_avg:89.31ms +[2025-08-22 11:49:53] [Rank 0] step:8081/10000 train_time:721717ms step_avg:89.31ms +[2025-08-22 11:49:55] [Rank 0] step:8101/10000 train_time:723595ms step_avg:89.32ms +[2025-08-22 11:49:55] [Rank 0] step:8101/10000 train_time:723595ms step_avg:89.32ms +[2025-08-22 11:49:57] [Rank 0] step:8121/10000 train_time:725464ms step_avg:89.33ms +[2025-08-22 11:49:57] [Rank 0] step:8121/10000 train_time:725464ms step_avg:89.33ms +[2025-08-22 11:49:59] 
[Rank 0] step:8141/10000 train_time:727811ms step_avg:89.40ms +[2025-08-22 11:49:59] [Rank 0] step:8141/10000 train_time:727811ms step_avg:89.40ms +[2025-08-22 11:50:01] [Rank 0] step:8161/10000 train_time:729697ms step_avg:89.41ms +[2025-08-22 11:50:01] [Rank 0] step:8161/10000 train_time:729697ms step_avg:89.41ms +[2025-08-22 11:50:03] [Rank 0] step:8181/10000 train_time:731601ms step_avg:89.43ms +[2025-08-22 11:50:03] [Rank 0] step:8181/10000 train_time:731601ms step_avg:89.43ms +[2025-08-22 11:50:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:50:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:50:18] [Rank 0] PRINT: step:8200/10000 val_loss:3.7900 svd_entropy: attn_qk:H=0.6741,top10E=0.38,eRank=108.0,q75/q25=83.40 attn_vo:H=0.5987,top10E=0.48,eRank=86.8,q75/q25=100.58 mlp_w1:H=0.7518,top10E=0.30,eRank=188.0,q75/q25=12.05 mlp_w2:H=0.9100,top10E=0.09,eRank=451.1,q75/q25=9.45 vo_prod:H=0.4833,top10E=0.66,eRank=42.2,q75/q25=9941.49 train_time:733717ms step_avg:89.48ms +[2025-08-22 11:50:18] [Rank 0] PRINT: step:8200/10000 val_loss:3.7900 svd_entropy: attn_qk:H=0.6741,top10E=0.38,eRank=108.0,q75/q25=83.40 attn_vo:H=0.5987,top10E=0.48,eRank=86.8,q75/q25=100.58 mlp_w1:H=0.7518,top10E=0.30,eRank=188.0,q75/q25=12.05 mlp_w2:H=0.9100,top10E=0.09,eRank=451.1,q75/q25=9.45 vo_prod:H=0.4833,top10E=0.66,eRank=42.2,q75/q25=9941.49 train_time:733717ms step_avg:89.48ms +[2025-08-22 11:50:18] [Rank 0] step:8201/10000 train_time:733729ms step_avg:89.47ms +[2025-08-22 11:50:18] [Rank 0] step:8201/10000 train_time:733729ms step_avg:89.47ms +[2025-08-22 11:50:20] [Rank 0] step:8221/10000 train_time:735450ms step_avg:89.46ms +[2025-08-22 11:50:20] [Rank 0] step:8221/10000 train_time:735450ms step_avg:89.46ms +[2025-08-22 11:50:22] [Rank 0] step:8241/10000 train_time:737358ms step_avg:89.47ms 
+[2025-08-22 11:50:22] [Rank 0] step:8241/10000 train_time:737358ms step_avg:89.47ms +[2025-08-22 11:50:24] [Rank 0] step:8261/10000 train_time:739262ms step_avg:89.49ms +[2025-08-22 11:50:24] [Rank 0] step:8261/10000 train_time:739262ms step_avg:89.49ms +[2025-08-22 11:50:26] [Rank 0] step:8281/10000 train_time:741172ms step_avg:89.50ms +[2025-08-22 11:50:26] [Rank 0] step:8281/10000 train_time:741172ms step_avg:89.50ms +[2025-08-22 11:50:28] [Rank 0] step:8301/10000 train_time:743077ms step_avg:89.52ms +[2025-08-22 11:50:28] [Rank 0] step:8301/10000 train_time:743077ms step_avg:89.52ms +[2025-08-22 11:50:30] [Rank 0] step:8321/10000 train_time:744975ms step_avg:89.53ms +[2025-08-22 11:50:30] [Rank 0] step:8321/10000 train_time:744975ms step_avg:89.53ms +[2025-08-22 11:50:32] [Rank 0] step:8341/10000 train_time:746882ms step_avg:89.54ms +[2025-08-22 11:50:32] [Rank 0] step:8341/10000 train_time:746882ms step_avg:89.54ms +[2025-08-22 11:50:34] [Rank 0] step:8361/10000 train_time:748788ms step_avg:89.56ms +[2025-08-22 11:50:34] [Rank 0] step:8361/10000 train_time:748788ms step_avg:89.56ms +[2025-08-22 11:50:36] [Rank 0] step:8381/10000 train_time:750692ms step_avg:89.57ms +[2025-08-22 11:50:36] [Rank 0] step:8381/10000 train_time:750692ms step_avg:89.57ms +[2025-08-22 11:50:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:50:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:50:51] [Rank 0] PRINT: step:8400/10000 val_loss:3.7766 svd_entropy: attn_qk:H=0.6747,top10E=0.38,eRank=108.4,q75/q25=83.88 attn_vo:H=0.5998,top10E=0.47,eRank=87.3,q75/q25=100.70 mlp_w1:H=0.7523,top10E=0.30,eRank=188.7,q75/q25=12.04 mlp_w2:H=0.9100,top10E=0.09,eRank=451.1,q75/q25=9.44 vo_prod:H=0.4844,top10E=0.66,eRank=42.5,q75/q25=9901.02 train_time:752781ms step_avg:89.62ms +[2025-08-22 11:50:51] [Rank 0] PRINT: step:8400/10000 val_loss:3.7766 svd_entropy: attn_qk:H=0.6747,top10E=0.38,eRank=108.4,q75/q25=83.88 attn_vo:H=0.5998,top10E=0.47,eRank=87.3,q75/q25=100.70 mlp_w1:H=0.7523,top10E=0.30,eRank=188.7,q75/q25=12.04 mlp_w2:H=0.9100,top10E=0.09,eRank=451.1,q75/q25=9.44 vo_prod:H=0.4844,top10E=0.66,eRank=42.5,q75/q25=9901.02 train_time:752781ms step_avg:89.62ms +[2025-08-22 11:50:51] [Rank 0] step:8401/10000 train_time:752794ms step_avg:89.61ms +[2025-08-22 11:50:51] [Rank 0] step:8401/10000 train_time:752794ms step_avg:89.61ms +[2025-08-22 11:50:53] [Rank 0] step:8421/10000 train_time:754500ms step_avg:89.60ms +[2025-08-22 11:50:53] [Rank 0] step:8421/10000 train_time:754500ms step_avg:89.60ms +[2025-08-22 11:50:55] [Rank 0] step:8441/10000 train_time:756395ms step_avg:89.61ms +[2025-08-22 11:50:55] [Rank 0] step:8441/10000 train_time:756395ms step_avg:89.61ms +[2025-08-22 11:50:57] [Rank 0] step:8461/10000 train_time:758288ms step_avg:89.62ms +[2025-08-22 11:50:57] [Rank 0] step:8461/10000 train_time:758288ms step_avg:89.62ms +[2025-08-22 11:50:59] [Rank 0] step:8481/10000 train_time:760191ms step_avg:89.63ms +[2025-08-22 11:50:59] [Rank 0] step:8481/10000 train_time:760191ms step_avg:89.63ms +[2025-08-22 11:51:01] [Rank 0] step:8501/10000 train_time:762110ms step_avg:89.65ms +[2025-08-22 11:51:01] [Rank 0] step:8501/10000 train_time:762110ms step_avg:89.65ms +[2025-08-22 11:51:03] [Rank 0] step:8521/10000 train_time:764018ms step_avg:89.66ms +[2025-08-22 11:51:03] [Rank 0] step:8521/10000 train_time:764018ms step_avg:89.66ms +[2025-08-22 
11:51:04] [Rank 0] step:8541/10000 train_time:765933ms step_avg:89.68ms +[2025-08-22 11:51:04] [Rank 0] step:8541/10000 train_time:765933ms step_avg:89.68ms +[2025-08-22 11:51:06] [Rank 0] step:8561/10000 train_time:767838ms step_avg:89.69ms +[2025-08-22 11:51:06] [Rank 0] step:8561/10000 train_time:767838ms step_avg:89.69ms +[2025-08-22 11:51:08] [Rank 0] step:8581/10000 train_time:769743ms step_avg:89.70ms +[2025-08-22 11:51:08] [Rank 0] step:8581/10000 train_time:769743ms step_avg:89.70ms +[2025-08-22 11:51:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:51:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:51:24] [Rank 0] PRINT: step:8600/10000 val_loss:3.7674 svd_entropy: attn_qk:H=0.6753,top10E=0.38,eRank=108.9,q75/q25=83.71 attn_vo:H=0.6008,top10E=0.47,eRank=87.8,q75/q25=100.45 mlp_w1:H=0.7526,top10E=0.30,eRank=189.4,q75/q25=12.01 mlp_w2:H=0.9099,top10E=0.09,eRank=451.1,q75/q25=9.44 vo_prod:H=0.4855,top10E=0.66,eRank=42.7,q75/q25=9919.25 train_time:771826ms step_avg:89.75ms +[2025-08-22 11:51:24] [Rank 0] PRINT: step:8600/10000 val_loss:3.7674 svd_entropy: attn_qk:H=0.6753,top10E=0.38,eRank=108.9,q75/q25=83.71 attn_vo:H=0.6008,top10E=0.47,eRank=87.8,q75/q25=100.45 mlp_w1:H=0.7526,top10E=0.30,eRank=189.4,q75/q25=12.01 mlp_w2:H=0.9099,top10E=0.09,eRank=451.1,q75/q25=9.44 vo_prod:H=0.4855,top10E=0.66,eRank=42.7,q75/q25=9919.25 train_time:771826ms step_avg:89.75ms +[2025-08-22 11:51:24] [Rank 0] step:8601/10000 train_time:771839ms step_avg:89.74ms +[2025-08-22 11:51:24] [Rank 0] step:8601/10000 train_time:771839ms step_avg:89.74ms +[2025-08-22 11:51:26] [Rank 0] step:8621/10000 train_time:773554ms step_avg:89.73ms +[2025-08-22 11:51:26] [Rank 0] step:8621/10000 train_time:773554ms step_avg:89.73ms +[2025-08-22 11:51:28] [Rank 0] step:8641/10000 train_time:775447ms 
step_avg:89.74ms +[2025-08-22 11:51:28] [Rank 0] step:8641/10000 train_time:775447ms step_avg:89.74ms +[2025-08-22 11:51:30] [Rank 0] step:8661/10000 train_time:777346ms step_avg:89.75ms +[2025-08-22 11:51:30] [Rank 0] step:8661/10000 train_time:777346ms step_avg:89.75ms +[2025-08-22 11:51:31] [Rank 0] step:8681/10000 train_time:779241ms step_avg:89.76ms +[2025-08-22 11:51:31] [Rank 0] step:8681/10000 train_time:779241ms step_avg:89.76ms +[2025-08-22 11:51:33] [Rank 0] step:8701/10000 train_time:781137ms step_avg:89.78ms +[2025-08-22 11:51:33] [Rank 0] step:8701/10000 train_time:781137ms step_avg:89.78ms +[2025-08-22 11:51:35] [Rank 0] step:8721/10000 train_time:783038ms step_avg:89.79ms +[2025-08-22 11:51:35] [Rank 0] step:8721/10000 train_time:783038ms step_avg:89.79ms +[2025-08-22 11:51:37] [Rank 0] step:8741/10000 train_time:784930ms step_avg:89.80ms +[2025-08-22 11:51:37] [Rank 0] step:8741/10000 train_time:784930ms step_avg:89.80ms +[2025-08-22 11:51:39] [Rank 0] step:8761/10000 train_time:786825ms step_avg:89.81ms +[2025-08-22 11:51:39] [Rank 0] step:8761/10000 train_time:786825ms step_avg:89.81ms +[2025-08-22 11:51:41] [Rank 0] step:8781/10000 train_time:788730ms step_avg:89.82ms +[2025-08-22 11:51:41] [Rank 0] step:8781/10000 train_time:788730ms step_avg:89.82ms +[2025-08-22 11:51:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:51:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:51:57] [Rank 0] PRINT: step:8800/10000 val_loss:3.7554 svd_entropy: attn_qk:H=0.6758,top10E=0.38,eRank=109.2,q75/q25=83.28 attn_vo:H=0.6016,top10E=0.47,eRank=88.2,q75/q25=100.12 mlp_w1:H=0.7529,top10E=0.30,eRank=189.9,q75/q25=11.98 mlp_w2:H=0.9099,top10E=0.09,eRank=451.2,q75/q25=9.46 vo_prod:H=0.4863,top10E=0.66,eRank=42.9,q75/q25=9933.80 train_time:790820ms step_avg:89.87ms +[2025-08-22 11:51:57] [Rank 0] PRINT: step:8800/10000 val_loss:3.7554 svd_entropy: attn_qk:H=0.6758,top10E=0.38,eRank=109.2,q75/q25=83.28 attn_vo:H=0.6016,top10E=0.47,eRank=88.2,q75/q25=100.12 mlp_w1:H=0.7529,top10E=0.30,eRank=189.9,q75/q25=11.98 mlp_w2:H=0.9099,top10E=0.09,eRank=451.2,q75/q25=9.46 vo_prod:H=0.4863,top10E=0.66,eRank=42.9,q75/q25=9933.80 train_time:790820ms step_avg:89.87ms +[2025-08-22 11:51:57] [Rank 0] step:8801/10000 train_time:790831ms step_avg:89.86ms +[2025-08-22 11:51:57] [Rank 0] step:8801/10000 train_time:790831ms step_avg:89.86ms +[2025-08-22 11:51:59] [Rank 0] step:8821/10000 train_time:792548ms step_avg:89.85ms +[2025-08-22 11:51:59] [Rank 0] step:8821/10000 train_time:792548ms step_avg:89.85ms +[2025-08-22 11:52:00] [Rank 0] step:8841/10000 train_time:794468ms step_avg:89.86ms +[2025-08-22 11:52:00] [Rank 0] step:8841/10000 train_time:794468ms step_avg:89.86ms +[2025-08-22 11:52:02] [Rank 0] step:8861/10000 train_time:796362ms step_avg:89.87ms +[2025-08-22 11:52:02] [Rank 0] step:8861/10000 train_time:796362ms step_avg:89.87ms +[2025-08-22 11:52:04] [Rank 0] step:8881/10000 train_time:798258ms step_avg:89.88ms +[2025-08-22 11:52:04] [Rank 0] step:8881/10000 train_time:798258ms step_avg:89.88ms +[2025-08-22 11:52:06] [Rank 0] step:8901/10000 train_time:800160ms step_avg:89.90ms +[2025-08-22 11:52:06] [Rank 0] step:8901/10000 train_time:800160ms step_avg:89.90ms +[2025-08-22 11:52:08] [Rank 0] step:8921/10000 train_time:802065ms step_avg:89.91ms +[2025-08-22 11:52:08] [Rank 0] step:8921/10000 train_time:802065ms step_avg:89.91ms +[2025-08-22 
11:52:10] [Rank 0] step:8941/10000 train_time:803980ms step_avg:89.92ms +[2025-08-22 11:52:10] [Rank 0] step:8941/10000 train_time:803980ms step_avg:89.92ms +[2025-08-22 11:52:12] [Rank 0] step:8961/10000 train_time:805879ms step_avg:89.93ms +[2025-08-22 11:52:12] [Rank 0] step:8961/10000 train_time:805879ms step_avg:89.93ms +[2025-08-22 11:52:14] [Rank 0] step:8981/10000 train_time:807777ms step_avg:89.94ms +[2025-08-22 11:52:14] [Rank 0] step:8981/10000 train_time:807777ms step_avg:89.94ms +[2025-08-22 11:52:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:52:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:52:29] [Rank 0] PRINT: step:9000/10000 val_loss:3.7448 svd_entropy: attn_qk:H=0.6763,top10E=0.37,eRank=109.5,q75/q25=83.35 attn_vo:H=0.6024,top10E=0.47,eRank=88.6,q75/q25=100.36 mlp_w1:H=0.7533,top10E=0.30,eRank=190.5,q75/q25=11.98 mlp_w2:H=0.9099,top10E=0.09,eRank=451.2,q75/q25=9.46 vo_prod:H=0.4872,top10E=0.65,eRank=43.1,q75/q25=10099.55 train_time:809867ms step_avg:89.99ms +[2025-08-22 11:52:29] [Rank 0] PRINT: step:9000/10000 val_loss:3.7448 svd_entropy: attn_qk:H=0.6763,top10E=0.37,eRank=109.5,q75/q25=83.35 attn_vo:H=0.6024,top10E=0.47,eRank=88.6,q75/q25=100.36 mlp_w1:H=0.7533,top10E=0.30,eRank=190.5,q75/q25=11.98 mlp_w2:H=0.9099,top10E=0.09,eRank=451.2,q75/q25=9.46 vo_prod:H=0.4872,top10E=0.65,eRank=43.1,q75/q25=10099.55 train_time:809867ms step_avg:89.99ms +[2025-08-22 11:52:29] [Rank 0] step:9001/10000 train_time:809879ms step_avg:89.98ms +[2025-08-22 11:52:29] [Rank 0] step:9001/10000 train_time:809879ms step_avg:89.98ms +[2025-08-22 11:52:31] [Rank 0] step:9021/10000 train_time:811589ms step_avg:89.97ms +[2025-08-22 11:52:31] [Rank 0] step:9021/10000 train_time:811589ms step_avg:89.97ms +[2025-08-22 11:52:33] [Rank 0] step:9041/10000 train_time:813486ms 
step_avg:89.98ms +[2025-08-22 11:52:33] [Rank 0] step:9041/10000 train_time:813486ms step_avg:89.98ms +[2025-08-22 11:52:35] [Rank 0] step:9061/10000 train_time:815400ms step_avg:89.99ms +[2025-08-22 11:52:35] [Rank 0] step:9061/10000 train_time:815400ms step_avg:89.99ms +[2025-08-22 11:52:37] [Rank 0] step:9081/10000 train_time:817312ms step_avg:90.00ms +[2025-08-22 11:52:37] [Rank 0] step:9081/10000 train_time:817312ms step_avg:90.00ms +[2025-08-22 11:52:39] [Rank 0] step:9101/10000 train_time:819237ms step_avg:90.02ms +[2025-08-22 11:52:39] [Rank 0] step:9101/10000 train_time:819237ms step_avg:90.02ms +[2025-08-22 11:52:41] [Rank 0] step:9121/10000 train_time:821147ms step_avg:90.03ms +[2025-08-22 11:52:41] [Rank 0] step:9121/10000 train_time:821147ms step_avg:90.03ms +[2025-08-22 11:52:43] [Rank 0] step:9141/10000 train_time:823044ms step_avg:90.04ms +[2025-08-22 11:52:43] [Rank 0] step:9141/10000 train_time:823044ms step_avg:90.04ms +[2025-08-22 11:52:45] [Rank 0] step:9161/10000 train_time:824938ms step_avg:90.05ms +[2025-08-22 11:52:45] [Rank 0] step:9161/10000 train_time:824938ms step_avg:90.05ms +[2025-08-22 11:52:47] [Rank 0] step:9181/10000 train_time:826874ms step_avg:90.06ms +[2025-08-22 11:52:47] [Rank 0] step:9181/10000 train_time:826874ms step_avg:90.06ms +[2025-08-22 11:52:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:52:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:53:02] [Rank 0] PRINT: step:9200/10000 val_loss:3.7356 svd_entropy: attn_qk:H=0.6767,top10E=0.37,eRank=109.8,q75/q25=83.44 attn_vo:H=0.6030,top10E=0.47,eRank=88.9,q75/q25=100.54 mlp_w1:H=0.7536,top10E=0.30,eRank=190.9,q75/q25=11.95 mlp_w2:H=0.9099,top10E=0.09,eRank=451.3,q75/q25=9.50 vo_prod:H=0.4882,top10E=0.65,eRank=43.3,q75/q25=10040.18 train_time:828972ms step_avg:90.11ms +[2025-08-22 11:53:02] [Rank 0] PRINT: step:9200/10000 val_loss:3.7356 svd_entropy: attn_qk:H=0.6767,top10E=0.37,eRank=109.8,q75/q25=83.44 attn_vo:H=0.6030,top10E=0.47,eRank=88.9,q75/q25=100.54 mlp_w1:H=0.7536,top10E=0.30,eRank=190.9,q75/q25=11.95 mlp_w2:H=0.9099,top10E=0.09,eRank=451.3,q75/q25=9.50 vo_prod:H=0.4882,top10E=0.65,eRank=43.3,q75/q25=10040.18 train_time:828972ms step_avg:90.11ms +[2025-08-22 11:53:02] [Rank 0] step:9201/10000 train_time:828984ms step_avg:90.10ms +[2025-08-22 11:53:02] [Rank 0] step:9201/10000 train_time:828984ms step_avg:90.10ms +[2025-08-22 11:53:04] [Rank 0] step:9221/10000 train_time:830719ms step_avg:90.09ms +[2025-08-22 11:53:04] [Rank 0] step:9221/10000 train_time:830719ms step_avg:90.09ms +[2025-08-22 11:53:06] [Rank 0] step:9241/10000 train_time:832630ms step_avg:90.10ms +[2025-08-22 11:53:06] [Rank 0] step:9241/10000 train_time:832630ms step_avg:90.10ms +[2025-08-22 11:53:08] [Rank 0] step:9261/10000 train_time:834542ms step_avg:90.11ms +[2025-08-22 11:53:08] [Rank 0] step:9261/10000 train_time:834542ms step_avg:90.11ms +[2025-08-22 11:53:10] [Rank 0] step:9281/10000 train_time:836433ms step_avg:90.12ms +[2025-08-22 11:53:10] [Rank 0] step:9281/10000 train_time:836433ms step_avg:90.12ms +[2025-08-22 11:53:12] [Rank 0] step:9301/10000 train_time:838333ms step_avg:90.13ms +[2025-08-22 11:53:12] [Rank 0] step:9301/10000 train_time:838333ms step_avg:90.13ms +[2025-08-22 11:53:14] [Rank 0] step:9321/10000 train_time:840243ms step_avg:90.15ms +[2025-08-22 11:53:14] [Rank 0] step:9321/10000 train_time:840243ms step_avg:90.15ms +[2025-08-22 
11:53:15] [Rank 0] step:9341/10000 train_time:842152ms step_avg:90.16ms +[2025-08-22 11:53:15] [Rank 0] step:9341/10000 train_time:842152ms step_avg:90.16ms +[2025-08-22 11:53:17] [Rank 0] step:9361/10000 train_time:844065ms step_avg:90.17ms +[2025-08-22 11:53:17] [Rank 0] step:9361/10000 train_time:844065ms step_avg:90.17ms +[2025-08-22 11:53:19] [Rank 0] step:9381/10000 train_time:845987ms step_avg:90.18ms +[2025-08-22 11:53:19] [Rank 0] step:9381/10000 train_time:845987ms step_avg:90.18ms +[2025-08-22 11:53:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:53:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:53:35] [Rank 0] PRINT: step:9400/10000 val_loss:3.7270 svd_entropy: attn_qk:H=0.6770,top10E=0.37,eRank=110.0,q75/q25=83.76 attn_vo:H=0.6036,top10E=0.47,eRank=89.1,q75/q25=100.32 mlp_w1:H=0.7539,top10E=0.30,eRank=191.3,q75/q25=11.95 mlp_w2:H=0.9099,top10E=0.09,eRank=451.3,q75/q25=9.50 vo_prod:H=0.4885,top10E=0.65,eRank=43.4,q75/q25=10149.50 train_time:848086ms step_avg:90.22ms +[2025-08-22 11:53:35] [Rank 0] PRINT: step:9400/10000 val_loss:3.7270 svd_entropy: attn_qk:H=0.6770,top10E=0.37,eRank=110.0,q75/q25=83.76 attn_vo:H=0.6036,top10E=0.47,eRank=89.1,q75/q25=100.32 mlp_w1:H=0.7539,top10E=0.30,eRank=191.3,q75/q25=11.95 mlp_w2:H=0.9099,top10E=0.09,eRank=451.3,q75/q25=9.50 vo_prod:H=0.4885,top10E=0.65,eRank=43.4,q75/q25=10149.50 train_time:848086ms step_avg:90.22ms +[2025-08-22 11:53:35] [Rank 0] step:9401/10000 train_time:848098ms step_avg:90.21ms +[2025-08-22 11:53:35] [Rank 0] step:9401/10000 train_time:848098ms step_avg:90.21ms +[2025-08-22 11:53:37] [Rank 0] step:9421/10000 train_time:849809ms step_avg:90.20ms +[2025-08-22 11:53:37] [Rank 0] step:9421/10000 train_time:849809ms step_avg:90.20ms +[2025-08-22 11:53:39] [Rank 0] step:9441/10000 train_time:851706ms 
step_avg:90.21ms +[2025-08-22 11:53:39] [Rank 0] step:9441/10000 train_time:851706ms step_avg:90.21ms +[2025-08-22 11:53:41] [Rank 0] step:9461/10000 train_time:853613ms step_avg:90.22ms +[2025-08-22 11:53:41] [Rank 0] step:9461/10000 train_time:853613ms step_avg:90.22ms +[2025-08-22 11:53:43] [Rank 0] step:9481/10000 train_time:855517ms step_avg:90.23ms +[2025-08-22 11:53:43] [Rank 0] step:9481/10000 train_time:855517ms step_avg:90.23ms +[2025-08-22 11:53:45] [Rank 0] step:9501/10000 train_time:857433ms step_avg:90.25ms +[2025-08-22 11:53:45] [Rank 0] step:9501/10000 train_time:857433ms step_avg:90.25ms +[2025-08-22 11:53:46] [Rank 0] step:9521/10000 train_time:859328ms step_avg:90.26ms +[2025-08-22 11:53:46] [Rank 0] step:9521/10000 train_time:859328ms step_avg:90.26ms +[2025-08-22 11:53:48] [Rank 0] step:9541/10000 train_time:861226ms step_avg:90.27ms +[2025-08-22 11:53:48] [Rank 0] step:9541/10000 train_time:861226ms step_avg:90.27ms +[2025-08-22 11:53:50] [Rank 0] step:9561/10000 train_time:863123ms step_avg:90.28ms +[2025-08-22 11:53:50] [Rank 0] step:9561/10000 train_time:863123ms step_avg:90.28ms +[2025-08-22 11:53:52] [Rank 0] step:9581/10000 train_time:865026ms step_avg:90.29ms +[2025-08-22 11:53:52] [Rank 0] step:9581/10000 train_time:865026ms step_avg:90.29ms +[2025-08-22 11:53:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:53:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:54:08] [Rank 0] PRINT: step:9600/10000 val_loss:3.7188 svd_entropy: attn_qk:H=0.6772,top10E=0.37,eRank=110.2,q75/q25=83.79 attn_vo:H=0.6041,top10E=0.47,eRank=89.4,q75/q25=100.90 mlp_w1:H=0.7541,top10E=0.30,eRank=191.6,q75/q25=11.94 mlp_w2:H=0.9100,top10E=0.09,eRank=451.4,q75/q25=9.49 vo_prod:H=0.4891,top10E=0.65,eRank=43.6,q75/q25=10170.14 train_time:867131ms step_avg:90.33ms +[2025-08-22 11:54:08] [Rank 0] PRINT: step:9600/10000 val_loss:3.7188 svd_entropy: attn_qk:H=0.6772,top10E=0.37,eRank=110.2,q75/q25=83.79 attn_vo:H=0.6041,top10E=0.47,eRank=89.4,q75/q25=100.90 mlp_w1:H=0.7541,top10E=0.30,eRank=191.6,q75/q25=11.94 mlp_w2:H=0.9100,top10E=0.09,eRank=451.4,q75/q25=9.49 vo_prod:H=0.4891,top10E=0.65,eRank=43.6,q75/q25=10170.14 train_time:867131ms step_avg:90.33ms +[2025-08-22 11:54:08] [Rank 0] step:9601/10000 train_time:867143ms step_avg:90.32ms +[2025-08-22 11:54:08] [Rank 0] step:9601/10000 train_time:867143ms step_avg:90.32ms +[2025-08-22 11:54:10] [Rank 0] step:9621/10000 train_time:868859ms step_avg:90.31ms +[2025-08-22 11:54:10] [Rank 0] step:9621/10000 train_time:868859ms step_avg:90.31ms +[2025-08-22 11:54:12] [Rank 0] step:9641/10000 train_time:870761ms step_avg:90.32ms +[2025-08-22 11:54:12] [Rank 0] step:9641/10000 train_time:870761ms step_avg:90.32ms +[2025-08-22 11:54:14] [Rank 0] step:9661/10000 train_time:872693ms step_avg:90.33ms +[2025-08-22 11:54:14] [Rank 0] step:9661/10000 train_time:872693ms step_avg:90.33ms +[2025-08-22 11:54:16] [Rank 0] step:9681/10000 train_time:874616ms step_avg:90.34ms +[2025-08-22 11:54:16] [Rank 0] step:9681/10000 train_time:874616ms step_avg:90.34ms +[2025-08-22 11:54:18] [Rank 0] step:9701/10000 train_time:876555ms step_avg:90.36ms +[2025-08-22 11:54:18] [Rank 0] step:9701/10000 train_time:876555ms step_avg:90.36ms +[2025-08-22 11:54:19] [Rank 0] step:9721/10000 train_time:878475ms step_avg:90.37ms +[2025-08-22 11:54:19] [Rank 0] step:9721/10000 train_time:878475ms step_avg:90.37ms +[2025-08-22 
11:54:21] [Rank 0] step:9741/10000 train_time:880428ms step_avg:90.38ms +[2025-08-22 11:54:21] [Rank 0] step:9741/10000 train_time:880428ms step_avg:90.38ms +[2025-08-22 11:54:23] [Rank 0] step:9761/10000 train_time:882353ms step_avg:90.40ms +[2025-08-22 11:54:23] [Rank 0] step:9761/10000 train_time:882353ms step_avg:90.40ms +[2025-08-22 11:54:25] [Rank 0] step:9781/10000 train_time:884295ms step_avg:90.41ms +[2025-08-22 11:54:25] [Rank 0] step:9781/10000 train_time:884295ms step_avg:90.41ms +[2025-08-22 11:54:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:54:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:54:41] [Rank 0] PRINT: step:9800/10000 val_loss:3.7103 svd_entropy: attn_qk:H=0.6774,top10E=0.37,eRank=110.4,q75/q25=83.94 attn_vo:H=0.6044,top10E=0.47,eRank=89.5,q75/q25=100.75 mlp_w1:H=0.7542,top10E=0.30,eRank=191.8,q75/q25=11.94 mlp_w2:H=0.9100,top10E=0.09,eRank=451.4,q75/q25=9.51 vo_prod:H=0.4896,top10E=0.65,eRank=43.7,q75/q25=10177.20 train_time:886436ms step_avg:90.45ms +[2025-08-22 11:54:41] [Rank 0] PRINT: step:9800/10000 val_loss:3.7103 svd_entropy: attn_qk:H=0.6774,top10E=0.37,eRank=110.4,q75/q25=83.94 attn_vo:H=0.6044,top10E=0.47,eRank=89.5,q75/q25=100.75 mlp_w1:H=0.7542,top10E=0.30,eRank=191.8,q75/q25=11.94 mlp_w2:H=0.9100,top10E=0.09,eRank=451.4,q75/q25=9.51 vo_prod:H=0.4896,top10E=0.65,eRank=43.7,q75/q25=10177.20 train_time:886436ms step_avg:90.45ms +[2025-08-22 11:54:41] [Rank 0] step:9801/10000 train_time:886449ms step_avg:90.44ms +[2025-08-22 11:54:41] [Rank 0] step:9801/10000 train_time:886449ms step_avg:90.44ms +[2025-08-22 11:54:43] [Rank 0] step:9821/10000 train_time:888184ms step_avg:90.44ms +[2025-08-22 11:54:43] [Rank 0] step:9821/10000 train_time:888184ms step_avg:90.44ms +[2025-08-22 11:54:45] [Rank 0] step:9841/10000 train_time:890129ms 
step_avg:90.45ms +[2025-08-22 11:54:45] [Rank 0] step:9841/10000 train_time:890129ms step_avg:90.45ms +[2025-08-22 11:54:47] [Rank 0] step:9861/10000 train_time:892049ms step_avg:90.46ms +[2025-08-22 11:54:47] [Rank 0] step:9861/10000 train_time:892049ms step_avg:90.46ms +[2025-08-22 11:54:49] [Rank 0] step:9881/10000 train_time:893971ms step_avg:90.47ms +[2025-08-22 11:54:49] [Rank 0] step:9881/10000 train_time:893971ms step_avg:90.47ms +[2025-08-22 11:54:51] [Rank 0] step:9901/10000 train_time:895912ms step_avg:90.49ms +[2025-08-22 11:54:51] [Rank 0] step:9901/10000 train_time:895912ms step_avg:90.49ms +[2025-08-22 11:54:53] [Rank 0] step:9921/10000 train_time:897839ms step_avg:90.50ms +[2025-08-22 11:54:53] [Rank 0] step:9921/10000 train_time:897839ms step_avg:90.50ms +[2025-08-22 11:54:55] [Rank 0] step:9941/10000 train_time:899778ms step_avg:90.51ms +[2025-08-22 11:54:55] [Rank 0] step:9941/10000 train_time:899778ms step_avg:90.51ms +[2025-08-22 11:54:57] [Rank 0] step:9961/10000 train_time:901710ms step_avg:90.52ms +[2025-08-22 11:54:57] [Rank 0] step:9961/10000 train_time:901710ms step_avg:90.52ms +[2025-08-22 11:54:59] [Rank 0] step:9981/10000 train_time:903644ms step_avg:90.54ms +[2025-08-22 11:54:59] [Rank 0] step:9981/10000 train_time:903644ms step_avg:90.54ms +[2025-08-22 11:55:00] [Rank 0] step:10000/10000 train_time:905487ms step_avg:90.55ms +[2025-08-22 11:55:00] [Rank 0] step:10000/10000 train_time:905487ms step_avg:90.55ms +[2025-08-22 11:55:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:55:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:55:14] [Rank 0] PRINT: step:10000/10000 val_loss:3.7030 svd_entropy: attn_qk:H=0.6776,top10E=0.37,eRank=110.5,q75/q25=83.85 attn_vo:H=0.6047,top10E=0.47,eRank=89.7,q75/q25=100.81 mlp_w1:H=0.7544,top10E=0.30,eRank=192.0,q75/q25=11.94 mlp_w2:H=0.9100,top10E=0.09,eRank=451.5,q75/q25=9.51 vo_prod:H=0.4898,top10E=0.65,eRank=43.7,q75/q25=10207.20 train_time:905782ms step_avg:90.58ms +[2025-08-22 11:55:14] [Rank 0] PRINT: step:10000/10000 val_loss:3.7030 svd_entropy: attn_qk:H=0.6776,top10E=0.37,eRank=110.5,q75/q25=83.85 attn_vo:H=0.6047,top10E=0.47,eRank=89.7,q75/q25=100.81 mlp_w1:H=0.7544,top10E=0.30,eRank=192.0,q75/q25=11.94 mlp_w2:H=0.9100,top10E=0.09,eRank=451.5,q75/q25=9.51 vo_prod:H=0.4898,top10E=0.65,eRank=43.7,q75/q25=10207.20 train_time:905782ms step_avg:90.58ms +[2025-08-22 11:55:14] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 11:55:14 2025 --- +[2025-08-22 11:55:14] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 11:55:14 2025 --- +[2025-08-22 11:55:14] [Rank 0] PRINT: Peak memory allocated: 11478 MiB reserved: 16356 MiB +[2025-08-22 11:55:14] [Rank 0] PRINT: Peak memory allocated: 11478 MiB reserved: 16356 MiB diff --git a/logs_svd_gated/mode_6_param_gated_seed_42/config.json b/logs_svd_gated/mode_6_param_gated_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..abb059410991b1e87662383c07e880d33ec87378 --- /dev/null +++ b/logs_svd_gated/mode_6_param_gated_seed_42/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 6, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "0816df72-69ab-4d4c-b95a-28f17c2f2ef6", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_6_param_gated_seed_42/training_log_0816df72-69ab-4d4c-b95a-28f17c2f2ef6.txt b/logs_svd_gated/mode_6_param_gated_seed_42/training_log_0816df72-69ab-4d4c-b95a-28f17c2f2ef6.txt new file mode 100644 index 0000000000000000000000000000000000000000..08c7a5e204dd882e4ea61da86855c4169763c499 --- /dev/null +++ b/logs_svd_gated/mode_6_param_gated_seed_42/training_log_0816df72-69ab-4d4c-b95a-28f17c2f2ef6.txt @@ -0,0 +1,2926 @@ +[2025-08-22 16:41:36] [Rank 0] PRINT: --- Script Start: Fri Aug 22 16:41:36 2025 --- +[2025-08-22 16:41:36] [Rank 0] PRINT: --- Script Start: Fri Aug 22 16:41:36 2025 --- +[2025-08-22 16:41:36] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=6, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 16:41:36] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=6, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 16:41:36] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 16:41:36] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 16:41:36] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 16:41:36] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 16:41:36] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_6_param_gated_seed_42 +[2025-08-22 16:41:36] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_6_param_gated_seed_42 +[2025-08-22 16:41:36] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 16:41:36] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 16:41:36] [Rank 0] PRINT: Constructing model... +[2025-08-22 16:41:36] [Rank 0] PRINT: Constructing model... +[2025-08-22 16:41:38] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 16:41:38] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 16:41:38] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 16:41:38] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 16:41:38] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 16:41:38] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 16:41:38] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 6 +[2025-08-22 16:41:38] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 6 +[2025-08-22 16:41:38] [Rank 0] PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: 0.05). +[2025-08-22 16:41:38] [Rank 0] PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: 0.05). +[2025-08-22 16:41:38] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 16:41:38] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 16:41:38] [Rank 0] PRINT: Muon optimizer is active with 12 parameters. +[2025-08-22 16:41:38] [Rank 0] PRINT: Muon optimizer is active with 12 parameters. +[2025-08-22 16:41:38] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 16:41:38] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 16:41:38] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 16:41:38] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 16:41:38] [Rank 0] PRINT: Starting warmup... +[2025-08-22 16:41:38] [Rank 0] PRINT: Starting warmup... +[2025-08-22 16:42:21] [Rank 0] PRINT: Warmup complete. +[2025-08-22 16:42:21] [Rank 0] PRINT: Warmup complete. +[2025-08-22 16:42:21] [Rank 0] PRINT: Starting training... +[2025-08-22 16:42:21] [Rank 0] PRINT: Starting training... 
+[2025-08-22 16:42:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:42:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:42:39] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 16:42:39] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 16:42:41] [Rank 0] step:21/10000 train_time:1575ms step_avg:75.00ms +[2025-08-22 16:42:41] [Rank 0] step:21/10000 train_time:1575ms step_avg:75.00ms +[2025-08-22 16:42:43] [Rank 0] step:41/10000 train_time:3251ms step_avg:79.28ms +[2025-08-22 16:42:43] [Rank 0] step:41/10000 train_time:3251ms step_avg:79.28ms +[2025-08-22 16:42:44] [Rank 0] step:61/10000 train_time:4924ms step_avg:80.73ms +[2025-08-22 16:42:44] [Rank 0] step:61/10000 train_time:4924ms step_avg:80.73ms +[2025-08-22 16:42:46] [Rank 0] step:81/10000 train_time:6636ms step_avg:81.93ms +[2025-08-22 16:42:46] [Rank 0] step:81/10000 train_time:6636ms step_avg:81.93ms +[2025-08-22 16:42:48] [Rank 0] step:101/10000 train_time:8276ms step_avg:81.94ms +[2025-08-22 16:42:48] [Rank 0] step:101/10000 train_time:8276ms step_avg:81.94ms +[2025-08-22 16:42:49] [Rank 0] step:121/10000 train_time:9954ms step_avg:82.27ms +[2025-08-22 16:42:49] [Rank 0] step:121/10000 
train_time:9954ms step_avg:82.27ms +[2025-08-22 16:42:51] [Rank 0] step:141/10000 train_time:11635ms step_avg:82.52ms +[2025-08-22 16:42:51] [Rank 0] step:141/10000 train_time:11635ms step_avg:82.52ms +[2025-08-22 16:42:53] [Rank 0] step:161/10000 train_time:13320ms step_avg:82.73ms +[2025-08-22 16:42:53] [Rank 0] step:161/10000 train_time:13320ms step_avg:82.73ms +[2025-08-22 16:42:54] [Rank 0] step:181/10000 train_time:15006ms step_avg:82.90ms +[2025-08-22 16:42:54] [Rank 0] step:181/10000 train_time:15006ms step_avg:82.90ms +[2025-08-22 16:42:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:42:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:43:10] [Rank 0] PRINT: step:200/10000 val_loss:6.7415 svd_entropy: attn_qk:H=0.3599,top10E=0.85,eRank=13.8,q75/q25=20.21 attn_vo:H=0.1542,top10E=0.98,eRank=3.3,q75/q25=96.46 mlp_w1:H=0.4162,top10E=0.79,eRank=17.0,q75/q25=7.01 mlp_w2:H=0.6784,top10E=0.34,eRank=91.4,q75/q25=12.16 vo_prod:H=0.0554,top10E=1.00,eRank=1.6,q75/q25=667.15 train_time:16858ms step_avg:84.29ms +[2025-08-22 16:43:10] [Rank 0] PRINT: step:200/10000 val_loss:6.7415 svd_entropy: attn_qk:H=0.3599,top10E=0.85,eRank=13.8,q75/q25=20.21 attn_vo:H=0.1542,top10E=0.98,eRank=3.3,q75/q25=96.46 mlp_w1:H=0.4162,top10E=0.79,eRank=17.0,q75/q25=7.01 mlp_w2:H=0.6784,top10E=0.34,eRank=91.4,q75/q25=12.16 vo_prod:H=0.0554,top10E=1.00,eRank=1.6,q75/q25=667.15 train_time:16858ms step_avg:84.29ms +[2025-08-22 16:43:10] [Rank 0] step:201/10000 train_time:16873ms step_avg:83.94ms +[2025-08-22 16:43:10] [Rank 0] step:201/10000 train_time:16873ms step_avg:83.94ms +[2025-08-22 16:43:12] [Rank 0] step:221/10000 train_time:18393ms step_avg:83.22ms +[2025-08-22 16:43:12] [Rank 0] step:221/10000 train_time:18393ms step_avg:83.22ms +[2025-08-22 16:43:13] [Rank 0] step:241/10000 
train_time:20070ms step_avg:83.28ms +[2025-08-22 16:43:13] [Rank 0] step:241/10000 train_time:20070ms step_avg:83.28ms +[2025-08-22 16:43:15] [Rank 0] step:261/10000 train_time:21749ms step_avg:83.33ms +[2025-08-22 16:43:15] [Rank 0] step:261/10000 train_time:21749ms step_avg:83.33ms +[2025-08-22 16:43:17] [Rank 0] step:281/10000 train_time:23429ms step_avg:83.38ms +[2025-08-22 16:43:17] [Rank 0] step:281/10000 train_time:23429ms step_avg:83.38ms +[2025-08-22 16:43:18] [Rank 0] step:301/10000 train_time:25109ms step_avg:83.42ms +[2025-08-22 16:43:18] [Rank 0] step:301/10000 train_time:25109ms step_avg:83.42ms +[2025-08-22 16:43:20] [Rank 0] step:321/10000 train_time:26789ms step_avg:83.45ms +[2025-08-22 16:43:20] [Rank 0] step:321/10000 train_time:26789ms step_avg:83.45ms +[2025-08-22 16:43:22] [Rank 0] step:341/10000 train_time:28469ms step_avg:83.49ms +[2025-08-22 16:43:22] [Rank 0] step:341/10000 train_time:28469ms step_avg:83.49ms +[2025-08-22 16:43:23] [Rank 0] step:361/10000 train_time:30150ms step_avg:83.52ms +[2025-08-22 16:43:23] [Rank 0] step:361/10000 train_time:30150ms step_avg:83.52ms +[2025-08-22 16:43:25] [Rank 0] step:381/10000 train_time:31829ms step_avg:83.54ms +[2025-08-22 16:43:25] [Rank 0] step:381/10000 train_time:31829ms step_avg:83.54ms +[2025-08-22 16:43:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:43:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:43:41] [Rank 0] PRINT: step:400/10000 val_loss:5.8442 svd_entropy: attn_qk:H=0.5523,top10E=0.59,eRank=43.4,q75/q25=54.16 attn_vo:H=0.3085,top10E=0.93,eRank=8.8,q75/q25=45.61 mlp_w1:H=0.5831,top10E=0.56,eRank=55.1,q75/q25=7.83 mlp_w2:H=0.8742,top10E=0.10,eRank=339.8,q75/q25=15.31 vo_prod:H=0.1816,top10E=0.99,eRank=3.8,q75/q25=397.17 train_time:33675ms step_avg:84.19ms +[2025-08-22 16:43:41] [Rank 0] PRINT: step:400/10000 val_loss:5.8442 svd_entropy: attn_qk:H=0.5523,top10E=0.59,eRank=43.4,q75/q25=54.16 attn_vo:H=0.3085,top10E=0.93,eRank=8.8,q75/q25=45.61 mlp_w1:H=0.5831,top10E=0.56,eRank=55.1,q75/q25=7.83 mlp_w2:H=0.8742,top10E=0.10,eRank=339.8,q75/q25=15.31 vo_prod:H=0.1816,top10E=0.99,eRank=3.8,q75/q25=397.17 train_time:33675ms step_avg:84.19ms +[2025-08-22 16:43:41] [Rank 0] step:401/10000 train_time:33690ms step_avg:84.02ms +[2025-08-22 16:43:41] [Rank 0] step:401/10000 train_time:33690ms step_avg:84.02ms +[2025-08-22 16:43:42] [Rank 0] step:421/10000 train_time:35207ms step_avg:83.63ms +[2025-08-22 16:43:42] [Rank 0] step:421/10000 train_time:35207ms step_avg:83.63ms +[2025-08-22 16:43:44] [Rank 0] step:441/10000 train_time:36879ms step_avg:83.63ms +[2025-08-22 16:43:44] [Rank 0] step:441/10000 train_time:36879ms step_avg:83.63ms +[2025-08-22 16:43:46] [Rank 0] step:461/10000 train_time:38553ms step_avg:83.63ms +[2025-08-22 16:43:46] [Rank 0] step:461/10000 train_time:38553ms step_avg:83.63ms +[2025-08-22 16:43:47] [Rank 0] step:481/10000 train_time:40228ms step_avg:83.64ms +[2025-08-22 16:43:47] [Rank 0] step:481/10000 train_time:40228ms step_avg:83.64ms +[2025-08-22 16:43:49] [Rank 0] step:501/10000 train_time:41904ms step_avg:83.64ms +[2025-08-22 16:43:49] [Rank 0] step:501/10000 train_time:41904ms step_avg:83.64ms +[2025-08-22 16:43:51] [Rank 0] step:521/10000 train_time:43581ms step_avg:83.65ms +[2025-08-22 16:43:51] [Rank 0] step:521/10000 train_time:43581ms step_avg:83.65ms +[2025-08-22 16:43:52] [Rank 0] step:541/10000 train_time:45259ms 
step_avg:83.66ms +[2025-08-22 16:43:52] [Rank 0] step:541/10000 train_time:45259ms step_avg:83.66ms +[2025-08-22 16:43:54] [Rank 0] step:561/10000 train_time:46937ms step_avg:83.67ms +[2025-08-22 16:43:54] [Rank 0] step:561/10000 train_time:46937ms step_avg:83.67ms +[2025-08-22 16:43:56] [Rank 0] step:581/10000 train_time:48614ms step_avg:83.67ms +[2025-08-22 16:43:56] [Rank 0] step:581/10000 train_time:48614ms step_avg:83.67ms +[2025-08-22 16:43:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:43:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:44:11] [Rank 0] PRINT: step:600/10000 val_loss:5.4201 svd_entropy: attn_qk:H=0.5976,top10E=0.51,eRank=56.6,q75/q25=70.08 attn_vo:H=0.3910,top10E=0.85,eRank=15.9,q75/q25=40.01 mlp_w1:H=0.6372,top10E=0.48,eRank=80.8,q75/q25=8.09 mlp_w2:H=0.9177,top10E=0.07,eRank=452.6,q75/q25=7.57 vo_prod:H=0.2663,top10E=0.97,eRank=6.8,q75/q25=490.17 train_time:50462ms step_avg:84.10ms +[2025-08-22 16:44:11] [Rank 0] PRINT: step:600/10000 val_loss:5.4201 svd_entropy: attn_qk:H=0.5976,top10E=0.51,eRank=56.6,q75/q25=70.08 attn_vo:H=0.3910,top10E=0.85,eRank=15.9,q75/q25=40.01 mlp_w1:H=0.6372,top10E=0.48,eRank=80.8,q75/q25=8.09 mlp_w2:H=0.9177,top10E=0.07,eRank=452.6,q75/q25=7.57 vo_prod:H=0.2663,top10E=0.97,eRank=6.8,q75/q25=490.17 train_time:50462ms step_avg:84.10ms +[2025-08-22 16:44:11] [Rank 0] step:601/10000 train_time:50477ms step_avg:83.99ms +[2025-08-22 16:44:11] [Rank 0] step:601/10000 train_time:50477ms step_avg:83.99ms +[2025-08-22 16:44:13] [Rank 0] step:621/10000 train_time:51991ms step_avg:83.72ms +[2025-08-22 16:44:13] [Rank 0] step:621/10000 train_time:51991ms step_avg:83.72ms +[2025-08-22 16:44:15] [Rank 0] step:641/10000 train_time:53668ms step_avg:83.73ms +[2025-08-22 16:44:15] [Rank 0] step:641/10000 train_time:53668ms 
step_avg:83.73ms +[2025-08-22 16:44:16] [Rank 0] step:661/10000 train_time:55344ms step_avg:83.73ms +[2025-08-22 16:44:16] [Rank 0] step:661/10000 train_time:55344ms step_avg:83.73ms +[2025-08-22 16:44:18] [Rank 0] step:681/10000 train_time:57025ms step_avg:83.74ms +[2025-08-22 16:44:18] [Rank 0] step:681/10000 train_time:57025ms step_avg:83.74ms +[2025-08-22 16:44:20] [Rank 0] step:701/10000 train_time:58704ms step_avg:83.74ms +[2025-08-22 16:44:20] [Rank 0] step:701/10000 train_time:58704ms step_avg:83.74ms +[2025-08-22 16:44:21] [Rank 0] step:721/10000 train_time:60385ms step_avg:83.75ms +[2025-08-22 16:44:21] [Rank 0] step:721/10000 train_time:60385ms step_avg:83.75ms +[2025-08-22 16:44:23] [Rank 0] step:741/10000 train_time:62066ms step_avg:83.76ms +[2025-08-22 16:44:23] [Rank 0] step:741/10000 train_time:62066ms step_avg:83.76ms +[2025-08-22 16:44:25] [Rank 0] step:761/10000 train_time:63760ms step_avg:83.78ms +[2025-08-22 16:44:25] [Rank 0] step:761/10000 train_time:63760ms step_avg:83.78ms +[2025-08-22 16:44:27] [Rank 0] step:781/10000 train_time:65456ms step_avg:83.81ms +[2025-08-22 16:44:27] [Rank 0] step:781/10000 train_time:65456ms step_avg:83.81ms +[2025-08-22 16:44:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:44:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:44:42] [Rank 0] PRINT: step:800/10000 val_loss:5.1852 svd_entropy: attn_qk:H=0.6122,top10E=0.48,eRank=61.8,q75/q25=68.32 attn_vo:H=0.4236,top10E=0.80,eRank=21.1,q75/q25=44.38 mlp_w1:H=0.6685,top10E=0.44,eRank=99.1,q75/q25=7.95 mlp_w2:H=0.9255,top10E=0.07,eRank=474.9,q75/q25=6.39 vo_prod:H=0.3003,top10E=0.93,eRank=9.1,q75/q25=616.91 train_time:67370ms step_avg:84.21ms +[2025-08-22 16:44:42] [Rank 0] PRINT: step:800/10000 val_loss:5.1852 svd_entropy: attn_qk:H=0.6122,top10E=0.48,eRank=61.8,q75/q25=68.32 attn_vo:H=0.4236,top10E=0.80,eRank=21.1,q75/q25=44.38 mlp_w1:H=0.6685,top10E=0.44,eRank=99.1,q75/q25=7.95 mlp_w2:H=0.9255,top10E=0.07,eRank=474.9,q75/q25=6.39 vo_prod:H=0.3003,top10E=0.93,eRank=9.1,q75/q25=616.91 train_time:67370ms step_avg:84.21ms +[2025-08-22 16:44:42] [Rank 0] step:801/10000 train_time:67384ms step_avg:84.13ms +[2025-08-22 16:44:42] [Rank 0] step:801/10000 train_time:67384ms step_avg:84.13ms +[2025-08-22 16:44:44] [Rank 0] step:821/10000 train_time:68925ms step_avg:83.95ms +[2025-08-22 16:44:44] [Rank 0] step:821/10000 train_time:68925ms step_avg:83.95ms +[2025-08-22 16:44:46] [Rank 0] step:841/10000 train_time:70614ms step_avg:83.96ms +[2025-08-22 16:44:46] [Rank 0] step:841/10000 train_time:70614ms step_avg:83.96ms +[2025-08-22 16:44:47] [Rank 0] step:861/10000 train_time:72304ms step_avg:83.98ms +[2025-08-22 16:44:47] [Rank 0] step:861/10000 train_time:72304ms step_avg:83.98ms +[2025-08-22 16:44:49] [Rank 0] step:881/10000 train_time:73994ms step_avg:83.99ms +[2025-08-22 16:44:49] [Rank 0] step:881/10000 train_time:73994ms step_avg:83.99ms +[2025-08-22 16:44:51] [Rank 0] step:901/10000 train_time:75685ms step_avg:84.00ms +[2025-08-22 16:44:51] [Rank 0] step:901/10000 train_time:75685ms step_avg:84.00ms +[2025-08-22 16:44:52] [Rank 0] step:921/10000 train_time:77375ms step_avg:84.01ms +[2025-08-22 16:44:52] [Rank 0] step:921/10000 train_time:77375ms step_avg:84.01ms +[2025-08-22 16:44:54] [Rank 0] step:941/10000 train_time:79067ms 
step_avg:84.02ms +[2025-08-22 16:44:54] [Rank 0] step:941/10000 train_time:79067ms step_avg:84.02ms +[2025-08-22 16:44:56] [Rank 0] step:961/10000 train_time:80760ms step_avg:84.04ms +[2025-08-22 16:44:56] [Rank 0] step:961/10000 train_time:80760ms step_avg:84.04ms +[2025-08-22 16:44:57] [Rank 0] step:981/10000 train_time:82453ms step_avg:84.05ms +[2025-08-22 16:44:57] [Rank 0] step:981/10000 train_time:82453ms step_avg:84.05ms +[2025-08-22 16:44:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:44:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:45:13] [Rank 0] PRINT: step:1000/10000 val_loss:4.9993 svd_entropy: attn_qk:H=0.6221,top10E=0.46,eRank=66.5,q75/q25=67.10 attn_vo:H=0.4457,top10E=0.76,eRank=25.5,q75/q25=47.05 mlp_w1:H=0.6826,top10E=0.42,eRank=110.1,q75/q25=8.02 mlp_w2:H=0.9319,top10E=0.07,eRank=494.8,q75/q25=5.71 vo_prod:H=0.3259,top10E=0.91,eRank=11.1,q75/q25=763.65 train_time:84315ms step_avg:84.31ms +[2025-08-22 16:45:13] [Rank 0] PRINT: step:1000/10000 val_loss:4.9993 svd_entropy: attn_qk:H=0.6221,top10E=0.46,eRank=66.5,q75/q25=67.10 attn_vo:H=0.4457,top10E=0.76,eRank=25.5,q75/q25=47.05 mlp_w1:H=0.6826,top10E=0.42,eRank=110.1,q75/q25=8.02 mlp_w2:H=0.9319,top10E=0.07,eRank=494.8,q75/q25=5.71 vo_prod:H=0.3259,top10E=0.91,eRank=11.1,q75/q25=763.65 train_time:84315ms step_avg:84.31ms +[2025-08-22 16:45:13] [Rank 0] step:1001/10000 train_time:84326ms step_avg:84.24ms +[2025-08-22 16:45:13] [Rank 0] step:1001/10000 train_time:84326ms step_avg:84.24ms +[2025-08-22 16:45:15] [Rank 0] step:1021/10000 train_time:85850ms step_avg:84.08ms +[2025-08-22 16:45:15] [Rank 0] step:1021/10000 train_time:85850ms step_avg:84.08ms +[2025-08-22 16:45:16] [Rank 0] step:1041/10000 train_time:87536ms step_avg:84.09ms +[2025-08-22 16:45:16] [Rank 0] step:1041/10000 train_time:87536ms 
step_avg:84.09ms +[2025-08-22 16:45:18] [Rank 0] step:1061/10000 train_time:89225ms step_avg:84.09ms +[2025-08-22 16:45:18] [Rank 0] step:1061/10000 train_time:89225ms step_avg:84.09ms +[2025-08-22 16:45:20] [Rank 0] step:1081/10000 train_time:90914ms step_avg:84.10ms +[2025-08-22 16:45:20] [Rank 0] step:1081/10000 train_time:90914ms step_avg:84.10ms +[2025-08-22 16:45:21] [Rank 0] step:1101/10000 train_time:92604ms step_avg:84.11ms +[2025-08-22 16:45:21] [Rank 0] step:1101/10000 train_time:92604ms step_avg:84.11ms +[2025-08-22 16:45:23] [Rank 0] step:1121/10000 train_time:94294ms step_avg:84.12ms +[2025-08-22 16:45:23] [Rank 0] step:1121/10000 train_time:94294ms step_avg:84.12ms +[2025-08-22 16:45:25] [Rank 0] step:1141/10000 train_time:95986ms step_avg:84.12ms +[2025-08-22 16:45:25] [Rank 0] step:1141/10000 train_time:95986ms step_avg:84.12ms +[2025-08-22 16:45:26] [Rank 0] step:1161/10000 train_time:97678ms step_avg:84.13ms +[2025-08-22 16:45:26] [Rank 0] step:1161/10000 train_time:97678ms step_avg:84.13ms +[2025-08-22 16:45:28] [Rank 0] step:1181/10000 train_time:99370ms step_avg:84.14ms +[2025-08-22 16:45:28] [Rank 0] step:1181/10000 train_time:99370ms step_avg:84.14ms +[2025-08-22 16:45:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:45:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:45:44] [Rank 0] PRINT: step:1200/10000 val_loss:4.8225 svd_entropy: attn_qk:H=0.6306,top10E=0.45,eRank=70.8,q75/q25=68.27 attn_vo:H=0.4674,top10E=0.72,eRank=30.3,q75/q25=50.48 mlp_w1:H=0.6947,top10E=0.40,eRank=120.2,q75/q25=8.15 mlp_w2:H=0.9368,top10E=0.06,eRank=510.4,q75/q25=5.27 vo_prod:H=0.3507,top10E=0.88,eRank=13.3,q75/q25=1010.46 train_time:101232ms step_avg:84.36ms +[2025-08-22 16:45:44] [Rank 0] PRINT: step:1200/10000 val_loss:4.8225 svd_entropy: attn_qk:H=0.6306,top10E=0.45,eRank=70.8,q75/q25=68.27 attn_vo:H=0.4674,top10E=0.72,eRank=30.3,q75/q25=50.48 mlp_w1:H=0.6947,top10E=0.40,eRank=120.2,q75/q25=8.15 mlp_w2:H=0.9368,top10E=0.06,eRank=510.4,q75/q25=5.27 vo_prod:H=0.3507,top10E=0.88,eRank=13.3,q75/q25=1010.46 train_time:101232ms step_avg:84.36ms +[2025-08-22 16:45:44] [Rank 0] step:1201/10000 train_time:101246ms step_avg:84.30ms +[2025-08-22 16:45:44] [Rank 0] step:1201/10000 train_time:101246ms step_avg:84.30ms +[2025-08-22 16:45:45] [Rank 0] step:1221/10000 train_time:102782ms step_avg:84.18ms +[2025-08-22 16:45:45] [Rank 0] step:1221/10000 train_time:102782ms step_avg:84.18ms +[2025-08-22 16:45:47] [Rank 0] step:1241/10000 train_time:104471ms step_avg:84.18ms +[2025-08-22 16:45:47] [Rank 0] step:1241/10000 train_time:104471ms step_avg:84.18ms +[2025-08-22 16:45:49] [Rank 0] step:1261/10000 train_time:106160ms step_avg:84.19ms +[2025-08-22 16:45:49] [Rank 0] step:1261/10000 train_time:106160ms step_avg:84.19ms +[2025-08-22 16:45:51] [Rank 0] step:1281/10000 train_time:107850ms step_avg:84.19ms +[2025-08-22 16:45:51] [Rank 0] step:1281/10000 train_time:107850ms step_avg:84.19ms +[2025-08-22 16:45:52] [Rank 0] step:1301/10000 train_time:109539ms step_avg:84.20ms +[2025-08-22 16:45:52] [Rank 0] step:1301/10000 train_time:109539ms step_avg:84.20ms +[2025-08-22 16:45:54] [Rank 0] step:1321/10000 train_time:111231ms step_avg:84.20ms +[2025-08-22 16:45:54] [Rank 0] step:1321/10000 train_time:111231ms step_avg:84.20ms +[2025-08-22 16:45:56] 
[Rank 0] step:1341/10000 train_time:112923ms step_avg:84.21ms +[2025-08-22 16:45:56] [Rank 0] step:1341/10000 train_time:112923ms step_avg:84.21ms +[2025-08-22 16:45:57] [Rank 0] step:1361/10000 train_time:114616ms step_avg:84.21ms +[2025-08-22 16:45:57] [Rank 0] step:1361/10000 train_time:114616ms step_avg:84.21ms +[2025-08-22 16:45:59] [Rank 0] step:1381/10000 train_time:116310ms step_avg:84.22ms +[2025-08-22 16:45:59] [Rank 0] step:1381/10000 train_time:116310ms step_avg:84.22ms +[2025-08-22 16:46:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:46:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:46:14] [Rank 0] PRINT: step:1400/10000 val_loss:4.7065 svd_entropy: attn_qk:H=0.6384,top10E=0.43,eRank=74.9,q75/q25=69.89 attn_vo:H=0.4870,top10E=0.69,eRank=35.0,q75/q25=54.30 mlp_w1:H=0.7060,top10E=0.39,eRank=130.0,q75/q25=8.25 mlp_w2:H=0.9404,top10E=0.06,eRank=522.6,q75/q25=4.98 vo_prod:H=0.3725,top10E=0.86,eRank=15.5,q75/q25=1331.31 train_time:118173ms step_avg:84.41ms +[2025-08-22 16:46:14] [Rank 0] PRINT: step:1400/10000 val_loss:4.7065 svd_entropy: attn_qk:H=0.6384,top10E=0.43,eRank=74.9,q75/q25=69.89 attn_vo:H=0.4870,top10E=0.69,eRank=35.0,q75/q25=54.30 mlp_w1:H=0.7060,top10E=0.39,eRank=130.0,q75/q25=8.25 mlp_w2:H=0.9404,top10E=0.06,eRank=522.6,q75/q25=4.98 vo_prod:H=0.3725,top10E=0.86,eRank=15.5,q75/q25=1331.31 train_time:118173ms step_avg:84.41ms +[2025-08-22 16:46:15] [Rank 0] step:1401/10000 train_time:118185ms step_avg:84.36ms +[2025-08-22 16:46:15] [Rank 0] step:1401/10000 train_time:118185ms step_avg:84.36ms +[2025-08-22 16:46:16] [Rank 0] step:1421/10000 train_time:119724ms step_avg:84.25ms +[2025-08-22 16:46:16] [Rank 0] step:1421/10000 train_time:119724ms step_avg:84.25ms +[2025-08-22 16:46:18] [Rank 0] step:1441/10000 train_time:121411ms step_avg:84.25ms 
+[2025-08-22 16:46:18] [Rank 0] step:1441/10000 train_time:121411ms step_avg:84.25ms +[2025-08-22 16:46:20] [Rank 0] step:1461/10000 train_time:123101ms step_avg:84.26ms +[2025-08-22 16:46:20] [Rank 0] step:1461/10000 train_time:123101ms step_avg:84.26ms +[2025-08-22 16:46:21] [Rank 0] step:1481/10000 train_time:124793ms step_avg:84.26ms +[2025-08-22 16:46:21] [Rank 0] step:1481/10000 train_time:124793ms step_avg:84.26ms +[2025-08-22 16:46:23] [Rank 0] step:1501/10000 train_time:126495ms step_avg:84.27ms +[2025-08-22 16:46:23] [Rank 0] step:1501/10000 train_time:126495ms step_avg:84.27ms +[2025-08-22 16:46:25] [Rank 0] step:1521/10000 train_time:128199ms step_avg:84.29ms +[2025-08-22 16:46:25] [Rank 0] step:1521/10000 train_time:128199ms step_avg:84.29ms +[2025-08-22 16:46:26] [Rank 0] step:1541/10000 train_time:129905ms step_avg:84.30ms +[2025-08-22 16:46:26] [Rank 0] step:1541/10000 train_time:129905ms step_avg:84.30ms +[2025-08-22 16:46:28] [Rank 0] step:1561/10000 train_time:131610ms step_avg:84.31ms +[2025-08-22 16:46:28] [Rank 0] step:1561/10000 train_time:131610ms step_avg:84.31ms +[2025-08-22 16:46:30] [Rank 0] step:1581/10000 train_time:133317ms step_avg:84.32ms +[2025-08-22 16:46:30] [Rank 0] step:1581/10000 train_time:133317ms step_avg:84.32ms +[2025-08-22 16:46:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:46:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:46:45] [Rank 0] PRINT: step:1600/10000 val_loss:4.5767 svd_entropy: attn_qk:H=0.6454,top10E=0.42,eRank=78.6,q75/q25=71.09 attn_vo:H=0.5030,top10E=0.66,eRank=39.4,q75/q25=57.48 mlp_w1:H=0.7156,top10E=0.37,eRank=138.7,q75/q25=8.29 mlp_w2:H=0.9432,top10E=0.06,eRank=532.1,q75/q25=4.75 vo_prod:H=0.3888,top10E=0.84,eRank=17.6,q75/q25=1705.88 train_time:135196ms step_avg:84.50ms +[2025-08-22 16:46:45] [Rank 0] PRINT: step:1600/10000 val_loss:4.5767 svd_entropy: attn_qk:H=0.6454,top10E=0.42,eRank=78.6,q75/q25=71.09 attn_vo:H=0.5030,top10E=0.66,eRank=39.4,q75/q25=57.48 mlp_w1:H=0.7156,top10E=0.37,eRank=138.7,q75/q25=8.29 mlp_w2:H=0.9432,top10E=0.06,eRank=532.1,q75/q25=4.75 vo_prod:H=0.3888,top10E=0.84,eRank=17.6,q75/q25=1705.88 train_time:135196ms step_avg:84.50ms +[2025-08-22 16:46:46] [Rank 0] step:1601/10000 train_time:135209ms step_avg:84.45ms +[2025-08-22 16:46:46] [Rank 0] step:1601/10000 train_time:135209ms step_avg:84.45ms +[2025-08-22 16:46:47] [Rank 0] step:1621/10000 train_time:136750ms step_avg:84.36ms +[2025-08-22 16:46:47] [Rank 0] step:1621/10000 train_time:136750ms step_avg:84.36ms +[2025-08-22 16:46:49] [Rank 0] step:1641/10000 train_time:138450ms step_avg:84.37ms +[2025-08-22 16:46:49] [Rank 0] step:1641/10000 train_time:138450ms step_avg:84.37ms +[2025-08-22 16:46:51] [Rank 0] step:1661/10000 train_time:140151ms step_avg:84.38ms +[2025-08-22 16:46:51] [Rank 0] step:1661/10000 train_time:140151ms step_avg:84.38ms +[2025-08-22 16:46:52] [Rank 0] step:1681/10000 train_time:141853ms step_avg:84.39ms +[2025-08-22 16:46:52] [Rank 0] step:1681/10000 train_time:141853ms step_avg:84.39ms +[2025-08-22 16:46:54] [Rank 0] step:1701/10000 train_time:143556ms step_avg:84.40ms +[2025-08-22 16:46:54] [Rank 0] step:1701/10000 train_time:143556ms step_avg:84.40ms +[2025-08-22 16:46:56] [Rank 0] step:1721/10000 train_time:145261ms step_avg:84.40ms +[2025-08-22 16:46:56] [Rank 0] step:1721/10000 train_time:145261ms step_avg:84.40ms +[2025-08-22 16:46:57] 
[Rank 0] step:1741/10000 train_time:146965ms step_avg:84.41ms +[2025-08-22 16:46:57] [Rank 0] step:1741/10000 train_time:146965ms step_avg:84.41ms +[2025-08-22 16:46:59] [Rank 0] step:1761/10000 train_time:148671ms step_avg:84.42ms +[2025-08-22 16:46:59] [Rank 0] step:1761/10000 train_time:148671ms step_avg:84.42ms +[2025-08-22 16:47:01] [Rank 0] step:1781/10000 train_time:150375ms step_avg:84.43ms +[2025-08-22 16:47:01] [Rank 0] step:1781/10000 train_time:150375ms step_avg:84.43ms +[2025-08-22 16:47:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:47:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:47:16] [Rank 0] PRINT: step:1800/10000 val_loss:4.4841 svd_entropy: attn_qk:H=0.6514,top10E=0.41,eRank=81.9,q75/q25=71.20 attn_vo:H=0.5160,top10E=0.64,eRank=43.2,q75/q25=61.47 mlp_w1:H=0.7240,top10E=0.36,eRank=146.6,q75/q25=8.26 mlp_w2:H=0.9450,top10E=0.06,eRank=538.7,q75/q25=4.61 vo_prod:H=0.4013,top10E=0.82,eRank=19.5,q75/q25=2113.73 train_time:152251ms step_avg:84.58ms +[2025-08-22 16:47:16] [Rank 0] PRINT: step:1800/10000 val_loss:4.4841 svd_entropy: attn_qk:H=0.6514,top10E=0.41,eRank=81.9,q75/q25=71.20 attn_vo:H=0.5160,top10E=0.64,eRank=43.2,q75/q25=61.47 mlp_w1:H=0.7240,top10E=0.36,eRank=146.6,q75/q25=8.26 mlp_w2:H=0.9450,top10E=0.06,eRank=538.7,q75/q25=4.61 vo_prod:H=0.4013,top10E=0.82,eRank=19.5,q75/q25=2113.73 train_time:152251ms step_avg:84.58ms +[2025-08-22 16:47:16] [Rank 0] step:1801/10000 train_time:152264ms step_avg:84.54ms +[2025-08-22 16:47:16] [Rank 0] step:1801/10000 train_time:152264ms step_avg:84.54ms +[2025-08-22 16:47:18] [Rank 0] step:1821/10000 train_time:153793ms step_avg:84.46ms +[2025-08-22 16:47:18] [Rank 0] step:1821/10000 train_time:153793ms step_avg:84.46ms +[2025-08-22 16:47:20] [Rank 0] step:1841/10000 train_time:155491ms step_avg:84.46ms 
+[2025-08-22 16:47:20] [Rank 0] step:1841/10000 train_time:155491ms step_avg:84.46ms +[2025-08-22 16:47:22] [Rank 0] step:1861/10000 train_time:157190ms step_avg:84.47ms +[2025-08-22 16:47:22] [Rank 0] step:1861/10000 train_time:157190ms step_avg:84.47ms +[2025-08-22 16:47:23] [Rank 0] step:1881/10000 train_time:158893ms step_avg:84.47ms +[2025-08-22 16:47:23] [Rank 0] step:1881/10000 train_time:158893ms step_avg:84.47ms +[2025-08-22 16:47:25] [Rank 0] step:1901/10000 train_time:160595ms step_avg:84.48ms +[2025-08-22 16:47:25] [Rank 0] step:1901/10000 train_time:160595ms step_avg:84.48ms +[2025-08-22 16:47:27] [Rank 0] step:1921/10000 train_time:162297ms step_avg:84.49ms +[2025-08-22 16:47:27] [Rank 0] step:1921/10000 train_time:162297ms step_avg:84.49ms +[2025-08-22 16:47:28] [Rank 0] step:1941/10000 train_time:163999ms step_avg:84.49ms +[2025-08-22 16:47:28] [Rank 0] step:1941/10000 train_time:163999ms step_avg:84.49ms +[2025-08-22 16:47:30] [Rank 0] step:1961/10000 train_time:165702ms step_avg:84.50ms +[2025-08-22 16:47:30] [Rank 0] step:1961/10000 train_time:165702ms step_avg:84.50ms +[2025-08-22 16:47:32] [Rank 0] step:1981/10000 train_time:167405ms step_avg:84.51ms +[2025-08-22 16:47:32] [Rank 0] step:1981/10000 train_time:167405ms step_avg:84.51ms +[2025-08-22 16:47:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:47:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:47:47] [Rank 0] PRINT: step:2000/10000 val_loss:4.4358 svd_entropy: attn_qk:H=0.6569,top10E=0.40,eRank=85.0,q75/q25=72.06 attn_vo:H=0.5277,top10E=0.61,eRank=46.9,q75/q25=64.99 mlp_w1:H=0.7316,top10E=0.35,eRank=153.8,q75/q25=8.24 mlp_w2:H=0.9465,top10E=0.06,eRank=543.8,q75/q25=4.50 vo_prod:H=0.4126,top10E=0.80,eRank=21.3,q75/q25=2567.01 train_time:169280ms step_avg:84.64ms +[2025-08-22 16:47:47] [Rank 0] PRINT: step:2000/10000 val_loss:4.4358 svd_entropy: attn_qk:H=0.6569,top10E=0.40,eRank=85.0,q75/q25=72.06 attn_vo:H=0.5277,top10E=0.61,eRank=46.9,q75/q25=64.99 mlp_w1:H=0.7316,top10E=0.35,eRank=153.8,q75/q25=8.24 mlp_w2:H=0.9465,top10E=0.06,eRank=543.8,q75/q25=4.50 vo_prod:H=0.4126,top10E=0.80,eRank=21.3,q75/q25=2567.01 train_time:169280ms step_avg:84.64ms +[2025-08-22 16:47:47] [Rank 0] step:2001/10000 train_time:169292ms step_avg:84.60ms +[2025-08-22 16:47:47] [Rank 0] step:2001/10000 train_time:169292ms step_avg:84.60ms +[2025-08-22 16:47:49] [Rank 0] step:2021/10000 train_time:170843ms step_avg:84.53ms +[2025-08-22 16:47:49] [Rank 0] step:2021/10000 train_time:170843ms step_avg:84.53ms +[2025-08-22 16:47:51] [Rank 0] step:2041/10000 train_time:173052ms step_avg:84.79ms +[2025-08-22 16:47:51] [Rank 0] step:2041/10000 train_time:173052ms step_avg:84.79ms +[2025-08-22 16:47:53] [Rank 0] step:2061/10000 train_time:174754ms step_avg:84.79ms +[2025-08-22 16:47:53] [Rank 0] step:2061/10000 train_time:174754ms step_avg:84.79ms +[2025-08-22 16:47:55] [Rank 0] step:2081/10000 train_time:176459ms step_avg:84.80ms +[2025-08-22 16:47:55] [Rank 0] step:2081/10000 train_time:176459ms step_avg:84.80ms +[2025-08-22 16:47:56] [Rank 0] step:2101/10000 train_time:178165ms step_avg:84.80ms +[2025-08-22 16:47:56] [Rank 0] step:2101/10000 train_time:178165ms step_avg:84.80ms +[2025-08-22 16:47:58] [Rank 0] step:2121/10000 train_time:179871ms step_avg:84.80ms +[2025-08-22 16:47:58] [Rank 0] step:2121/10000 train_time:179871ms step_avg:84.80ms +[2025-08-22 16:48:00] 
[Rank 0] step:2141/10000 train_time:181580ms step_avg:84.81ms +[2025-08-22 16:48:00] [Rank 0] step:2141/10000 train_time:181580ms step_avg:84.81ms +[2025-08-22 16:48:02] [Rank 0] step:2161/10000 train_time:183289ms step_avg:84.82ms +[2025-08-22 16:48:02] [Rank 0] step:2161/10000 train_time:183289ms step_avg:84.82ms +[2025-08-22 16:48:03] [Rank 0] step:2181/10000 train_time:184997ms step_avg:84.82ms +[2025-08-22 16:48:03] [Rank 0] step:2181/10000 train_time:184997ms step_avg:84.82ms +[2025-08-22 16:48:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:48:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:48:19] [Rank 0] PRINT: step:2200/10000 val_loss:4.3698 svd_entropy: attn_qk:H=0.6616,top10E=0.39,eRank=87.7,q75/q25=72.10 attn_vo:H=0.5374,top10E=0.59,eRank=50.1,q75/q25=67.19 mlp_w1:H=0.7391,top10E=0.34,eRank=160.5,q75/q25=8.19 mlp_w2:H=0.9475,top10E=0.06,eRank=547.4,q75/q25=4.44 vo_prod:H=0.4220,top10E=0.78,eRank=22.9,q75/q25=3010.52 train_time:186877ms step_avg:84.94ms +[2025-08-22 16:48:19] [Rank 0] PRINT: step:2200/10000 val_loss:4.3698 svd_entropy: attn_qk:H=0.6616,top10E=0.39,eRank=87.7,q75/q25=72.10 attn_vo:H=0.5374,top10E=0.59,eRank=50.1,q75/q25=67.19 mlp_w1:H=0.7391,top10E=0.34,eRank=160.5,q75/q25=8.19 mlp_w2:H=0.9475,top10E=0.06,eRank=547.4,q75/q25=4.44 vo_prod:H=0.4220,top10E=0.78,eRank=22.9,q75/q25=3010.52 train_time:186877ms step_avg:84.94ms +[2025-08-22 16:48:19] [Rank 0] step:2201/10000 train_time:186890ms step_avg:84.91ms +[2025-08-22 16:48:19] [Rank 0] step:2201/10000 train_time:186890ms step_avg:84.91ms +[2025-08-22 16:48:21] [Rank 0] step:2221/10000 train_time:188427ms step_avg:84.84ms +[2025-08-22 16:48:21] [Rank 0] step:2221/10000 train_time:188427ms step_avg:84.84ms +[2025-08-22 16:48:22] [Rank 0] step:2241/10000 train_time:190160ms step_avg:84.86ms 
+[2025-08-22 16:48:22] [Rank 0] step:2241/10000 train_time:190160ms step_avg:84.86ms +[2025-08-22 16:48:24] [Rank 0] step:2261/10000 train_time:191904ms step_avg:84.88ms +[2025-08-22 16:48:24] [Rank 0] step:2261/10000 train_time:191904ms step_avg:84.88ms +[2025-08-22 16:48:26] [Rank 0] step:2281/10000 train_time:193650ms step_avg:84.90ms +[2025-08-22 16:48:26] [Rank 0] step:2281/10000 train_time:193650ms step_avg:84.90ms +[2025-08-22 16:48:27] [Rank 0] step:2301/10000 train_time:195396ms step_avg:84.92ms +[2025-08-22 16:48:27] [Rank 0] step:2301/10000 train_time:195396ms step_avg:84.92ms +[2025-08-22 16:48:29] [Rank 0] step:2321/10000 train_time:197142ms step_avg:84.94ms +[2025-08-22 16:48:29] [Rank 0] step:2321/10000 train_time:197142ms step_avg:84.94ms +[2025-08-22 16:48:31] [Rank 0] step:2341/10000 train_time:198890ms step_avg:84.96ms +[2025-08-22 16:48:31] [Rank 0] step:2341/10000 train_time:198890ms step_avg:84.96ms +[2025-08-22 16:48:33] [Rank 0] step:2361/10000 train_time:200637ms step_avg:84.98ms +[2025-08-22 16:48:33] [Rank 0] step:2361/10000 train_time:200637ms step_avg:84.98ms +[2025-08-22 16:48:34] [Rank 0] step:2381/10000 train_time:202386ms step_avg:85.00ms +[2025-08-22 16:48:34] [Rank 0] step:2381/10000 train_time:202386ms step_avg:85.00ms +[2025-08-22 16:48:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:48:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:48:50] [Rank 0] PRINT: step:2400/10000 val_loss:4.2934 svd_entropy: attn_qk:H=0.6653,top10E=0.39,eRank=90.0,q75/q25=72.55 attn_vo:H=0.5458,top10E=0.58,eRank=53.1,q75/q25=69.80 mlp_w1:H=0.7453,top10E=0.33,eRank=166.7,q75/q25=8.16 mlp_w2:H=0.9483,top10E=0.06,eRank=550.3,q75/q25=4.38 vo_prod:H=0.4306,top10E=0.77,eRank=24.5,q75/q25=3464.94 train_time:204310ms step_avg:85.13ms +[2025-08-22 16:48:50] [Rank 0] PRINT: step:2400/10000 val_loss:4.2934 svd_entropy: attn_qk:H=0.6653,top10E=0.39,eRank=90.0,q75/q25=72.55 attn_vo:H=0.5458,top10E=0.58,eRank=53.1,q75/q25=69.80 mlp_w1:H=0.7453,top10E=0.33,eRank=166.7,q75/q25=8.16 mlp_w2:H=0.9483,top10E=0.06,eRank=550.3,q75/q25=4.38 vo_prod:H=0.4306,top10E=0.77,eRank=24.5,q75/q25=3464.94 train_time:204310ms step_avg:85.13ms +[2025-08-22 16:48:50] [Rank 0] step:2401/10000 train_time:204323ms step_avg:85.10ms +[2025-08-22 16:48:50] [Rank 0] step:2401/10000 train_time:204323ms step_avg:85.10ms +[2025-08-22 16:48:52] [Rank 0] step:2421/10000 train_time:205893ms step_avg:85.04ms +[2025-08-22 16:48:52] [Rank 0] step:2421/10000 train_time:205893ms step_avg:85.04ms +[2025-08-22 16:48:54] [Rank 0] step:2441/10000 train_time:207635ms step_avg:85.06ms +[2025-08-22 16:48:54] [Rank 0] step:2441/10000 train_time:207635ms step_avg:85.06ms +[2025-08-22 16:48:55] [Rank 0] step:2461/10000 train_time:209379ms step_avg:85.08ms +[2025-08-22 16:48:55] [Rank 0] step:2461/10000 train_time:209379ms step_avg:85.08ms +[2025-08-22 16:48:57] [Rank 0] step:2481/10000 train_time:211124ms step_avg:85.10ms +[2025-08-22 16:48:57] [Rank 0] step:2481/10000 train_time:211124ms step_avg:85.10ms +[2025-08-22 16:48:59] [Rank 0] step:2501/10000 train_time:212872ms step_avg:85.11ms +[2025-08-22 16:48:59] [Rank 0] step:2501/10000 train_time:212872ms step_avg:85.11ms +[2025-08-22 16:49:01] [Rank 0] step:2521/10000 train_time:214619ms step_avg:85.13ms +[2025-08-22 16:49:01] [Rank 0] step:2521/10000 train_time:214619ms step_avg:85.13ms +[2025-08-22 16:49:02] 
[Rank 0] step:2541/10000 train_time:216366ms step_avg:85.15ms +[2025-08-22 16:49:02] [Rank 0] step:2541/10000 train_time:216366ms step_avg:85.15ms +[2025-08-22 16:49:04] [Rank 0] step:2561/10000 train_time:218115ms step_avg:85.17ms +[2025-08-22 16:49:04] [Rank 0] step:2561/10000 train_time:218115ms step_avg:85.17ms +[2025-08-22 16:49:06] [Rank 0] step:2581/10000 train_time:219864ms step_avg:85.19ms +[2025-08-22 16:49:06] [Rank 0] step:2581/10000 train_time:219864ms step_avg:85.19ms +[2025-08-22 16:49:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:49:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:49:21] [Rank 0] PRINT: step:2600/10000 val_loss:4.2486 svd_entropy: attn_qk:H=0.6691,top10E=0.38,eRank=92.3,q75/q25=72.97 attn_vo:H=0.5531,top10E=0.56,eRank=55.8,q75/q25=73.05 mlp_w1:H=0.7510,top10E=0.32,eRank=172.5,q75/q25=8.14 mlp_w2:H=0.9488,top10E=0.06,eRank=552.4,q75/q25=4.34 vo_prod:H=0.4376,top10E=0.75,eRank=26.0,q75/q25=3959.64 train_time:221790ms step_avg:85.30ms +[2025-08-22 16:49:21] [Rank 0] PRINT: step:2600/10000 val_loss:4.2486 svd_entropy: attn_qk:H=0.6691,top10E=0.38,eRank=92.3,q75/q25=72.97 attn_vo:H=0.5531,top10E=0.56,eRank=55.8,q75/q25=73.05 mlp_w1:H=0.7510,top10E=0.32,eRank=172.5,q75/q25=8.14 mlp_w2:H=0.9488,top10E=0.06,eRank=552.4,q75/q25=4.34 vo_prod:H=0.4376,top10E=0.75,eRank=26.0,q75/q25=3959.64 train_time:221790ms step_avg:85.30ms +[2025-08-22 16:49:22] [Rank 0] step:2601/10000 train_time:221803ms step_avg:85.28ms +[2025-08-22 16:49:22] [Rank 0] step:2601/10000 train_time:221803ms step_avg:85.28ms +[2025-08-22 16:49:23] [Rank 0] step:2621/10000 train_time:223389ms step_avg:85.23ms +[2025-08-22 16:49:23] [Rank 0] step:2621/10000 train_time:223389ms step_avg:85.23ms +[2025-08-22 16:49:25] [Rank 0] step:2641/10000 train_time:225137ms step_avg:85.25ms 
+[2025-08-22 16:49:25] [Rank 0] step:2641/10000 train_time:225137ms step_avg:85.25ms +[2025-08-22 16:49:27] [Rank 0] step:2661/10000 train_time:226887ms step_avg:85.26ms +[2025-08-22 16:49:27] [Rank 0] step:2661/10000 train_time:226887ms step_avg:85.26ms +[2025-08-22 16:49:29] [Rank 0] step:2681/10000 train_time:228637ms step_avg:85.28ms +[2025-08-22 16:49:29] [Rank 0] step:2681/10000 train_time:228637ms step_avg:85.28ms +[2025-08-22 16:49:30] [Rank 0] step:2701/10000 train_time:230387ms step_avg:85.30ms +[2025-08-22 16:49:30] [Rank 0] step:2701/10000 train_time:230387ms step_avg:85.30ms +[2025-08-22 16:49:32] [Rank 0] step:2721/10000 train_time:232139ms step_avg:85.31ms +[2025-08-22 16:49:32] [Rank 0] step:2721/10000 train_time:232139ms step_avg:85.31ms +[2025-08-22 16:49:34] [Rank 0] step:2741/10000 train_time:233891ms step_avg:85.33ms +[2025-08-22 16:49:34] [Rank 0] step:2741/10000 train_time:233891ms step_avg:85.33ms +[2025-08-22 16:49:36] [Rank 0] step:2761/10000 train_time:235645ms step_avg:85.35ms +[2025-08-22 16:49:36] [Rank 0] step:2761/10000 train_time:235645ms step_avg:85.35ms +[2025-08-22 16:49:37] [Rank 0] step:2781/10000 train_time:237399ms step_avg:85.36ms +[2025-08-22 16:49:37] [Rank 0] step:2781/10000 train_time:237399ms step_avg:85.36ms +[2025-08-22 16:49:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:49:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:49:53] [Rank 0] PRINT: step:2800/10000 val_loss:4.2186 svd_entropy: attn_qk:H=0.6727,top10E=0.38,eRank=94.6,q75/q25=73.39 attn_vo:H=0.5604,top10E=0.55,eRank=58.5,q75/q25=75.14 mlp_w1:H=0.7563,top10E=0.31,eRank=178.0,q75/q25=8.17 mlp_w2:H=0.9492,top10E=0.06,eRank=554.0,q75/q25=4.32 vo_prod:H=0.4451,top10E=0.74,eRank=27.4,q75/q25=4431.49 train_time:239329ms step_avg:85.47ms +[2025-08-22 16:49:53] [Rank 0] PRINT: step:2800/10000 val_loss:4.2186 svd_entropy: attn_qk:H=0.6727,top10E=0.38,eRank=94.6,q75/q25=73.39 attn_vo:H=0.5604,top10E=0.55,eRank=58.5,q75/q25=75.14 mlp_w1:H=0.7563,top10E=0.31,eRank=178.0,q75/q25=8.17 mlp_w2:H=0.9492,top10E=0.06,eRank=554.0,q75/q25=4.32 vo_prod:H=0.4451,top10E=0.74,eRank=27.4,q75/q25=4431.49 train_time:239329ms step_avg:85.47ms +[2025-08-22 16:49:53] [Rank 0] step:2801/10000 train_time:239342ms step_avg:85.45ms +[2025-08-22 16:49:53] [Rank 0] step:2801/10000 train_time:239342ms step_avg:85.45ms +[2025-08-22 16:49:55] [Rank 0] step:2821/10000 train_time:240934ms step_avg:85.41ms +[2025-08-22 16:49:55] [Rank 0] step:2821/10000 train_time:240934ms step_avg:85.41ms +[2025-08-22 16:49:56] [Rank 0] step:2841/10000 train_time:242677ms step_avg:85.42ms +[2025-08-22 16:49:56] [Rank 0] step:2841/10000 train_time:242677ms step_avg:85.42ms +[2025-08-22 16:49:58] [Rank 0] step:2861/10000 train_time:244422ms step_avg:85.43ms +[2025-08-22 16:49:58] [Rank 0] step:2861/10000 train_time:244422ms step_avg:85.43ms +[2025-08-22 16:50:00] [Rank 0] step:2881/10000 train_time:246169ms step_avg:85.45ms +[2025-08-22 16:50:00] [Rank 0] step:2881/10000 train_time:246169ms step_avg:85.45ms +[2025-08-22 16:50:02] [Rank 0] step:2901/10000 train_time:247916ms step_avg:85.46ms +[2025-08-22 16:50:02] [Rank 0] step:2901/10000 train_time:247916ms step_avg:85.46ms +[2025-08-22 16:50:03] [Rank 0] step:2921/10000 train_time:249662ms step_avg:85.47ms +[2025-08-22 16:50:03] [Rank 0] step:2921/10000 train_time:249662ms step_avg:85.47ms +[2025-08-22 16:50:05] 
[Rank 0] step:2941/10000 train_time:251412ms step_avg:85.49ms +[2025-08-22 16:50:05] [Rank 0] step:2941/10000 train_time:251412ms step_avg:85.49ms +[2025-08-22 16:50:07] [Rank 0] step:2961/10000 train_time:253162ms step_avg:85.50ms +[2025-08-22 16:50:07] [Rank 0] step:2961/10000 train_time:253162ms step_avg:85.50ms +[2025-08-22 16:50:09] [Rank 0] step:2981/10000 train_time:254915ms step_avg:85.51ms +[2025-08-22 16:50:09] [Rank 0] step:2981/10000 train_time:254915ms step_avg:85.51ms +[2025-08-22 16:50:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:50:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:50:24] [Rank 0] PRINT: step:3000/10000 val_loss:4.1792 svd_entropy: attn_qk:H=0.6760,top10E=0.37,eRank=96.7,q75/q25=73.36 attn_vo:H=0.5667,top10E=0.53,eRank=61.0,q75/q25=76.67 mlp_w1:H=0.7612,top10E=0.31,eRank=183.1,q75/q25=8.15 mlp_w2:H=0.9495,top10E=0.06,eRank=555.2,q75/q25=4.29 vo_prod:H=0.4515,top10E=0.73,eRank=28.7,q75/q25=4750.82 train_time:256848ms step_avg:85.62ms +[2025-08-22 16:50:24] [Rank 0] PRINT: step:3000/10000 val_loss:4.1792 svd_entropy: attn_qk:H=0.6760,top10E=0.37,eRank=96.7,q75/q25=73.36 attn_vo:H=0.5667,top10E=0.53,eRank=61.0,q75/q25=76.67 mlp_w1:H=0.7612,top10E=0.31,eRank=183.1,q75/q25=8.15 mlp_w2:H=0.9495,top10E=0.06,eRank=555.2,q75/q25=4.29 vo_prod:H=0.4515,top10E=0.73,eRank=28.7,q75/q25=4750.82 train_time:256848ms step_avg:85.62ms +[2025-08-22 16:50:24] [Rank 0] step:3001/10000 train_time:256862ms step_avg:85.59ms +[2025-08-22 16:50:24] [Rank 0] step:3001/10000 train_time:256862ms step_avg:85.59ms +[2025-08-22 16:50:26] [Rank 0] step:3021/10000 train_time:258448ms step_avg:85.55ms +[2025-08-22 16:50:26] [Rank 0] step:3021/10000 train_time:258448ms step_avg:85.55ms +[2025-08-22 16:50:28] [Rank 0] step:3041/10000 train_time:260198ms step_avg:85.56ms 
+[2025-08-22 16:50:28] [Rank 0] step:3041/10000 train_time:260198ms step_avg:85.56ms +[2025-08-22 16:50:30] [Rank 0] step:3061/10000 train_time:261949ms step_avg:85.58ms +[2025-08-22 16:50:30] [Rank 0] step:3061/10000 train_time:261949ms step_avg:85.58ms +[2025-08-22 16:50:31] [Rank 0] step:3081/10000 train_time:263701ms step_avg:85.59ms +[2025-08-22 16:50:31] [Rank 0] step:3081/10000 train_time:263701ms step_avg:85.59ms +[2025-08-22 16:50:33] [Rank 0] step:3101/10000 train_time:265454ms step_avg:85.60ms +[2025-08-22 16:50:33] [Rank 0] step:3101/10000 train_time:265454ms step_avg:85.60ms +[2025-08-22 16:50:35] [Rank 0] step:3121/10000 train_time:267206ms step_avg:85.62ms +[2025-08-22 16:50:35] [Rank 0] step:3121/10000 train_time:267206ms step_avg:85.62ms +[2025-08-22 16:50:37] [Rank 0] step:3141/10000 train_time:268962ms step_avg:85.63ms +[2025-08-22 16:50:37] [Rank 0] step:3141/10000 train_time:268962ms step_avg:85.63ms +[2025-08-22 16:50:38] [Rank 0] step:3161/10000 train_time:270718ms step_avg:85.64ms +[2025-08-22 16:50:38] [Rank 0] step:3161/10000 train_time:270718ms step_avg:85.64ms +[2025-08-22 16:50:40] [Rank 0] step:3181/10000 train_time:272474ms step_avg:85.66ms +[2025-08-22 16:50:40] [Rank 0] step:3181/10000 train_time:272474ms step_avg:85.66ms +[2025-08-22 16:50:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:50:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:50:56] [Rank 0] PRINT: step:3200/10000 val_loss:4.1488 svd_entropy: attn_qk:H=0.6790,top10E=0.37,eRank=98.7,q75/q25=73.99 attn_vo:H=0.5724,top10E=0.52,eRank=63.3,q75/q25=77.90 mlp_w1:H=0.7658,top10E=0.30,eRank=187.9,q75/q25=8.15 mlp_w2:H=0.9498,top10E=0.06,eRank=556.2,q75/q25=4.28 vo_prod:H=0.4572,top10E=0.72,eRank=29.9,q75/q25=5071.07 train_time:274406ms step_avg:85.75ms +[2025-08-22 16:50:56] [Rank 0] PRINT: step:3200/10000 val_loss:4.1488 svd_entropy: attn_qk:H=0.6790,top10E=0.37,eRank=98.7,q75/q25=73.99 attn_vo:H=0.5724,top10E=0.52,eRank=63.3,q75/q25=77.90 mlp_w1:H=0.7658,top10E=0.30,eRank=187.9,q75/q25=8.15 mlp_w2:H=0.9498,top10E=0.06,eRank=556.2,q75/q25=4.28 vo_prod:H=0.4572,top10E=0.72,eRank=29.9,q75/q25=5071.07 train_time:274406ms step_avg:85.75ms +[2025-08-22 16:50:56] [Rank 0] step:3201/10000 train_time:274419ms step_avg:85.73ms +[2025-08-22 16:50:56] [Rank 0] step:3201/10000 train_time:274419ms step_avg:85.73ms +[2025-08-22 16:50:58] [Rank 0] step:3221/10000 train_time:276013ms step_avg:85.69ms +[2025-08-22 16:50:58] [Rank 0] step:3221/10000 train_time:276013ms step_avg:85.69ms +[2025-08-22 16:50:59] [Rank 0] step:3241/10000 train_time:277765ms step_avg:85.70ms +[2025-08-22 16:50:59] [Rank 0] step:3241/10000 train_time:277765ms step_avg:85.70ms +[2025-08-22 16:51:01] [Rank 0] step:3261/10000 train_time:279516ms step_avg:85.71ms +[2025-08-22 16:51:01] [Rank 0] step:3261/10000 train_time:279516ms step_avg:85.71ms +[2025-08-22 16:51:03] [Rank 0] step:3281/10000 train_time:281272ms step_avg:85.73ms +[2025-08-22 16:51:03] [Rank 0] step:3281/10000 train_time:281272ms step_avg:85.73ms +[2025-08-22 16:51:05] [Rank 0] step:3301/10000 train_time:283024ms step_avg:85.74ms +[2025-08-22 16:51:05] [Rank 0] step:3301/10000 train_time:283024ms step_avg:85.74ms +[2025-08-22 16:51:06] [Rank 0] step:3321/10000 train_time:284778ms step_avg:85.75ms +[2025-08-22 16:51:06] [Rank 0] step:3321/10000 train_time:284778ms step_avg:85.75ms +[2025-08-22 16:51:08] 
[Rank 0] step:3341/10000 train_time:286533ms step_avg:85.76ms +[2025-08-22 16:51:08] [Rank 0] step:3341/10000 train_time:286533ms step_avg:85.76ms +[2025-08-22 16:51:10] [Rank 0] step:3361/10000 train_time:288288ms step_avg:85.77ms +[2025-08-22 16:51:10] [Rank 0] step:3361/10000 train_time:288288ms step_avg:85.77ms +[2025-08-22 16:51:12] [Rank 0] step:3381/10000 train_time:290048ms step_avg:85.79ms +[2025-08-22 16:51:12] [Rank 0] step:3381/10000 train_time:290048ms step_avg:85.79ms +[2025-08-22 16:51:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:51:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:51:27] [Rank 0] PRINT: step:3400/10000 val_loss:4.1219 svd_entropy: attn_qk:H=0.6819,top10E=0.36,eRank=100.7,q75/q25=74.44 attn_vo:H=0.5778,top10E=0.51,eRank=65.6,q75/q25=80.03 mlp_w1:H=0.7701,top10E=0.29,eRank=192.6,q75/q25=8.17 mlp_w2:H=0.9500,top10E=0.06,eRank=557.1,q75/q25=4.28 vo_prod:H=0.4620,top10E=0.71,eRank=31.1,q75/q25=5519.97 train_time:291983ms step_avg:85.88ms +[2025-08-22 16:51:27] [Rank 0] PRINT: step:3400/10000 val_loss:4.1219 svd_entropy: attn_qk:H=0.6819,top10E=0.36,eRank=100.7,q75/q25=74.44 attn_vo:H=0.5778,top10E=0.51,eRank=65.6,q75/q25=80.03 mlp_w1:H=0.7701,top10E=0.29,eRank=192.6,q75/q25=8.17 mlp_w2:H=0.9500,top10E=0.06,eRank=557.1,q75/q25=4.28 vo_prod:H=0.4620,top10E=0.71,eRank=31.1,q75/q25=5519.97 train_time:291983ms step_avg:85.88ms +[2025-08-22 16:51:27] [Rank 0] step:3401/10000 train_time:291996ms step_avg:85.86ms +[2025-08-22 16:51:27] [Rank 0] step:3401/10000 train_time:291996ms step_avg:85.86ms +[2025-08-22 16:51:29] [Rank 0] step:3421/10000 train_time:293577ms step_avg:85.82ms +[2025-08-22 16:51:29] [Rank 0] step:3421/10000 train_time:293577ms step_avg:85.82ms +[2025-08-22 16:51:30] [Rank 0] step:3441/10000 train_time:295332ms step_avg:85.83ms 
+[2025-08-22 16:51:30] [Rank 0] step:3441/10000 train_time:295332ms step_avg:85.83ms +[2025-08-22 16:51:32] [Rank 0] step:3461/10000 train_time:297090ms step_avg:85.84ms +[2025-08-22 16:51:32] [Rank 0] step:3461/10000 train_time:297090ms step_avg:85.84ms +[2025-08-22 16:51:34] [Rank 0] step:3481/10000 train_time:298846ms step_avg:85.85ms +[2025-08-22 16:51:34] [Rank 0] step:3481/10000 train_time:298846ms step_avg:85.85ms +[2025-08-22 16:51:36] [Rank 0] step:3501/10000 train_time:300606ms step_avg:85.86ms +[2025-08-22 16:51:36] [Rank 0] step:3501/10000 train_time:300606ms step_avg:85.86ms +[2025-08-22 16:51:37] [Rank 0] step:3521/10000 train_time:302366ms step_avg:85.87ms +[2025-08-22 16:51:37] [Rank 0] step:3521/10000 train_time:302366ms step_avg:85.87ms +[2025-08-22 16:51:39] [Rank 0] step:3541/10000 train_time:304125ms step_avg:85.89ms +[2025-08-22 16:51:39] [Rank 0] step:3541/10000 train_time:304125ms step_avg:85.89ms +[2025-08-22 16:51:41] [Rank 0] step:3561/10000 train_time:305886ms step_avg:85.90ms +[2025-08-22 16:51:41] [Rank 0] step:3561/10000 train_time:305886ms step_avg:85.90ms +[2025-08-22 16:51:43] [Rank 0] step:3581/10000 train_time:307647ms step_avg:85.91ms +[2025-08-22 16:51:43] [Rank 0] step:3581/10000 train_time:307647ms step_avg:85.91ms +[2025-08-22 16:51:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:51:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:51:58] [Rank 0] PRINT: step:3600/10000 val_loss:4.1148 svd_entropy: attn_qk:H=0.6845,top10E=0.36,eRank=102.6,q75/q25=74.52 attn_vo:H=0.5827,top10E=0.50,eRank=67.8,q75/q25=80.50 mlp_w1:H=0.7740,top10E=0.29,eRank=197.0,q75/q25=8.16 mlp_w2:H=0.9501,top10E=0.06,eRank=557.7,q75/q25=4.26 vo_prod:H=0.4670,top10E=0.70,eRank=32.2,q75/q25=5724.26 train_time:309586ms step_avg:86.00ms +[2025-08-22 16:51:58] [Rank 0] PRINT: step:3600/10000 val_loss:4.1148 svd_entropy: attn_qk:H=0.6845,top10E=0.36,eRank=102.6,q75/q25=74.52 attn_vo:H=0.5827,top10E=0.50,eRank=67.8,q75/q25=80.50 mlp_w1:H=0.7740,top10E=0.29,eRank=197.0,q75/q25=8.16 mlp_w2:H=0.9501,top10E=0.06,eRank=557.7,q75/q25=4.26 vo_prod:H=0.4670,top10E=0.70,eRank=32.2,q75/q25=5724.26 train_time:309586ms step_avg:86.00ms +[2025-08-22 16:51:58] [Rank 0] step:3601/10000 train_time:309598ms step_avg:85.98ms +[2025-08-22 16:51:58] [Rank 0] step:3601/10000 train_time:309598ms step_avg:85.98ms +[2025-08-22 16:52:00] [Rank 0] step:3621/10000 train_time:311178ms step_avg:85.94ms +[2025-08-22 16:52:00] [Rank 0] step:3621/10000 train_time:311178ms step_avg:85.94ms +[2025-08-22 16:52:02] [Rank 0] step:3641/10000 train_time:312928ms step_avg:85.95ms +[2025-08-22 16:52:02] [Rank 0] step:3641/10000 train_time:312928ms step_avg:85.95ms +[2025-08-22 16:52:04] [Rank 0] step:3661/10000 train_time:314681ms step_avg:85.96ms +[2025-08-22 16:52:04] [Rank 0] step:3661/10000 train_time:314681ms step_avg:85.96ms +[2025-08-22 16:52:05] [Rank 0] step:3681/10000 train_time:316436ms step_avg:85.96ms +[2025-08-22 16:52:05] [Rank 0] step:3681/10000 train_time:316436ms step_avg:85.96ms +[2025-08-22 16:52:07] [Rank 0] step:3701/10000 train_time:318190ms step_avg:85.97ms +[2025-08-22 16:52:07] [Rank 0] step:3701/10000 train_time:318190ms step_avg:85.97ms +[2025-08-22 16:52:09] [Rank 0] step:3721/10000 train_time:319972ms step_avg:85.99ms +[2025-08-22 16:52:09] [Rank 0] step:3721/10000 train_time:319972ms step_avg:85.99ms +[2025-08-22 16:52:11] 
[Rank 0] step:3741/10000 train_time:321763ms step_avg:86.01ms +[2025-08-22 16:52:11] [Rank 0] step:3741/10000 train_time:321763ms step_avg:86.01ms +[2025-08-22 16:52:13] [Rank 0] step:3761/10000 train_time:323554ms step_avg:86.03ms +[2025-08-22 16:52:13] [Rank 0] step:3761/10000 train_time:323554ms step_avg:86.03ms +[2025-08-22 16:52:14] [Rank 0] step:3781/10000 train_time:325349ms step_avg:86.05ms +[2025-08-22 16:52:14] [Rank 0] step:3781/10000 train_time:325349ms step_avg:86.05ms +[2025-08-22 16:52:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:52:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:52:30] [Rank 0] PRINT: step:3800/10000 val_loss:4.0651 svd_entropy: attn_qk:H=0.6868,top10E=0.36,eRank=104.2,q75/q25=75.01 attn_vo:H=0.5872,top10E=0.49,eRank=69.8,q75/q25=80.79 mlp_w1:H=0.7777,top10E=0.28,eRank=201.1,q75/q25=8.14 mlp_w2:H=0.9502,top10E=0.06,eRank=558.3,q75/q25=4.27 vo_prod:H=0.4712,top10E=0.69,eRank=33.2,q75/q25=6030.13 train_time:327322ms step_avg:86.14ms +[2025-08-22 16:52:30] [Rank 0] PRINT: step:3800/10000 val_loss:4.0651 svd_entropy: attn_qk:H=0.6868,top10E=0.36,eRank=104.2,q75/q25=75.01 attn_vo:H=0.5872,top10E=0.49,eRank=69.8,q75/q25=80.79 mlp_w1:H=0.7777,top10E=0.28,eRank=201.1,q75/q25=8.14 mlp_w2:H=0.9502,top10E=0.06,eRank=558.3,q75/q25=4.27 vo_prod:H=0.4712,top10E=0.69,eRank=33.2,q75/q25=6030.13 train_time:327322ms step_avg:86.14ms +[2025-08-22 16:52:30] [Rank 0] step:3801/10000 train_time:327334ms step_avg:86.12ms +[2025-08-22 16:52:30] [Rank 0] step:3801/10000 train_time:327334ms step_avg:86.12ms +[2025-08-22 16:52:32] [Rank 0] step:3821/10000 train_time:328953ms step_avg:86.09ms +[2025-08-22 16:52:32] [Rank 0] step:3821/10000 train_time:328953ms step_avg:86.09ms +[2025-08-22 16:52:34] [Rank 0] step:3841/10000 train_time:330744ms step_avg:86.11ms 
+[2025-08-22 16:52:34] [Rank 0] step:3841/10000 train_time:330744ms step_avg:86.11ms +[2025-08-22 16:52:35] [Rank 0] step:3861/10000 train_time:332534ms step_avg:86.13ms +[2025-08-22 16:52:35] [Rank 0] step:3861/10000 train_time:332534ms step_avg:86.13ms +[2025-08-22 16:52:37] [Rank 0] step:3881/10000 train_time:334322ms step_avg:86.14ms +[2025-08-22 16:52:37] [Rank 0] step:3881/10000 train_time:334322ms step_avg:86.14ms +[2025-08-22 16:52:39] [Rank 0] step:3901/10000 train_time:336112ms step_avg:86.16ms +[2025-08-22 16:52:39] [Rank 0] step:3901/10000 train_time:336112ms step_avg:86.16ms +[2025-08-22 16:52:41] [Rank 0] step:3921/10000 train_time:337902ms step_avg:86.18ms +[2025-08-22 16:52:41] [Rank 0] step:3921/10000 train_time:337902ms step_avg:86.18ms +[2025-08-22 16:52:43] [Rank 0] step:3941/10000 train_time:339696ms step_avg:86.20ms +[2025-08-22 16:52:43] [Rank 0] step:3941/10000 train_time:339696ms step_avg:86.20ms +[2025-08-22 16:52:44] [Rank 0] step:3961/10000 train_time:341488ms step_avg:86.21ms +[2025-08-22 16:52:44] [Rank 0] step:3961/10000 train_time:341488ms step_avg:86.21ms +[2025-08-22 16:52:46] [Rank 0] step:3981/10000 train_time:343280ms step_avg:86.23ms +[2025-08-22 16:52:46] [Rank 0] step:3981/10000 train_time:343280ms step_avg:86.23ms +[2025-08-22 16:52:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:52:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:53:02] [Rank 0] PRINT: step:4000/10000 val_loss:4.0420 svd_entropy: attn_qk:H=0.6890,top10E=0.35,eRank=105.8,q75/q25=74.35 attn_vo:H=0.5915,top10E=0.48,eRank=71.7,q75/q25=80.11 mlp_w1:H=0.7811,top10E=0.28,eRank=205.1,q75/q25=8.12 mlp_w2:H=0.9503,top10E=0.06,eRank=558.8,q75/q25=4.26 vo_prod:H=0.4752,top10E=0.68,eRank=34.2,q75/q25=5895.26 train_time:345252ms step_avg:86.31ms +[2025-08-22 16:53:02] [Rank 0] PRINT: step:4000/10000 val_loss:4.0420 svd_entropy: attn_qk:H=0.6890,top10E=0.35,eRank=105.8,q75/q25=74.35 attn_vo:H=0.5915,top10E=0.48,eRank=71.7,q75/q25=80.11 mlp_w1:H=0.7811,top10E=0.28,eRank=205.1,q75/q25=8.12 mlp_w2:H=0.9503,top10E=0.06,eRank=558.8,q75/q25=4.26 vo_prod:H=0.4752,top10E=0.68,eRank=34.2,q75/q25=5895.26 train_time:345252ms step_avg:86.31ms +[2025-08-22 16:53:02] [Rank 0] step:4001/10000 train_time:345265ms step_avg:86.29ms +[2025-08-22 16:53:02] [Rank 0] step:4001/10000 train_time:345265ms step_avg:86.29ms +[2025-08-22 16:53:04] [Rank 0] step:4021/10000 train_time:346883ms step_avg:86.27ms +[2025-08-22 16:53:04] [Rank 0] step:4021/10000 train_time:346883ms step_avg:86.27ms +[2025-08-22 16:53:06] [Rank 0] step:4041/10000 train_time:348675ms step_avg:86.28ms +[2025-08-22 16:53:06] [Rank 0] step:4041/10000 train_time:348675ms step_avg:86.28ms +[2025-08-22 16:53:07] [Rank 0] step:4061/10000 train_time:350471ms step_avg:86.30ms +[2025-08-22 16:53:07] [Rank 0] step:4061/10000 train_time:350471ms step_avg:86.30ms +[2025-08-22 16:53:10] [Rank 0] step:4081/10000 train_time:352759ms step_avg:86.44ms +[2025-08-22 16:53:10] [Rank 0] step:4081/10000 train_time:352759ms step_avg:86.44ms +[2025-08-22 16:53:11] [Rank 0] step:4101/10000 train_time:354552ms step_avg:86.46ms +[2025-08-22 16:53:11] [Rank 0] step:4101/10000 train_time:354552ms step_avg:86.46ms +[2025-08-22 16:53:13] [Rank 0] step:4121/10000 train_time:356349ms step_avg:86.47ms +[2025-08-22 16:53:13] [Rank 0] step:4121/10000 train_time:356349ms step_avg:86.47ms +[2025-08-22 16:53:15] 
[Rank 0] step:4141/10000 train_time:358143ms step_avg:86.49ms +[2025-08-22 16:53:15] [Rank 0] step:4141/10000 train_time:358143ms step_avg:86.49ms +[2025-08-22 16:53:17] [Rank 0] step:4161/10000 train_time:359940ms step_avg:86.50ms +[2025-08-22 16:53:17] [Rank 0] step:4161/10000 train_time:359940ms step_avg:86.50ms +[2025-08-22 16:53:19] [Rank 0] step:4181/10000 train_time:361737ms step_avg:86.52ms +[2025-08-22 16:53:19] [Rank 0] step:4181/10000 train_time:361737ms step_avg:86.52ms +[2025-08-22 16:53:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:53:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:53:34] [Rank 0] PRINT: step:4200/10000 val_loss:4.0296 svd_entropy: attn_qk:H=0.6912,top10E=0.35,eRank=107.4,q75/q25=74.50 attn_vo:H=0.5955,top10E=0.47,eRank=73.6,q75/q25=80.16 mlp_w1:H=0.7836,top10E=0.27,eRank=208.7,q75/q25=8.11 mlp_w2:H=0.9503,top10E=0.06,eRank=559.0,q75/q25=4.27 vo_prod:H=0.4795,top10E=0.68,eRank=35.2,q75/q25=6093.86 train_time:363713ms step_avg:86.60ms +[2025-08-22 16:53:34] [Rank 0] PRINT: step:4200/10000 val_loss:4.0296 svd_entropy: attn_qk:H=0.6912,top10E=0.35,eRank=107.4,q75/q25=74.50 attn_vo:H=0.5955,top10E=0.47,eRank=73.6,q75/q25=80.16 mlp_w1:H=0.7836,top10E=0.27,eRank=208.7,q75/q25=8.11 mlp_w2:H=0.9503,top10E=0.06,eRank=559.0,q75/q25=4.27 vo_prod:H=0.4795,top10E=0.68,eRank=35.2,q75/q25=6093.86 train_time:363713ms step_avg:86.60ms +[2025-08-22 16:53:34] [Rank 0] step:4201/10000 train_time:363724ms step_avg:86.58ms +[2025-08-22 16:53:34] [Rank 0] step:4201/10000 train_time:363724ms step_avg:86.58ms +[2025-08-22 16:53:36] [Rank 0] step:4221/10000 train_time:365362ms step_avg:86.56ms +[2025-08-22 16:53:36] [Rank 0] step:4221/10000 train_time:365362ms step_avg:86.56ms +[2025-08-22 16:53:38] [Rank 0] step:4241/10000 train_time:367151ms step_avg:86.57ms 
+[2025-08-22 16:53:38] [Rank 0] step:4241/10000 train_time:367151ms step_avg:86.57ms +[2025-08-22 16:53:40] [Rank 0] step:4261/10000 train_time:368942ms step_avg:86.59ms +[2025-08-22 16:53:40] [Rank 0] step:4261/10000 train_time:368942ms step_avg:86.59ms +[2025-08-22 16:53:42] [Rank 0] step:4281/10000 train_time:370731ms step_avg:86.60ms +[2025-08-22 16:53:42] [Rank 0] step:4281/10000 train_time:370731ms step_avg:86.60ms +[2025-08-22 16:53:43] [Rank 0] step:4301/10000 train_time:372520ms step_avg:86.61ms +[2025-08-22 16:53:43] [Rank 0] step:4301/10000 train_time:372520ms step_avg:86.61ms +[2025-08-22 16:53:45] [Rank 0] step:4321/10000 train_time:374311ms step_avg:86.63ms +[2025-08-22 16:53:45] [Rank 0] step:4321/10000 train_time:374311ms step_avg:86.63ms +[2025-08-22 16:53:47] [Rank 0] step:4341/10000 train_time:376100ms step_avg:86.64ms +[2025-08-22 16:53:47] [Rank 0] step:4341/10000 train_time:376100ms step_avg:86.64ms +[2025-08-22 16:53:49] [Rank 0] step:4361/10000 train_time:377893ms step_avg:86.65ms +[2025-08-22 16:53:49] [Rank 0] step:4361/10000 train_time:377893ms step_avg:86.65ms +[2025-08-22 16:53:51] [Rank 0] step:4381/10000 train_time:379683ms step_avg:86.67ms +[2025-08-22 16:53:51] [Rank 0] step:4381/10000 train_time:379683ms step_avg:86.67ms +[2025-08-22 16:53:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:53:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:54:06] [Rank 0] PRINT: step:4400/10000 val_loss:4.0140 svd_entropy: attn_qk:H=0.6932,top10E=0.35,eRank=108.9,q75/q25=74.64 attn_vo:H=0.5993,top10E=0.47,eRank=75.3,q75/q25=80.67 mlp_w1:H=0.7860,top10E=0.27,eRank=212.2,q75/q25=8.14 mlp_w2:H=0.9504,top10E=0.06,eRank=559.3,q75/q25=4.27 vo_prod:H=0.4834,top10E=0.67,eRank=36.2,q75/q25=6134.08 train_time:381657ms step_avg:86.74ms +[2025-08-22 16:54:06] [Rank 0] PRINT: step:4400/10000 val_loss:4.0140 svd_entropy: attn_qk:H=0.6932,top10E=0.35,eRank=108.9,q75/q25=74.64 attn_vo:H=0.5993,top10E=0.47,eRank=75.3,q75/q25=80.67 mlp_w1:H=0.7860,top10E=0.27,eRank=212.2,q75/q25=8.14 mlp_w2:H=0.9504,top10E=0.06,eRank=559.3,q75/q25=4.27 vo_prod:H=0.4834,top10E=0.67,eRank=36.2,q75/q25=6134.08 train_time:381657ms step_avg:86.74ms +[2025-08-22 16:54:06] [Rank 0] step:4401/10000 train_time:381669ms step_avg:86.72ms +[2025-08-22 16:54:06] [Rank 0] step:4401/10000 train_time:381669ms step_avg:86.72ms +[2025-08-22 16:54:08] [Rank 0] step:4421/10000 train_time:383308ms step_avg:86.70ms +[2025-08-22 16:54:08] [Rank 0] step:4421/10000 train_time:383308ms step_avg:86.70ms +[2025-08-22 16:54:10] [Rank 0] step:4441/10000 train_time:385092ms step_avg:86.71ms +[2025-08-22 16:54:10] [Rank 0] step:4441/10000 train_time:385092ms step_avg:86.71ms +[2025-08-22 16:54:12] [Rank 0] step:4461/10000 train_time:386884ms step_avg:86.73ms +[2025-08-22 16:54:12] [Rank 0] step:4461/10000 train_time:386884ms step_avg:86.73ms +[2025-08-22 16:54:13] [Rank 0] step:4481/10000 train_time:388678ms step_avg:86.74ms +[2025-08-22 16:54:13] [Rank 0] step:4481/10000 train_time:388678ms step_avg:86.74ms +[2025-08-22 16:54:15] [Rank 0] step:4501/10000 train_time:390473ms step_avg:86.75ms +[2025-08-22 16:54:15] [Rank 0] step:4501/10000 train_time:390473ms step_avg:86.75ms +[2025-08-22 16:54:17] [Rank 0] step:4521/10000 train_time:392268ms step_avg:86.77ms +[2025-08-22 16:54:17] [Rank 0] step:4521/10000 train_time:392268ms step_avg:86.77ms +[2025-08-22 16:54:19] 
[Rank 0] step:4541/10000 train_time:394064ms step_avg:86.78ms +[2025-08-22 16:54:19] [Rank 0] step:4541/10000 train_time:394064ms step_avg:86.78ms +[2025-08-22 16:54:21] [Rank 0] step:4561/10000 train_time:395863ms step_avg:86.79ms +[2025-08-22 16:54:21] [Rank 0] step:4561/10000 train_time:395863ms step_avg:86.79ms +[2025-08-22 16:54:22] [Rank 0] step:4581/10000 train_time:397661ms step_avg:86.81ms +[2025-08-22 16:54:22] [Rank 0] step:4581/10000 train_time:397661ms step_avg:86.81ms +[2025-08-22 16:54:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:54:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:54:38] [Rank 0] PRINT: step:4600/10000 val_loss:3.9881 svd_entropy: attn_qk:H=0.6953,top10E=0.34,eRank=110.4,q75/q25=74.78 attn_vo:H=0.6031,top10E=0.46,eRank=77.2,q75/q25=80.94 mlp_w1:H=0.7881,top10E=0.27,eRank=215.5,q75/q25=8.09 mlp_w2:H=0.9504,top10E=0.06,eRank=559.5,q75/q25=4.27 vo_prod:H=0.4870,top10E=0.66,eRank=37.1,q75/q25=6366.02 train_time:399641ms step_avg:86.88ms +[2025-08-22 16:54:38] [Rank 0] PRINT: step:4600/10000 val_loss:3.9881 svd_entropy: attn_qk:H=0.6953,top10E=0.34,eRank=110.4,q75/q25=74.78 attn_vo:H=0.6031,top10E=0.46,eRank=77.2,q75/q25=80.94 mlp_w1:H=0.7881,top10E=0.27,eRank=215.5,q75/q25=8.09 mlp_w2:H=0.9504,top10E=0.06,eRank=559.5,q75/q25=4.27 vo_prod:H=0.4870,top10E=0.66,eRank=37.1,q75/q25=6366.02 train_time:399641ms step_avg:86.88ms +[2025-08-22 16:54:38] [Rank 0] step:4601/10000 train_time:399653ms step_avg:86.86ms +[2025-08-22 16:54:38] [Rank 0] step:4601/10000 train_time:399653ms step_avg:86.86ms +[2025-08-22 16:54:40] [Rank 0] step:4621/10000 train_time:401289ms step_avg:86.84ms +[2025-08-22 16:54:40] [Rank 0] step:4621/10000 train_time:401289ms step_avg:86.84ms +[2025-08-22 16:54:42] [Rank 0] step:4641/10000 train_time:403084ms step_avg:86.85ms 
+[2025-08-22 16:54:42] [Rank 0] step:4641/10000 train_time:403084ms step_avg:86.85ms +[2025-08-22 16:54:44] [Rank 0] step:4661/10000 train_time:404877ms step_avg:86.86ms +[2025-08-22 16:54:44] [Rank 0] step:4661/10000 train_time:404877ms step_avg:86.86ms +[2025-08-22 16:54:45] [Rank 0] step:4681/10000 train_time:406672ms step_avg:86.88ms +[2025-08-22 16:54:45] [Rank 0] step:4681/10000 train_time:406672ms step_avg:86.88ms +[2025-08-22 16:54:47] [Rank 0] step:4701/10000 train_time:408468ms step_avg:86.89ms +[2025-08-22 16:54:47] [Rank 0] step:4701/10000 train_time:408468ms step_avg:86.89ms +[2025-08-22 16:54:49] [Rank 0] step:4721/10000 train_time:410261ms step_avg:86.90ms +[2025-08-22 16:54:49] [Rank 0] step:4721/10000 train_time:410261ms step_avg:86.90ms +[2025-08-22 16:54:51] [Rank 0] step:4741/10000 train_time:412061ms step_avg:86.91ms +[2025-08-22 16:54:51] [Rank 0] step:4741/10000 train_time:412061ms step_avg:86.91ms +[2025-08-22 16:54:53] [Rank 0] step:4761/10000 train_time:413859ms step_avg:86.93ms +[2025-08-22 16:54:53] [Rank 0] step:4761/10000 train_time:413859ms step_avg:86.93ms +[2025-08-22 16:54:54] [Rank 0] step:4781/10000 train_time:415656ms step_avg:86.94ms +[2025-08-22 16:54:54] [Rank 0] step:4781/10000 train_time:415656ms step_avg:86.94ms +[2025-08-22 16:54:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:54:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:55:10] [Rank 0] PRINT: step:4800/10000 val_loss:3.9814 svd_entropy: attn_qk:H=0.6972,top10E=0.34,eRank=111.9,q75/q25=75.02 attn_vo:H=0.6067,top10E=0.45,eRank=78.9,q75/q25=81.54 mlp_w1:H=0.7900,top10E=0.26,eRank=218.6,q75/q25=8.07 mlp_w2:H=0.9504,top10E=0.06,eRank=559.7,q75/q25=4.27 vo_prod:H=0.4899,top10E=0.65,eRank=38.0,q75/q25=6417.78 train_time:417637ms step_avg:87.01ms +[2025-08-22 16:55:10] [Rank 0] PRINT: step:4800/10000 val_loss:3.9814 svd_entropy: attn_qk:H=0.6972,top10E=0.34,eRank=111.9,q75/q25=75.02 attn_vo:H=0.6067,top10E=0.45,eRank=78.9,q75/q25=81.54 mlp_w1:H=0.7900,top10E=0.26,eRank=218.6,q75/q25=8.07 mlp_w2:H=0.9504,top10E=0.06,eRank=559.7,q75/q25=4.27 vo_prod:H=0.4899,top10E=0.65,eRank=38.0,q75/q25=6417.78 train_time:417637ms step_avg:87.01ms +[2025-08-22 16:55:10] [Rank 0] step:4801/10000 train_time:417648ms step_avg:86.99ms +[2025-08-22 16:55:10] [Rank 0] step:4801/10000 train_time:417648ms step_avg:86.99ms +[2025-08-22 16:55:12] [Rank 0] step:4821/10000 train_time:419280ms step_avg:86.97ms +[2025-08-22 16:55:12] [Rank 0] step:4821/10000 train_time:419280ms step_avg:86.97ms +[2025-08-22 16:55:14] [Rank 0] step:4841/10000 train_time:421076ms step_avg:86.98ms +[2025-08-22 16:55:14] [Rank 0] step:4841/10000 train_time:421076ms step_avg:86.98ms +[2025-08-22 16:55:15] [Rank 0] step:4861/10000 train_time:422877ms step_avg:86.99ms +[2025-08-22 16:55:15] [Rank 0] step:4861/10000 train_time:422877ms step_avg:86.99ms +[2025-08-22 16:55:17] [Rank 0] step:4881/10000 train_time:424677ms step_avg:87.01ms +[2025-08-22 16:55:17] [Rank 0] step:4881/10000 train_time:424677ms step_avg:87.01ms +[2025-08-22 16:55:19] [Rank 0] step:4901/10000 train_time:426476ms step_avg:87.02ms +[2025-08-22 16:55:19] [Rank 0] step:4901/10000 train_time:426476ms step_avg:87.02ms +[2025-08-22 16:55:21] [Rank 0] step:4921/10000 train_time:428278ms step_avg:87.03ms +[2025-08-22 16:55:21] [Rank 0] step:4921/10000 train_time:428278ms step_avg:87.03ms +[2025-08-22 16:55:23] 
[Rank 0] step:4941/10000 train_time:430082ms step_avg:87.04ms +[2025-08-22 16:55:23] [Rank 0] step:4941/10000 train_time:430082ms step_avg:87.04ms +[2025-08-22 16:55:25] [Rank 0] step:4961/10000 train_time:431883ms step_avg:87.06ms +[2025-08-22 16:55:25] [Rank 0] step:4961/10000 train_time:431883ms step_avg:87.06ms +[2025-08-22 16:55:26] [Rank 0] step:4981/10000 train_time:433689ms step_avg:87.07ms +[2025-08-22 16:55:26] [Rank 0] step:4981/10000 train_time:433689ms step_avg:87.07ms +[2025-08-22 16:55:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:55:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:55:42] [Rank 0] PRINT: step:5000/10000 val_loss:3.9666 svd_entropy: attn_qk:H=0.6989,top10E=0.34,eRank=113.2,q75/q25=74.67 attn_vo:H=0.6102,top10E=0.45,eRank=80.6,q75/q25=81.17 mlp_w1:H=0.7912,top10E=0.26,eRank=221.4,q75/q25=8.03 mlp_w2:H=0.9504,top10E=0.06,eRank=559.7,q75/q25=4.28 vo_prod:H=0.4937,top10E=0.65,eRank=38.9,q75/q25=6418.73 train_time:435673ms step_avg:87.13ms +[2025-08-22 16:55:42] [Rank 0] PRINT: step:5000/10000 val_loss:3.9666 svd_entropy: attn_qk:H=0.6989,top10E=0.34,eRank=113.2,q75/q25=74.67 attn_vo:H=0.6102,top10E=0.45,eRank=80.6,q75/q25=81.17 mlp_w1:H=0.7912,top10E=0.26,eRank=221.4,q75/q25=8.03 mlp_w2:H=0.9504,top10E=0.06,eRank=559.7,q75/q25=4.28 vo_prod:H=0.4937,top10E=0.65,eRank=38.9,q75/q25=6418.73 train_time:435673ms step_avg:87.13ms +[2025-08-22 16:55:42] [Rank 0] step:5001/10000 train_time:435686ms step_avg:87.12ms +[2025-08-22 16:55:42] [Rank 0] step:5001/10000 train_time:435686ms step_avg:87.12ms +[2025-08-22 16:55:44] [Rank 0] step:5021/10000 train_time:437311ms step_avg:87.10ms +[2025-08-22 16:55:44] [Rank 0] step:5021/10000 train_time:437311ms step_avg:87.10ms +[2025-08-22 16:55:45] [Rank 0] step:5041/10000 train_time:439106ms step_avg:87.11ms 
+[2025-08-22 16:55:45] [Rank 0] step:5041/10000 train_time:439106ms step_avg:87.11ms +[2025-08-22 16:55:47] [Rank 0] step:5061/10000 train_time:440898ms step_avg:87.12ms +[2025-08-22 16:55:47] [Rank 0] step:5061/10000 train_time:440898ms step_avg:87.12ms +[2025-08-22 16:55:49] [Rank 0] step:5081/10000 train_time:442693ms step_avg:87.13ms +[2025-08-22 16:55:49] [Rank 0] step:5081/10000 train_time:442693ms step_avg:87.13ms +[2025-08-22 16:55:51] [Rank 0] step:5101/10000 train_time:444488ms step_avg:87.14ms +[2025-08-22 16:55:51] [Rank 0] step:5101/10000 train_time:444488ms step_avg:87.14ms +[2025-08-22 16:55:53] [Rank 0] step:5121/10000 train_time:446285ms step_avg:87.15ms +[2025-08-22 16:55:53] [Rank 0] step:5121/10000 train_time:446285ms step_avg:87.15ms +[2025-08-22 16:55:54] [Rank 0] step:5141/10000 train_time:448085ms step_avg:87.16ms +[2025-08-22 16:55:54] [Rank 0] step:5141/10000 train_time:448085ms step_avg:87.16ms +[2025-08-22 16:55:56] [Rank 0] step:5161/10000 train_time:449883ms step_avg:87.17ms +[2025-08-22 16:55:56] [Rank 0] step:5161/10000 train_time:449883ms step_avg:87.17ms +[2025-08-22 16:55:58] [Rank 0] step:5181/10000 train_time:451682ms step_avg:87.18ms +[2025-08-22 16:55:58] [Rank 0] step:5181/10000 train_time:451682ms step_avg:87.18ms +[2025-08-22 16:56:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:56:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:56:14] [Rank 0] PRINT: step:5200/10000 val_loss:3.9479 svd_entropy: attn_qk:H=0.7005,top10E=0.34,eRank=114.6,q75/q25=75.09 attn_vo:H=0.6135,top10E=0.44,eRank=82.2,q75/q25=80.69 mlp_w1:H=0.7924,top10E=0.26,eRank=224.2,q75/q25=7.98 mlp_w2:H=0.9504,top10E=0.06,eRank=559.9,q75/q25=4.28 vo_prod:H=0.4966,top10E=0.64,eRank=39.7,q75/q25=6236.02 train_time:453687ms step_avg:87.25ms +[2025-08-22 16:56:14] [Rank 0] PRINT: step:5200/10000 val_loss:3.9479 svd_entropy: attn_qk:H=0.7005,top10E=0.34,eRank=114.6,q75/q25=75.09 attn_vo:H=0.6135,top10E=0.44,eRank=82.2,q75/q25=80.69 mlp_w1:H=0.7924,top10E=0.26,eRank=224.2,q75/q25=7.98 mlp_w2:H=0.9504,top10E=0.06,eRank=559.9,q75/q25=4.28 vo_prod:H=0.4966,top10E=0.64,eRank=39.7,q75/q25=6236.02 train_time:453687ms step_avg:87.25ms +[2025-08-22 16:56:14] [Rank 0] step:5201/10000 train_time:453700ms step_avg:87.23ms +[2025-08-22 16:56:14] [Rank 0] step:5201/10000 train_time:453700ms step_avg:87.23ms +[2025-08-22 16:56:16] [Rank 0] step:5221/10000 train_time:455354ms step_avg:87.22ms +[2025-08-22 16:56:16] [Rank 0] step:5221/10000 train_time:455354ms step_avg:87.22ms +[2025-08-22 16:56:17] [Rank 0] step:5241/10000 train_time:457177ms step_avg:87.23ms +[2025-08-22 16:56:17] [Rank 0] step:5241/10000 train_time:457177ms step_avg:87.23ms +[2025-08-22 16:56:19] [Rank 0] step:5261/10000 train_time:459003ms step_avg:87.25ms +[2025-08-22 16:56:19] [Rank 0] step:5261/10000 train_time:459003ms step_avg:87.25ms +[2025-08-22 16:56:21] [Rank 0] step:5281/10000 train_time:460831ms step_avg:87.26ms +[2025-08-22 16:56:21] [Rank 0] step:5281/10000 train_time:460831ms step_avg:87.26ms +[2025-08-22 16:56:23] [Rank 0] step:5301/10000 train_time:462669ms step_avg:87.28ms +[2025-08-22 16:56:23] [Rank 0] step:5301/10000 train_time:462669ms step_avg:87.28ms +[2025-08-22 16:56:25] [Rank 0] step:5321/10000 train_time:464496ms step_avg:87.29ms +[2025-08-22 16:56:25] [Rank 0] step:5321/10000 train_time:464496ms step_avg:87.29ms +[2025-08-22 16:56:26] 
[Rank 0] step:5341/10000 train_time:466323ms step_avg:87.31ms +[2025-08-22 16:56:26] [Rank 0] step:5341/10000 train_time:466323ms step_avg:87.31ms +[2025-08-22 16:56:28] [Rank 0] step:5361/10000 train_time:468153ms step_avg:87.33ms +[2025-08-22 16:56:28] [Rank 0] step:5361/10000 train_time:468153ms step_avg:87.33ms +[2025-08-22 16:56:30] [Rank 0] step:5381/10000 train_time:469983ms step_avg:87.34ms +[2025-08-22 16:56:30] [Rank 0] step:5381/10000 train_time:469983ms step_avg:87.34ms +[2025-08-22 16:56:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:56:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:56:46] [Rank 0] PRINT: step:5400/10000 val_loss:3.9350 svd_entropy: attn_qk:H=0.7021,top10E=0.33,eRank=115.8,q75/q25=73.73 attn_vo:H=0.6171,top10E=0.43,eRank=83.8,q75/q25=78.35 mlp_w1:H=0.7943,top10E=0.26,eRank=227.2,q75/q25=7.75 mlp_w2:H=0.9504,top10E=0.06,eRank=560.0,q75/q25=4.28 vo_prod:H=0.5004,top10E=0.63,eRank=40.5,q75/q25=6030.79 train_time:471994ms step_avg:87.41ms +[2025-08-22 16:56:46] [Rank 0] PRINT: step:5400/10000 val_loss:3.9350 svd_entropy: attn_qk:H=0.7021,top10E=0.33,eRank=115.8,q75/q25=73.73 attn_vo:H=0.6171,top10E=0.43,eRank=83.8,q75/q25=78.35 mlp_w1:H=0.7943,top10E=0.26,eRank=227.2,q75/q25=7.75 mlp_w2:H=0.9504,top10E=0.06,eRank=560.0,q75/q25=4.28 vo_prod:H=0.5004,top10E=0.63,eRank=40.5,q75/q25=6030.79 train_time:471994ms step_avg:87.41ms +[2025-08-22 16:56:46] [Rank 0] step:5401/10000 train_time:472007ms step_avg:87.39ms +[2025-08-22 16:56:46] [Rank 0] step:5401/10000 train_time:472007ms step_avg:87.39ms +[2025-08-22 16:56:48] [Rank 0] step:5421/10000 train_time:473661ms step_avg:87.38ms +[2025-08-22 16:56:48] [Rank 0] step:5421/10000 train_time:473661ms step_avg:87.38ms +[2025-08-22 16:56:49] [Rank 0] step:5441/10000 train_time:475485ms step_avg:87.39ms 
+[2025-08-22 16:56:49] [Rank 0] step:5441/10000 train_time:475485ms step_avg:87.39ms +[2025-08-22 16:56:51] [Rank 0] step:5461/10000 train_time:477313ms step_avg:87.40ms +[2025-08-22 16:56:51] [Rank 0] step:5461/10000 train_time:477313ms step_avg:87.40ms +[2025-08-22 16:56:53] [Rank 0] step:5481/10000 train_time:479142ms step_avg:87.42ms +[2025-08-22 16:56:53] [Rank 0] step:5481/10000 train_time:479142ms step_avg:87.42ms +[2025-08-22 16:56:55] [Rank 0] step:5501/10000 train_time:480973ms step_avg:87.43ms +[2025-08-22 16:56:55] [Rank 0] step:5501/10000 train_time:480973ms step_avg:87.43ms +[2025-08-22 16:56:57] [Rank 0] step:5521/10000 train_time:482806ms step_avg:87.45ms +[2025-08-22 16:56:57] [Rank 0] step:5521/10000 train_time:482806ms step_avg:87.45ms +[2025-08-22 16:56:59] [Rank 0] step:5541/10000 train_time:484633ms step_avg:87.46ms +[2025-08-22 16:56:59] [Rank 0] step:5541/10000 train_time:484633ms step_avg:87.46ms +[2025-08-22 16:57:00] [Rank 0] step:5561/10000 train_time:486462ms step_avg:87.48ms +[2025-08-22 16:57:00] [Rank 0] step:5561/10000 train_time:486462ms step_avg:87.48ms +[2025-08-22 16:57:02] [Rank 0] step:5581/10000 train_time:488293ms step_avg:87.49ms +[2025-08-22 16:57:02] [Rank 0] step:5581/10000 train_time:488293ms step_avg:87.49ms +[2025-08-22 16:57:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:57:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:57:18] [Rank 0] PRINT: step:5600/10000 val_loss:3.9253 svd_entropy: attn_qk:H=0.7034,top10E=0.33,eRank=116.9,q75/q25=73.66 attn_vo:H=0.6202,top10E=0.43,eRank=85.4,q75/q25=78.49 mlp_w1:H=0.7967,top10E=0.25,eRank=230.1,q75/q25=7.65 mlp_w2:H=0.9506,top10E=0.06,eRank=560.5,q75/q25=4.22 vo_prod:H=0.5036,top10E=0.63,eRank=41.3,q75/q25=5972.65 train_time:490309ms step_avg:87.56ms +[2025-08-22 16:57:18] [Rank 0] PRINT: step:5600/10000 val_loss:3.9253 svd_entropy: attn_qk:H=0.7034,top10E=0.33,eRank=116.9,q75/q25=73.66 attn_vo:H=0.6202,top10E=0.43,eRank=85.4,q75/q25=78.49 mlp_w1:H=0.7967,top10E=0.25,eRank=230.1,q75/q25=7.65 mlp_w2:H=0.9506,top10E=0.06,eRank=560.5,q75/q25=4.22 vo_prod:H=0.5036,top10E=0.63,eRank=41.3,q75/q25=5972.65 train_time:490309ms step_avg:87.56ms +[2025-08-22 16:57:18] [Rank 0] step:5601/10000 train_time:490323ms step_avg:87.54ms +[2025-08-22 16:57:18] [Rank 0] step:5601/10000 train_time:490323ms step_avg:87.54ms +[2025-08-22 16:57:20] [Rank 0] step:5621/10000 train_time:491974ms step_avg:87.52ms +[2025-08-22 16:57:20] [Rank 0] step:5621/10000 train_time:491974ms step_avg:87.52ms +[2025-08-22 16:57:22] [Rank 0] step:5641/10000 train_time:493804ms step_avg:87.54ms +[2025-08-22 16:57:22] [Rank 0] step:5641/10000 train_time:493804ms step_avg:87.54ms +[2025-08-22 16:57:23] [Rank 0] step:5661/10000 train_time:495632ms step_avg:87.55ms +[2025-08-22 16:57:23] [Rank 0] step:5661/10000 train_time:495632ms step_avg:87.55ms +[2025-08-22 16:57:25] [Rank 0] step:5681/10000 train_time:497466ms step_avg:87.57ms +[2025-08-22 16:57:25] [Rank 0] step:5681/10000 train_time:497466ms step_avg:87.57ms +[2025-08-22 16:57:27] [Rank 0] step:5701/10000 train_time:499296ms step_avg:87.58ms +[2025-08-22 16:57:27] [Rank 0] step:5701/10000 train_time:499296ms step_avg:87.58ms +[2025-08-22 16:57:29] [Rank 0] step:5721/10000 train_time:501131ms step_avg:87.59ms +[2025-08-22 16:57:29] [Rank 0] step:5721/10000 train_time:501131ms step_avg:87.59ms +[2025-08-22 16:57:31] 
[Rank 0] step:5741/10000 train_time:502968ms step_avg:87.61ms +[2025-08-22 16:57:31] [Rank 0] step:5741/10000 train_time:502968ms step_avg:87.61ms +[2025-08-22 16:57:33] [Rank 0] step:5761/10000 train_time:504802ms step_avg:87.62ms +[2025-08-22 16:57:33] [Rank 0] step:5761/10000 train_time:504802ms step_avg:87.62ms +[2025-08-22 16:57:34] [Rank 0] step:5781/10000 train_time:506637ms step_avg:87.64ms +[2025-08-22 16:57:34] [Rank 0] step:5781/10000 train_time:506637ms step_avg:87.64ms +[2025-08-22 16:57:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:57:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:57:50] [Rank 0] PRINT: step:5800/10000 val_loss:3.9195 svd_entropy: attn_qk:H=0.7047,top10E=0.33,eRank=118.0,q75/q25=73.87 attn_vo:H=0.6229,top10E=0.42,eRank=86.9,q75/q25=78.11 mlp_w1:H=0.7987,top10E=0.25,eRank=232.8,q75/q25=7.58 mlp_w2:H=0.9508,top10E=0.06,eRank=560.8,q75/q25=4.17 vo_prod:H=0.5061,top10E=0.62,eRank=42.1,q75/q25=5958.07 train_time:508660ms step_avg:87.70ms +[2025-08-22 16:57:50] [Rank 0] PRINT: step:5800/10000 val_loss:3.9195 svd_entropy: attn_qk:H=0.7047,top10E=0.33,eRank=118.0,q75/q25=73.87 attn_vo:H=0.6229,top10E=0.42,eRank=86.9,q75/q25=78.11 mlp_w1:H=0.7987,top10E=0.25,eRank=232.8,q75/q25=7.58 mlp_w2:H=0.9508,top10E=0.06,eRank=560.8,q75/q25=4.17 vo_prod:H=0.5061,top10E=0.62,eRank=42.1,q75/q25=5958.07 train_time:508660ms step_avg:87.70ms +[2025-08-22 16:57:50] [Rank 0] step:5801/10000 train_time:508673ms step_avg:87.69ms +[2025-08-22 16:57:50] [Rank 0] step:5801/10000 train_time:508673ms step_avg:87.69ms +[2025-08-22 16:57:52] [Rank 0] step:5821/10000 train_time:510344ms step_avg:87.67ms +[2025-08-22 16:57:52] [Rank 0] step:5821/10000 train_time:510344ms step_avg:87.67ms +[2025-08-22 16:57:54] [Rank 0] step:5841/10000 train_time:512167ms step_avg:87.68ms 
+[2025-08-22 16:57:54] [Rank 0] step:5841/10000 train_time:512167ms step_avg:87.68ms +[2025-08-22 16:57:56] [Rank 0] step:5861/10000 train_time:513997ms step_avg:87.70ms +[2025-08-22 16:57:56] [Rank 0] step:5861/10000 train_time:513997ms step_avg:87.70ms +[2025-08-22 16:57:57] [Rank 0] step:5881/10000 train_time:515828ms step_avg:87.71ms +[2025-08-22 16:57:57] [Rank 0] step:5881/10000 train_time:515828ms step_avg:87.71ms +[2025-08-22 16:57:59] [Rank 0] step:5901/10000 train_time:517656ms step_avg:87.72ms +[2025-08-22 16:57:59] [Rank 0] step:5901/10000 train_time:517656ms step_avg:87.72ms +[2025-08-22 16:58:01] [Rank 0] step:5921/10000 train_time:519488ms step_avg:87.74ms +[2025-08-22 16:58:01] [Rank 0] step:5921/10000 train_time:519488ms step_avg:87.74ms +[2025-08-22 16:58:03] [Rank 0] step:5941/10000 train_time:521324ms step_avg:87.75ms +[2025-08-22 16:58:03] [Rank 0] step:5941/10000 train_time:521324ms step_avg:87.75ms +[2025-08-22 16:58:05] [Rank 0] step:5961/10000 train_time:523157ms step_avg:87.76ms +[2025-08-22 16:58:05] [Rank 0] step:5961/10000 train_time:523157ms step_avg:87.76ms +[2025-08-22 16:58:07] [Rank 0] step:5981/10000 train_time:524991ms step_avg:87.78ms +[2025-08-22 16:58:07] [Rank 0] step:5981/10000 train_time:524991ms step_avg:87.78ms +[2025-08-22 16:58:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:58:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:58:22] [Rank 0] PRINT: step:6000/10000 val_loss:3.8975 svd_entropy: attn_qk:H=0.7060,top10E=0.33,eRank=119.1,q75/q25=73.96 attn_vo:H=0.6253,top10E=0.42,eRank=88.2,q75/q25=77.52 mlp_w1:H=0.8005,top10E=0.25,eRank=235.4,q75/q25=7.55 mlp_w2:H=0.9508,top10E=0.06,eRank=561.0,q75/q25=4.16 vo_prod:H=0.5087,top10E=0.62,eRank=42.8,q75/q25=5836.15 train_time:527011ms step_avg:87.84ms +[2025-08-22 16:58:22] [Rank 0] PRINT: step:6000/10000 val_loss:3.8975 svd_entropy: attn_qk:H=0.7060,top10E=0.33,eRank=119.1,q75/q25=73.96 attn_vo:H=0.6253,top10E=0.42,eRank=88.2,q75/q25=77.52 mlp_w1:H=0.8005,top10E=0.25,eRank=235.4,q75/q25=7.55 mlp_w2:H=0.9508,top10E=0.06,eRank=561.0,q75/q25=4.16 vo_prod:H=0.5087,top10E=0.62,eRank=42.8,q75/q25=5836.15 train_time:527011ms step_avg:87.84ms +[2025-08-22 16:58:22] [Rank 0] step:6001/10000 train_time:527022ms step_avg:87.82ms +[2025-08-22 16:58:22] [Rank 0] step:6001/10000 train_time:527022ms step_avg:87.82ms +[2025-08-22 16:58:24] [Rank 0] step:6021/10000 train_time:528672ms step_avg:87.80ms +[2025-08-22 16:58:24] [Rank 0] step:6021/10000 train_time:528672ms step_avg:87.80ms +[2025-08-22 16:58:26] [Rank 0] step:6041/10000 train_time:530498ms step_avg:87.82ms +[2025-08-22 16:58:26] [Rank 0] step:6041/10000 train_time:530498ms step_avg:87.82ms +[2025-08-22 16:58:28] [Rank 0] step:6061/10000 train_time:532337ms step_avg:87.83ms +[2025-08-22 16:58:28] [Rank 0] step:6061/10000 train_time:532337ms step_avg:87.83ms +[2025-08-22 16:58:29] [Rank 0] step:6081/10000 train_time:534169ms step_avg:87.84ms +[2025-08-22 16:58:29] [Rank 0] step:6081/10000 train_time:534169ms step_avg:87.84ms +[2025-08-22 16:58:31] [Rank 0] step:6101/10000 train_time:536005ms step_avg:87.86ms +[2025-08-22 16:58:31] [Rank 0] step:6101/10000 train_time:536005ms step_avg:87.86ms +[2025-08-22 16:58:33] [Rank 0] step:6121/10000 train_time:538104ms step_avg:87.91ms +[2025-08-22 16:58:33] [Rank 0] step:6121/10000 train_time:538104ms step_avg:87.91ms +[2025-08-22 16:58:35] 
[Rank 0] step:6141/10000 train_time:539947ms step_avg:87.92ms +[2025-08-22 16:58:35] [Rank 0] step:6141/10000 train_time:539947ms step_avg:87.92ms +[2025-08-22 16:58:37] [Rank 0] step:6161/10000 train_time:541780ms step_avg:87.94ms +[2025-08-22 16:58:37] [Rank 0] step:6161/10000 train_time:541780ms step_avg:87.94ms +[2025-08-22 16:58:39] [Rank 0] step:6181/10000 train_time:543611ms step_avg:87.95ms +[2025-08-22 16:58:39] [Rank 0] step:6181/10000 train_time:543611ms step_avg:87.95ms +[2025-08-22 16:58:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:58:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:58:54] [Rank 0] PRINT: step:6200/10000 val_loss:3.8826 svd_entropy: attn_qk:H=0.7072,top10E=0.33,eRank=120.2,q75/q25=73.53 attn_vo:H=0.6278,top10E=0.41,eRank=89.6,q75/q25=77.61 mlp_w1:H=0.8019,top10E=0.25,eRank=237.8,q75/q25=7.51 mlp_w2:H=0.9508,top10E=0.06,eRank=561.1,q75/q25=4.13 vo_prod:H=0.5111,top10E=0.61,eRank=43.5,q75/q25=5867.82 train_time:545629ms step_avg:88.00ms +[2025-08-22 16:58:54] [Rank 0] PRINT: step:6200/10000 val_loss:3.8826 svd_entropy: attn_qk:H=0.7072,top10E=0.33,eRank=120.2,q75/q25=73.53 attn_vo:H=0.6278,top10E=0.41,eRank=89.6,q75/q25=77.61 mlp_w1:H=0.8019,top10E=0.25,eRank=237.8,q75/q25=7.51 mlp_w2:H=0.9508,top10E=0.06,eRank=561.1,q75/q25=4.13 vo_prod:H=0.5111,top10E=0.61,eRank=43.5,q75/q25=5867.82 train_time:545629ms step_avg:88.00ms +[2025-08-22 16:58:55] [Rank 0] step:6201/10000 train_time:545641ms step_avg:87.99ms +[2025-08-22 16:58:55] [Rank 0] step:6201/10000 train_time:545641ms step_avg:87.99ms +[2025-08-22 16:58:56] [Rank 0] step:6221/10000 train_time:547305ms step_avg:87.98ms +[2025-08-22 16:58:56] [Rank 0] step:6221/10000 train_time:547305ms step_avg:87.98ms +[2025-08-22 16:58:58] [Rank 0] step:6241/10000 train_time:549131ms step_avg:87.99ms 
+[2025-08-22 16:58:58] [Rank 0] step:6241/10000 train_time:549131ms step_avg:87.99ms +[2025-08-22 16:59:00] [Rank 0] step:6261/10000 train_time:550961ms step_avg:88.00ms +[2025-08-22 16:59:00] [Rank 0] step:6261/10000 train_time:550961ms step_avg:88.00ms +[2025-08-22 16:59:02] [Rank 0] step:6281/10000 train_time:552796ms step_avg:88.01ms +[2025-08-22 16:59:02] [Rank 0] step:6281/10000 train_time:552796ms step_avg:88.01ms +[2025-08-22 16:59:04] [Rank 0] step:6301/10000 train_time:554630ms step_avg:88.02ms +[2025-08-22 16:59:04] [Rank 0] step:6301/10000 train_time:554630ms step_avg:88.02ms +[2025-08-22 16:59:06] [Rank 0] step:6321/10000 train_time:556462ms step_avg:88.03ms +[2025-08-22 16:59:06] [Rank 0] step:6321/10000 train_time:556462ms step_avg:88.03ms +[2025-08-22 16:59:07] [Rank 0] step:6341/10000 train_time:558298ms step_avg:88.05ms +[2025-08-22 16:59:07] [Rank 0] step:6341/10000 train_time:558298ms step_avg:88.05ms +[2025-08-22 16:59:09] [Rank 0] step:6361/10000 train_time:560139ms step_avg:88.06ms +[2025-08-22 16:59:09] [Rank 0] step:6361/10000 train_time:560139ms step_avg:88.06ms +[2025-08-22 16:59:11] [Rank 0] step:6381/10000 train_time:561988ms step_avg:88.07ms +[2025-08-22 16:59:11] [Rank 0] step:6381/10000 train_time:561988ms step_avg:88.07ms +[2025-08-22 16:59:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:59:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 16:59:27] [Rank 0] PRINT: step:6400/10000 val_loss:3.8667 svd_entropy: attn_qk:H=0.7082,top10E=0.33,eRank=121.2,q75/q25=73.74 attn_vo:H=0.6296,top10E=0.41,eRank=90.8,q75/q25=77.78 mlp_w1:H=0.8033,top10E=0.25,eRank=239.9,q75/q25=7.47 mlp_w2:H=0.9508,top10E=0.06,eRank=561.1,q75/q25=4.12 vo_prod:H=0.5123,top10E=0.61,eRank=44.1,q75/q25=5849.78 train_time:564009ms step_avg:88.13ms +[2025-08-22 16:59:27] [Rank 0] PRINT: step:6400/10000 val_loss:3.8667 svd_entropy: attn_qk:H=0.7082,top10E=0.33,eRank=121.2,q75/q25=73.74 attn_vo:H=0.6296,top10E=0.41,eRank=90.8,q75/q25=77.78 mlp_w1:H=0.8033,top10E=0.25,eRank=239.9,q75/q25=7.47 mlp_w2:H=0.9508,top10E=0.06,eRank=561.1,q75/q25=4.12 vo_prod:H=0.5123,top10E=0.61,eRank=44.1,q75/q25=5849.78 train_time:564009ms step_avg:88.13ms +[2025-08-22 16:59:27] [Rank 0] step:6401/10000 train_time:564021ms step_avg:88.11ms +[2025-08-22 16:59:27] [Rank 0] step:6401/10000 train_time:564021ms step_avg:88.11ms +[2025-08-22 16:59:29] [Rank 0] step:6421/10000 train_time:565691ms step_avg:88.10ms +[2025-08-22 16:59:29] [Rank 0] step:6421/10000 train_time:565691ms step_avg:88.10ms +[2025-08-22 16:59:30] [Rank 0] step:6441/10000 train_time:567525ms step_avg:88.11ms +[2025-08-22 16:59:30] [Rank 0] step:6441/10000 train_time:567525ms step_avg:88.11ms +[2025-08-22 16:59:32] [Rank 0] step:6461/10000 train_time:569366ms step_avg:88.12ms +[2025-08-22 16:59:32] [Rank 0] step:6461/10000 train_time:569366ms step_avg:88.12ms +[2025-08-22 16:59:34] [Rank 0] step:6481/10000 train_time:571212ms step_avg:88.14ms +[2025-08-22 16:59:34] [Rank 0] step:6481/10000 train_time:571212ms step_avg:88.14ms +[2025-08-22 16:59:36] [Rank 0] step:6501/10000 train_time:573045ms step_avg:88.15ms +[2025-08-22 16:59:36] [Rank 0] step:6501/10000 train_time:573045ms step_avg:88.15ms +[2025-08-22 16:59:38] [Rank 0] step:6521/10000 train_time:574880ms step_avg:88.16ms +[2025-08-22 16:59:38] [Rank 0] step:6521/10000 train_time:574880ms step_avg:88.16ms +[2025-08-22 16:59:40] 
[Rank 0] step:6541/10000 train_time:576723ms step_avg:88.17ms +[2025-08-22 16:59:40] [Rank 0] step:6541/10000 train_time:576723ms step_avg:88.17ms +[2025-08-22 16:59:42] [Rank 0] step:6561/10000 train_time:578565ms step_avg:88.18ms +[2025-08-22 16:59:42] [Rank 0] step:6561/10000 train_time:578565ms step_avg:88.18ms +[2025-08-22 16:59:43] [Rank 0] step:6581/10000 train_time:580403ms step_avg:88.19ms +[2025-08-22 16:59:43] [Rank 0] step:6581/10000 train_time:580403ms step_avg:88.19ms +[2025-08-22 16:59:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:59:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 16:59:59] [Rank 0] PRINT: step:6600/10000 val_loss:3.8564 svd_entropy: attn_qk:H=0.7092,top10E=0.33,eRank=122.1,q75/q25=73.77 attn_vo:H=0.6317,top10E=0.41,eRank=92.0,q75/q25=77.81 mlp_w1:H=0.8039,top10E=0.25,eRank=241.8,q75/q25=7.44 mlp_w2:H=0.9508,top10E=0.06,eRank=561.1,q75/q25=4.12 vo_prod:H=0.5148,top10E=0.61,eRank=44.7,q75/q25=5890.52 train_time:582431ms step_avg:88.25ms +[2025-08-22 16:59:59] [Rank 0] PRINT: step:6600/10000 val_loss:3.8564 svd_entropy: attn_qk:H=0.7092,top10E=0.33,eRank=122.1,q75/q25=73.77 attn_vo:H=0.6317,top10E=0.41,eRank=92.0,q75/q25=77.81 mlp_w1:H=0.8039,top10E=0.25,eRank=241.8,q75/q25=7.44 mlp_w2:H=0.9508,top10E=0.06,eRank=561.1,q75/q25=4.12 vo_prod:H=0.5148,top10E=0.61,eRank=44.7,q75/q25=5890.52 train_time:582431ms step_avg:88.25ms +[2025-08-22 16:59:59] [Rank 0] step:6601/10000 train_time:582443ms step_avg:88.24ms +[2025-08-22 16:59:59] [Rank 0] step:6601/10000 train_time:582443ms step_avg:88.24ms +[2025-08-22 17:00:01] [Rank 0] step:6621/10000 train_time:584101ms step_avg:88.22ms +[2025-08-22 17:00:01] [Rank 0] step:6621/10000 train_time:584101ms step_avg:88.22ms +[2025-08-22 17:00:03] [Rank 0] step:6641/10000 train_time:585942ms step_avg:88.23ms 
+[2025-08-22 17:00:03] [Rank 0] step:6641/10000 train_time:585942ms step_avg:88.23ms +[2025-08-22 17:00:05] [Rank 0] step:6661/10000 train_time:587780ms step_avg:88.24ms +[2025-08-22 17:00:05] [Rank 0] step:6661/10000 train_time:587780ms step_avg:88.24ms +[2025-08-22 17:00:07] [Rank 0] step:6681/10000 train_time:589632ms step_avg:88.26ms +[2025-08-22 17:00:07] [Rank 0] step:6681/10000 train_time:589632ms step_avg:88.26ms +[2025-08-22 17:00:08] [Rank 0] step:6701/10000 train_time:591504ms step_avg:88.27ms +[2025-08-22 17:00:08] [Rank 0] step:6701/10000 train_time:591504ms step_avg:88.27ms +[2025-08-22 17:00:10] [Rank 0] step:6721/10000 train_time:593370ms step_avg:88.29ms +[2025-08-22 17:00:10] [Rank 0] step:6721/10000 train_time:593370ms step_avg:88.29ms +[2025-08-22 17:00:12] [Rank 0] step:6741/10000 train_time:595232ms step_avg:88.30ms +[2025-08-22 17:00:12] [Rank 0] step:6741/10000 train_time:595232ms step_avg:88.30ms +[2025-08-22 17:00:14] [Rank 0] step:6761/10000 train_time:597095ms step_avg:88.31ms +[2025-08-22 17:00:14] [Rank 0] step:6761/10000 train_time:597095ms step_avg:88.31ms +[2025-08-22 17:00:16] [Rank 0] step:6781/10000 train_time:598960ms step_avg:88.33ms +[2025-08-22 17:00:16] [Rank 0] step:6781/10000 train_time:598960ms step_avg:88.33ms +[2025-08-22 17:00:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:00:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:00:32] [Rank 0] PRINT: step:6800/10000 val_loss:3.8376 svd_entropy: attn_qk:H=0.7102,top10E=0.32,eRank=122.9,q75/q25=74.07 attn_vo:H=0.6337,top10E=0.40,eRank=93.1,q75/q25=77.76 mlp_w1:H=0.8045,top10E=0.24,eRank=243.4,q75/q25=7.40 mlp_w2:H=0.9507,top10E=0.06,eRank=561.0,q75/q25=4.10 vo_prod:H=0.5170,top10E=0.60,eRank=45.3,q75/q25=5786.20 train_time:601015ms step_avg:88.38ms +[2025-08-22 17:00:32] [Rank 0] PRINT: step:6800/10000 val_loss:3.8376 svd_entropy: attn_qk:H=0.7102,top10E=0.32,eRank=122.9,q75/q25=74.07 attn_vo:H=0.6337,top10E=0.40,eRank=93.1,q75/q25=77.76 mlp_w1:H=0.8045,top10E=0.24,eRank=243.4,q75/q25=7.40 mlp_w2:H=0.9507,top10E=0.06,eRank=561.0,q75/q25=4.10 vo_prod:H=0.5170,top10E=0.60,eRank=45.3,q75/q25=5786.20 train_time:601015ms step_avg:88.38ms +[2025-08-22 17:00:32] [Rank 0] step:6801/10000 train_time:601026ms step_avg:88.37ms +[2025-08-22 17:00:32] [Rank 0] step:6801/10000 train_time:601026ms step_avg:88.37ms +[2025-08-22 17:00:34] [Rank 0] step:6821/10000 train_time:602708ms step_avg:88.36ms +[2025-08-22 17:00:34] [Rank 0] step:6821/10000 train_time:602708ms step_avg:88.36ms +[2025-08-22 17:00:35] [Rank 0] step:6841/10000 train_time:604561ms step_avg:88.37ms +[2025-08-22 17:00:35] [Rank 0] step:6841/10000 train_time:604561ms step_avg:88.37ms +[2025-08-22 17:00:37] [Rank 0] step:6861/10000 train_time:606423ms step_avg:88.39ms +[2025-08-22 17:00:37] [Rank 0] step:6861/10000 train_time:606423ms step_avg:88.39ms +[2025-08-22 17:00:39] [Rank 0] step:6881/10000 train_time:608283ms step_avg:88.40ms +[2025-08-22 17:00:39] [Rank 0] step:6881/10000 train_time:608283ms step_avg:88.40ms +[2025-08-22 17:00:41] [Rank 0] step:6901/10000 train_time:610143ms step_avg:88.41ms +[2025-08-22 17:00:41] [Rank 0] step:6901/10000 train_time:610143ms step_avg:88.41ms +[2025-08-22 17:00:43] [Rank 0] step:6921/10000 train_time:612002ms step_avg:88.43ms +[2025-08-22 17:00:43] [Rank 0] step:6921/10000 train_time:612002ms step_avg:88.43ms +[2025-08-22 17:00:45] 
[Rank 0] step:6941/10000 train_time:613872ms step_avg:88.44ms +[2025-08-22 17:00:45] [Rank 0] step:6941/10000 train_time:613872ms step_avg:88.44ms +[2025-08-22 17:00:47] [Rank 0] step:6961/10000 train_time:615750ms step_avg:88.46ms +[2025-08-22 17:00:47] [Rank 0] step:6961/10000 train_time:615750ms step_avg:88.46ms +[2025-08-22 17:00:48] [Rank 0] step:6981/10000 train_time:617621ms step_avg:88.47ms +[2025-08-22 17:00:48] [Rank 0] step:6981/10000 train_time:617621ms step_avg:88.47ms +[2025-08-22 17:00:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:00:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:01:04] [Rank 0] PRINT: step:7000/10000 val_loss:3.8192 svd_entropy: attn_qk:H=0.7111,top10E=0.32,eRank=123.7,q75/q25=73.62 attn_vo:H=0.6355,top10E=0.40,eRank=94.1,q75/q25=77.60 mlp_w1:H=0.8047,top10E=0.24,eRank=244.9,q75/q25=7.36 mlp_w2:H=0.9507,top10E=0.06,eRank=560.9,q75/q25=4.10 vo_prod:H=0.5189,top10E=0.60,eRank=45.9,q75/q25=5823.48 train_time:619680ms step_avg:88.53ms +[2025-08-22 17:01:04] [Rank 0] PRINT: step:7000/10000 val_loss:3.8192 svd_entropy: attn_qk:H=0.7111,top10E=0.32,eRank=123.7,q75/q25=73.62 attn_vo:H=0.6355,top10E=0.40,eRank=94.1,q75/q25=77.60 mlp_w1:H=0.8047,top10E=0.24,eRank=244.9,q75/q25=7.36 mlp_w2:H=0.9507,top10E=0.06,eRank=560.9,q75/q25=4.10 vo_prod:H=0.5189,top10E=0.60,eRank=45.9,q75/q25=5823.48 train_time:619680ms step_avg:88.53ms +[2025-08-22 17:01:04] [Rank 0] step:7001/10000 train_time:619692ms step_avg:88.51ms +[2025-08-22 17:01:04] [Rank 0] step:7001/10000 train_time:619692ms step_avg:88.51ms +[2025-08-22 17:01:06] [Rank 0] step:7021/10000 train_time:621398ms step_avg:88.51ms +[2025-08-22 17:01:06] [Rank 0] step:7021/10000 train_time:621398ms step_avg:88.51ms +[2025-08-22 17:01:08] [Rank 0] step:7041/10000 train_time:623260ms step_avg:88.52ms 
+[2025-08-22 17:01:08] [Rank 0] step:7041/10000 train_time:623260ms step_avg:88.52ms +[2025-08-22 17:01:10] [Rank 0] step:7061/10000 train_time:625122ms step_avg:88.53ms +[2025-08-22 17:01:10] [Rank 0] step:7061/10000 train_time:625122ms step_avg:88.53ms +[2025-08-22 17:01:12] [Rank 0] step:7081/10000 train_time:626991ms step_avg:88.55ms +[2025-08-22 17:01:12] [Rank 0] step:7081/10000 train_time:626991ms step_avg:88.55ms +[2025-08-22 17:01:14] [Rank 0] step:7101/10000 train_time:628861ms step_avg:88.56ms +[2025-08-22 17:01:14] [Rank 0] step:7101/10000 train_time:628861ms step_avg:88.56ms +[2025-08-22 17:01:15] [Rank 0] step:7121/10000 train_time:630730ms step_avg:88.57ms +[2025-08-22 17:01:15] [Rank 0] step:7121/10000 train_time:630730ms step_avg:88.57ms +[2025-08-22 17:01:17] [Rank 0] step:7141/10000 train_time:632600ms step_avg:88.59ms +[2025-08-22 17:01:17] [Rank 0] step:7141/10000 train_time:632600ms step_avg:88.59ms +[2025-08-22 17:01:19] [Rank 0] step:7161/10000 train_time:634472ms step_avg:88.60ms +[2025-08-22 17:01:19] [Rank 0] step:7161/10000 train_time:634472ms step_avg:88.60ms +[2025-08-22 17:01:21] [Rank 0] step:7181/10000 train_time:636342ms step_avg:88.61ms +[2025-08-22 17:01:21] [Rank 0] step:7181/10000 train_time:636342ms step_avg:88.61ms +[2025-08-22 17:01:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:01:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:01:37] [Rank 0] PRINT: step:7200/10000 val_loss:3.8093 svd_entropy: attn_qk:H=0.7120,top10E=0.32,eRank=124.5,q75/q25=73.88 attn_vo:H=0.6372,top10E=0.40,eRank=95.1,q75/q25=77.55 mlp_w1:H=0.8053,top10E=0.24,eRank=246.3,q75/q25=7.33 mlp_w2:H=0.9506,top10E=0.06,eRank=560.9,q75/q25=4.10 vo_prod:H=0.5206,top10E=0.59,eRank=46.4,q75/q25=5765.71 train_time:638403ms step_avg:88.67ms +[2025-08-22 17:01:37] [Rank 0] PRINT: step:7200/10000 val_loss:3.8093 svd_entropy: attn_qk:H=0.7120,top10E=0.32,eRank=124.5,q75/q25=73.88 attn_vo:H=0.6372,top10E=0.40,eRank=95.1,q75/q25=77.55 mlp_w1:H=0.8053,top10E=0.24,eRank=246.3,q75/q25=7.33 mlp_w2:H=0.9506,top10E=0.06,eRank=560.9,q75/q25=4.10 vo_prod:H=0.5206,top10E=0.59,eRank=46.4,q75/q25=5765.71 train_time:638403ms step_avg:88.67ms +[2025-08-22 17:01:37] [Rank 0] step:7201/10000 train_time:638415ms step_avg:88.66ms +[2025-08-22 17:01:37] [Rank 0] step:7201/10000 train_time:638415ms step_avg:88.66ms +[2025-08-22 17:01:39] [Rank 0] step:7221/10000 train_time:640107ms step_avg:88.65ms +[2025-08-22 17:01:39] [Rank 0] step:7221/10000 train_time:640107ms step_avg:88.65ms +[2025-08-22 17:01:41] [Rank 0] step:7241/10000 train_time:641964ms step_avg:88.66ms +[2025-08-22 17:01:41] [Rank 0] step:7241/10000 train_time:641964ms step_avg:88.66ms +[2025-08-22 17:01:42] [Rank 0] step:7261/10000 train_time:643825ms step_avg:88.67ms +[2025-08-22 17:01:42] [Rank 0] step:7261/10000 train_time:643825ms step_avg:88.67ms +[2025-08-22 17:01:44] [Rank 0] step:7281/10000 train_time:645694ms step_avg:88.68ms +[2025-08-22 17:01:44] [Rank 0] step:7281/10000 train_time:645694ms step_avg:88.68ms +[2025-08-22 17:01:46] [Rank 0] step:7301/10000 train_time:647560ms step_avg:88.69ms +[2025-08-22 17:01:46] [Rank 0] step:7301/10000 train_time:647560ms step_avg:88.69ms +[2025-08-22 17:01:48] [Rank 0] step:7321/10000 train_time:649434ms step_avg:88.71ms +[2025-08-22 17:01:48] [Rank 0] step:7321/10000 train_time:649434ms step_avg:88.71ms +[2025-08-22 17:01:50] 
[Rank 0] step:7341/10000 train_time:651299ms step_avg:88.72ms +[2025-08-22 17:01:50] [Rank 0] step:7341/10000 train_time:651299ms step_avg:88.72ms +[2025-08-22 17:01:52] [Rank 0] step:7361/10000 train_time:653172ms step_avg:88.73ms +[2025-08-22 17:01:52] [Rank 0] step:7361/10000 train_time:653172ms step_avg:88.73ms +[2025-08-22 17:01:54] [Rank 0] step:7381/10000 train_time:655046ms step_avg:88.75ms +[2025-08-22 17:01:54] [Rank 0] step:7381/10000 train_time:655046ms step_avg:88.75ms +[2025-08-22 17:01:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:01:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:02:09] [Rank 0] PRINT: step:7400/10000 val_loss:3.7883 svd_entropy: attn_qk:H=0.7128,top10E=0.32,eRank=125.2,q75/q25=74.00 attn_vo:H=0.6385,top10E=0.39,eRank=95.9,q75/q25=77.02 mlp_w1:H=0.8057,top10E=0.24,eRank=247.7,q75/q25=7.30 mlp_w2:H=0.9506,top10E=0.06,eRank=560.9,q75/q25=4.10 vo_prod:H=0.5216,top10E=0.59,eRank=46.8,q75/q25=5698.30 train_time:657084ms step_avg:88.80ms +[2025-08-22 17:02:09] [Rank 0] PRINT: step:7400/10000 val_loss:3.7883 svd_entropy: attn_qk:H=0.7128,top10E=0.32,eRank=125.2,q75/q25=74.00 attn_vo:H=0.6385,top10E=0.39,eRank=95.9,q75/q25=77.02 mlp_w1:H=0.8057,top10E=0.24,eRank=247.7,q75/q25=7.30 mlp_w2:H=0.9506,top10E=0.06,eRank=560.9,q75/q25=4.10 vo_prod:H=0.5216,top10E=0.59,eRank=46.8,q75/q25=5698.30 train_time:657084ms step_avg:88.80ms +[2025-08-22 17:02:09] [Rank 0] step:7401/10000 train_time:657097ms step_avg:88.78ms +[2025-08-22 17:02:09] [Rank 0] step:7401/10000 train_time:657097ms step_avg:88.78ms +[2025-08-22 17:02:11] [Rank 0] step:7421/10000 train_time:658792ms step_avg:88.77ms +[2025-08-22 17:02:11] [Rank 0] step:7421/10000 train_time:658792ms step_avg:88.77ms +[2025-08-22 17:02:13] [Rank 0] step:7441/10000 train_time:660651ms step_avg:88.79ms 
+[2025-08-22 17:02:13] [Rank 0] step:7441/10000 train_time:660651ms step_avg:88.79ms +[2025-08-22 17:02:15] [Rank 0] step:7461/10000 train_time:662514ms step_avg:88.80ms +[2025-08-22 17:02:15] [Rank 0] step:7461/10000 train_time:662514ms step_avg:88.80ms +[2025-08-22 17:02:17] [Rank 0] step:7481/10000 train_time:664382ms step_avg:88.81ms +[2025-08-22 17:02:17] [Rank 0] step:7481/10000 train_time:664382ms step_avg:88.81ms +[2025-08-22 17:02:19] [Rank 0] step:7501/10000 train_time:666252ms step_avg:88.82ms +[2025-08-22 17:02:19] [Rank 0] step:7501/10000 train_time:666252ms step_avg:88.82ms +[2025-08-22 17:02:21] [Rank 0] step:7521/10000 train_time:668119ms step_avg:88.83ms +[2025-08-22 17:02:21] [Rank 0] step:7521/10000 train_time:668119ms step_avg:88.83ms +[2025-08-22 17:02:23] [Rank 0] step:7541/10000 train_time:669999ms step_avg:88.85ms +[2025-08-22 17:02:23] [Rank 0] step:7541/10000 train_time:669999ms step_avg:88.85ms +[2025-08-22 17:02:24] [Rank 0] step:7561/10000 train_time:671853ms step_avg:88.86ms +[2025-08-22 17:02:24] [Rank 0] step:7561/10000 train_time:671853ms step_avg:88.86ms +[2025-08-22 17:02:26] [Rank 0] step:7581/10000 train_time:673729ms step_avg:88.87ms +[2025-08-22 17:02:26] [Rank 0] step:7581/10000 train_time:673729ms step_avg:88.87ms +[2025-08-22 17:02:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:02:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:02:42] [Rank 0] PRINT: step:7600/10000 val_loss:3.7812 svd_entropy: attn_qk:H=0.7135,top10E=0.32,eRank=125.8,q75/q25=73.66 attn_vo:H=0.6399,top10E=0.39,eRank=96.7,q75/q25=76.83 mlp_w1:H=0.8059,top10E=0.24,eRank=248.8,q75/q25=7.29 mlp_w2:H=0.9506,top10E=0.06,eRank=560.9,q75/q25=4.10 vo_prod:H=0.5233,top10E=0.59,eRank=47.2,q75/q25=5560.28 train_time:675794ms step_avg:88.92ms +[2025-08-22 17:02:42] [Rank 0] PRINT: step:7600/10000 val_loss:3.7812 svd_entropy: attn_qk:H=0.7135,top10E=0.32,eRank=125.8,q75/q25=73.66 attn_vo:H=0.6399,top10E=0.39,eRank=96.7,q75/q25=76.83 mlp_w1:H=0.8059,top10E=0.24,eRank=248.8,q75/q25=7.29 mlp_w2:H=0.9506,top10E=0.06,eRank=560.9,q75/q25=4.10 vo_prod:H=0.5233,top10E=0.59,eRank=47.2,q75/q25=5560.28 train_time:675794ms step_avg:88.92ms +[2025-08-22 17:02:42] [Rank 0] step:7601/10000 train_time:675807ms step_avg:88.91ms +[2025-08-22 17:02:42] [Rank 0] step:7601/10000 train_time:675807ms step_avg:88.91ms +[2025-08-22 17:02:44] [Rank 0] step:7621/10000 train_time:677486ms step_avg:88.90ms +[2025-08-22 17:02:44] [Rank 0] step:7621/10000 train_time:677486ms step_avg:88.90ms +[2025-08-22 17:02:46] [Rank 0] step:7641/10000 train_time:679347ms step_avg:88.91ms +[2025-08-22 17:02:46] [Rank 0] step:7641/10000 train_time:679347ms step_avg:88.91ms +[2025-08-22 17:02:48] [Rank 0] step:7661/10000 train_time:681214ms step_avg:88.92ms +[2025-08-22 17:02:48] [Rank 0] step:7661/10000 train_time:681214ms step_avg:88.92ms +[2025-08-22 17:02:50] [Rank 0] step:7681/10000 train_time:683074ms step_avg:88.93ms +[2025-08-22 17:02:50] [Rank 0] step:7681/10000 train_time:683074ms step_avg:88.93ms +[2025-08-22 17:02:51] [Rank 0] step:7701/10000 train_time:684939ms step_avg:88.94ms +[2025-08-22 17:02:51] [Rank 0] step:7701/10000 train_time:684939ms step_avg:88.94ms +[2025-08-22 17:02:53] [Rank 0] step:7721/10000 train_time:686820ms step_avg:88.95ms +[2025-08-22 17:02:53] [Rank 0] step:7721/10000 train_time:686820ms step_avg:88.95ms +[2025-08-22 17:02:55] 
[Rank 0] step:7741/10000 train_time:688688ms step_avg:88.97ms +[2025-08-22 17:02:55] [Rank 0] step:7741/10000 train_time:688688ms step_avg:88.97ms +[2025-08-22 17:02:57] [Rank 0] step:7761/10000 train_time:690560ms step_avg:88.98ms +[2025-08-22 17:02:57] [Rank 0] step:7761/10000 train_time:690560ms step_avg:88.98ms +[2025-08-22 17:02:59] [Rank 0] step:7781/10000 train_time:692435ms step_avg:88.99ms +[2025-08-22 17:02:59] [Rank 0] step:7781/10000 train_time:692435ms step_avg:88.99ms +[2025-08-22 17:03:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:03:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:03:15] [Rank 0] PRINT: step:7800/10000 val_loss:3.7675 svd_entropy: attn_qk:H=0.7142,top10E=0.32,eRank=126.4,q75/q25=73.65 attn_vo:H=0.6411,top10E=0.39,eRank=97.4,q75/q25=76.15 mlp_w1:H=0.8059,top10E=0.24,eRank=249.8,q75/q25=7.26 mlp_w2:H=0.9505,top10E=0.06,eRank=560.8,q75/q25=4.10 vo_prod:H=0.5243,top10E=0.59,eRank=47.6,q75/q25=5611.55 train_time:694502ms step_avg:89.04ms +[2025-08-22 17:03:15] [Rank 0] PRINT: step:7800/10000 val_loss:3.7675 svd_entropy: attn_qk:H=0.7142,top10E=0.32,eRank=126.4,q75/q25=73.65 attn_vo:H=0.6411,top10E=0.39,eRank=97.4,q75/q25=76.15 mlp_w1:H=0.8059,top10E=0.24,eRank=249.8,q75/q25=7.26 mlp_w2:H=0.9505,top10E=0.06,eRank=560.8,q75/q25=4.10 vo_prod:H=0.5243,top10E=0.59,eRank=47.6,q75/q25=5611.55 train_time:694502ms step_avg:89.04ms +[2025-08-22 17:03:15] [Rank 0] step:7801/10000 train_time:694515ms step_avg:89.03ms +[2025-08-22 17:03:15] [Rank 0] step:7801/10000 train_time:694515ms step_avg:89.03ms +[2025-08-22 17:03:17] [Rank 0] step:7821/10000 train_time:696199ms step_avg:89.02ms +[2025-08-22 17:03:17] [Rank 0] step:7821/10000 train_time:696199ms step_avg:89.02ms +[2025-08-22 17:03:18] [Rank 0] step:7841/10000 train_time:698065ms step_avg:89.03ms 
+[2025-08-22 17:03:18] [Rank 0] step:7841/10000 train_time:698065ms step_avg:89.03ms +[2025-08-22 17:03:20] [Rank 0] step:7861/10000 train_time:699938ms step_avg:89.04ms +[2025-08-22 17:03:20] [Rank 0] step:7861/10000 train_time:699938ms step_avg:89.04ms +[2025-08-22 17:03:22] [Rank 0] step:7881/10000 train_time:701813ms step_avg:89.05ms +[2025-08-22 17:03:22] [Rank 0] step:7881/10000 train_time:701813ms step_avg:89.05ms +[2025-08-22 17:03:24] [Rank 0] step:7901/10000 train_time:703681ms step_avg:89.06ms +[2025-08-22 17:03:24] [Rank 0] step:7901/10000 train_time:703681ms step_avg:89.06ms +[2025-08-22 17:03:26] [Rank 0] step:7921/10000 train_time:705555ms step_avg:89.07ms +[2025-08-22 17:03:26] [Rank 0] step:7921/10000 train_time:705555ms step_avg:89.07ms +[2025-08-22 17:03:28] [Rank 0] step:7941/10000 train_time:707435ms step_avg:89.09ms +[2025-08-22 17:03:28] [Rank 0] step:7941/10000 train_time:707435ms step_avg:89.09ms +[2025-08-22 17:03:30] [Rank 0] step:7961/10000 train_time:709311ms step_avg:89.10ms +[2025-08-22 17:03:30] [Rank 0] step:7961/10000 train_time:709311ms step_avg:89.10ms +[2025-08-22 17:03:32] [Rank 0] step:7981/10000 train_time:711182ms step_avg:89.11ms +[2025-08-22 17:03:32] [Rank 0] step:7981/10000 train_time:711182ms step_avg:89.11ms +[2025-08-22 17:03:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:03:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:03:47] [Rank 0] PRINT: step:8000/10000 val_loss:3.7491 svd_entropy: attn_qk:H=0.7149,top10E=0.32,eRank=127.0,q75/q25=73.58 attn_vo:H=0.6424,top10E=0.39,eRank=98.1,q75/q25=75.97 mlp_w1:H=0.8060,top10E=0.24,eRank=250.8,q75/q25=7.24 mlp_w2:H=0.9505,top10E=0.06,eRank=560.8,q75/q25=4.10 vo_prod:H=0.5262,top10E=0.58,eRank=48.1,q75/q25=5530.83 train_time:713243ms step_avg:89.16ms +[2025-08-22 17:03:47] [Rank 0] PRINT: step:8000/10000 val_loss:3.7491 svd_entropy: attn_qk:H=0.7149,top10E=0.32,eRank=127.0,q75/q25=73.58 attn_vo:H=0.6424,top10E=0.39,eRank=98.1,q75/q25=75.97 mlp_w1:H=0.8060,top10E=0.24,eRank=250.8,q75/q25=7.24 mlp_w2:H=0.9505,top10E=0.06,eRank=560.8,q75/q25=4.10 vo_prod:H=0.5262,top10E=0.58,eRank=48.1,q75/q25=5530.83 train_time:713243ms step_avg:89.16ms +[2025-08-22 17:03:47] [Rank 0] step:8001/10000 train_time:713256ms step_avg:89.15ms +[2025-08-22 17:03:47] [Rank 0] step:8001/10000 train_time:713256ms step_avg:89.15ms +[2025-08-22 17:03:49] [Rank 0] step:8021/10000 train_time:714944ms step_avg:89.13ms +[2025-08-22 17:03:49] [Rank 0] step:8021/10000 train_time:714944ms step_avg:89.13ms +[2025-08-22 17:03:51] [Rank 0] step:8041/10000 train_time:716820ms step_avg:89.15ms +[2025-08-22 17:03:51] [Rank 0] step:8041/10000 train_time:716820ms step_avg:89.15ms +[2025-08-22 17:03:53] [Rank 0] step:8061/10000 train_time:718685ms step_avg:89.16ms +[2025-08-22 17:03:53] [Rank 0] step:8061/10000 train_time:718685ms step_avg:89.16ms +[2025-08-22 17:03:55] [Rank 0] step:8081/10000 train_time:720544ms step_avg:89.17ms +[2025-08-22 17:03:55] [Rank 0] step:8081/10000 train_time:720544ms step_avg:89.17ms +[2025-08-22 17:03:57] [Rank 0] step:8101/10000 train_time:722417ms step_avg:89.18ms +[2025-08-22 17:03:57] [Rank 0] step:8101/10000 train_time:722417ms step_avg:89.18ms +[2025-08-22 17:03:59] [Rank 0] step:8121/10000 train_time:724282ms step_avg:89.19ms +[2025-08-22 17:03:59] [Rank 0] step:8121/10000 train_time:724282ms step_avg:89.19ms +[2025-08-22 17:04:01] 
[Rank 0] step:8141/10000 train_time:726630ms step_avg:89.26ms +[2025-08-22 17:04:01] [Rank 0] step:8141/10000 train_time:726630ms step_avg:89.26ms +[2025-08-22 17:04:03] [Rank 0] step:8161/10000 train_time:728508ms step_avg:89.27ms +[2025-08-22 17:04:03] [Rank 0] step:8161/10000 train_time:728508ms step_avg:89.27ms +[2025-08-22 17:04:05] [Rank 0] step:8181/10000 train_time:730407ms step_avg:89.28ms +[2025-08-22 17:04:05] [Rank 0] step:8181/10000 train_time:730407ms step_avg:89.28ms +[2025-08-22 17:04:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:04:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:04:20] [Rank 0] PRINT: step:8200/10000 val_loss:3.7367 svd_entropy: attn_qk:H=0.7154,top10E=0.32,eRank=127.5,q75/q25=73.63 attn_vo:H=0.6435,top10E=0.39,eRank=98.7,q75/q25=75.71 mlp_w1:H=0.8066,top10E=0.24,eRank=251.7,q75/q25=7.22 mlp_w2:H=0.9505,top10E=0.06,eRank=560.8,q75/q25=4.10 vo_prod:H=0.5271,top10E=0.58,eRank=48.4,q75/q25=5518.51 train_time:732518ms step_avg:89.33ms +[2025-08-22 17:04:20] [Rank 0] PRINT: step:8200/10000 val_loss:3.7367 svd_entropy: attn_qk:H=0.7154,top10E=0.32,eRank=127.5,q75/q25=73.63 attn_vo:H=0.6435,top10E=0.39,eRank=98.7,q75/q25=75.71 mlp_w1:H=0.8066,top10E=0.24,eRank=251.7,q75/q25=7.22 mlp_w2:H=0.9505,top10E=0.06,eRank=560.8,q75/q25=4.10 vo_prod:H=0.5271,top10E=0.58,eRank=48.4,q75/q25=5518.51 train_time:732518ms step_avg:89.33ms +[2025-08-22 17:04:21] [Rank 0] step:8201/10000 train_time:732531ms step_avg:89.32ms +[2025-08-22 17:04:21] [Rank 0] step:8201/10000 train_time:732531ms step_avg:89.32ms +[2025-08-22 17:04:23] [Rank 0] step:8221/10000 train_time:734263ms step_avg:89.32ms +[2025-08-22 17:04:23] [Rank 0] step:8221/10000 train_time:734263ms step_avg:89.32ms +[2025-08-22 17:04:24] [Rank 0] step:8241/10000 train_time:736165ms step_avg:89.33ms 
+[2025-08-22 17:04:24] [Rank 0] step:8241/10000 train_time:736165ms step_avg:89.33ms +[2025-08-22 17:04:26] [Rank 0] step:8261/10000 train_time:738059ms step_avg:89.34ms +[2025-08-22 17:04:26] [Rank 0] step:8261/10000 train_time:738059ms step_avg:89.34ms +[2025-08-22 17:04:28] [Rank 0] step:8281/10000 train_time:739957ms step_avg:89.36ms +[2025-08-22 17:04:28] [Rank 0] step:8281/10000 train_time:739957ms step_avg:89.36ms +[2025-08-22 17:04:30] [Rank 0] step:8301/10000 train_time:741853ms step_avg:89.37ms +[2025-08-22 17:04:30] [Rank 0] step:8301/10000 train_time:741853ms step_avg:89.37ms +[2025-08-22 17:04:32] [Rank 0] step:8321/10000 train_time:743741ms step_avg:89.38ms +[2025-08-22 17:04:32] [Rank 0] step:8321/10000 train_time:743741ms step_avg:89.38ms +[2025-08-22 17:04:34] [Rank 0] step:8341/10000 train_time:745641ms step_avg:89.39ms +[2025-08-22 17:04:34] [Rank 0] step:8341/10000 train_time:745641ms step_avg:89.39ms +[2025-08-22 17:04:36] [Rank 0] step:8361/10000 train_time:747538ms step_avg:89.41ms +[2025-08-22 17:04:36] [Rank 0] step:8361/10000 train_time:747538ms step_avg:89.41ms +[2025-08-22 17:04:38] [Rank 0] step:8381/10000 train_time:749434ms step_avg:89.42ms +[2025-08-22 17:04:38] [Rank 0] step:8381/10000 train_time:749434ms step_avg:89.42ms +[2025-08-22 17:04:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:04:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:04:53] [Rank 0] PRINT: step:8400/10000 val_loss:3.7243 svd_entropy: attn_qk:H=0.7158,top10E=0.32,eRank=127.8,q75/q25=73.35 attn_vo:H=0.6445,top10E=0.38,eRank=99.3,q75/q25=75.95 mlp_w1:H=0.8072,top10E=0.24,eRank=252.7,q75/q25=7.20 mlp_w2:H=0.9505,top10E=0.06,eRank=560.8,q75/q25=4.10 vo_prod:H=0.5283,top10E=0.58,eRank=48.7,q75/q25=5471.23 train_time:751516ms step_avg:89.47ms +[2025-08-22 17:04:53] [Rank 0] PRINT: step:8400/10000 val_loss:3.7243 svd_entropy: attn_qk:H=0.7158,top10E=0.32,eRank=127.8,q75/q25=73.35 attn_vo:H=0.6445,top10E=0.38,eRank=99.3,q75/q25=75.95 mlp_w1:H=0.8072,top10E=0.24,eRank=252.7,q75/q25=7.20 mlp_w2:H=0.9505,top10E=0.06,eRank=560.8,q75/q25=4.10 vo_prod:H=0.5283,top10E=0.58,eRank=48.7,q75/q25=5471.23 train_time:751516ms step_avg:89.47ms +[2025-08-22 17:04:54] [Rank 0] step:8401/10000 train_time:751529ms step_avg:89.46ms +[2025-08-22 17:04:54] [Rank 0] step:8401/10000 train_time:751529ms step_avg:89.46ms +[2025-08-22 17:04:55] [Rank 0] step:8421/10000 train_time:753247ms step_avg:89.45ms +[2025-08-22 17:04:55] [Rank 0] step:8421/10000 train_time:753247ms step_avg:89.45ms +[2025-08-22 17:04:57] [Rank 0] step:8441/10000 train_time:755137ms step_avg:89.46ms +[2025-08-22 17:04:57] [Rank 0] step:8441/10000 train_time:755137ms step_avg:89.46ms +[2025-08-22 17:04:59] [Rank 0] step:8461/10000 train_time:757025ms step_avg:89.47ms +[2025-08-22 17:04:59] [Rank 0] step:8461/10000 train_time:757025ms step_avg:89.47ms +[2025-08-22 17:05:01] [Rank 0] step:8481/10000 train_time:758923ms step_avg:89.49ms +[2025-08-22 17:05:01] [Rank 0] step:8481/10000 train_time:758923ms step_avg:89.49ms +[2025-08-22 17:05:03] [Rank 0] step:8501/10000 train_time:760848ms step_avg:89.50ms +[2025-08-22 17:05:03] [Rank 0] step:8501/10000 train_time:760848ms step_avg:89.50ms +[2025-08-22 17:05:05] [Rank 0] step:8521/10000 train_time:762748ms step_avg:89.51ms +[2025-08-22 17:05:05] [Rank 0] step:8521/10000 train_time:762748ms step_avg:89.51ms +[2025-08-22 17:05:07] 
[Rank 0] step:8541/10000 train_time:764658ms step_avg:89.53ms +[2025-08-22 17:05:07] [Rank 0] step:8541/10000 train_time:764658ms step_avg:89.53ms +[2025-08-22 17:05:09] [Rank 0] step:8561/10000 train_time:766559ms step_avg:89.54ms +[2025-08-22 17:05:09] [Rank 0] step:8561/10000 train_time:766559ms step_avg:89.54ms +[2025-08-22 17:05:11] [Rank 0] step:8581/10000 train_time:768462ms step_avg:89.55ms +[2025-08-22 17:05:11] [Rank 0] step:8581/10000 train_time:768462ms step_avg:89.55ms +[2025-08-22 17:05:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:05:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:05:26] [Rank 0] PRINT: step:8600/10000 val_loss:3.7134 svd_entropy: attn_qk:H=0.7163,top10E=0.32,eRank=128.2,q75/q25=73.21 attn_vo:H=0.6454,top10E=0.38,eRank=99.8,q75/q25=75.43 mlp_w1:H=0.8074,top10E=0.24,eRank=253.4,q75/q25=7.17 mlp_w2:H=0.9505,top10E=0.06,eRank=560.9,q75/q25=4.10 vo_prod:H=0.5294,top10E=0.58,eRank=49.0,q75/q25=5478.10 train_time:770544ms step_avg:89.60ms +[2025-08-22 17:05:26] [Rank 0] PRINT: step:8600/10000 val_loss:3.7134 svd_entropy: attn_qk:H=0.7163,top10E=0.32,eRank=128.2,q75/q25=73.21 attn_vo:H=0.6454,top10E=0.38,eRank=99.8,q75/q25=75.43 mlp_w1:H=0.8074,top10E=0.24,eRank=253.4,q75/q25=7.17 mlp_w2:H=0.9505,top10E=0.06,eRank=560.9,q75/q25=4.10 vo_prod:H=0.5294,top10E=0.58,eRank=49.0,q75/q25=5478.10 train_time:770544ms step_avg:89.60ms +[2025-08-22 17:05:27] [Rank 0] step:8601/10000 train_time:770556ms step_avg:89.59ms +[2025-08-22 17:05:27] [Rank 0] step:8601/10000 train_time:770556ms step_avg:89.59ms +[2025-08-22 17:05:28] [Rank 0] step:8621/10000 train_time:772274ms step_avg:89.58ms +[2025-08-22 17:05:28] [Rank 0] step:8621/10000 train_time:772274ms step_avg:89.58ms +[2025-08-22 17:05:30] [Rank 0] step:8641/10000 train_time:774170ms step_avg:89.59ms 
+[2025-08-22 17:05:30] [Rank 0] step:8641/10000 train_time:774170ms step_avg:89.59ms +[2025-08-22 17:05:32] [Rank 0] step:8661/10000 train_time:776068ms step_avg:89.60ms +[2025-08-22 17:05:32] [Rank 0] step:8661/10000 train_time:776068ms step_avg:89.60ms +[2025-08-22 17:05:34] [Rank 0] step:8681/10000 train_time:777966ms step_avg:89.62ms +[2025-08-22 17:05:34] [Rank 0] step:8681/10000 train_time:777966ms step_avg:89.62ms +[2025-08-22 17:05:36] [Rank 0] step:8701/10000 train_time:779863ms step_avg:89.63ms +[2025-08-22 17:05:36] [Rank 0] step:8701/10000 train_time:779863ms step_avg:89.63ms +[2025-08-22 17:05:38] [Rank 0] step:8721/10000 train_time:781767ms step_avg:89.64ms +[2025-08-22 17:05:38] [Rank 0] step:8721/10000 train_time:781767ms step_avg:89.64ms +[2025-08-22 17:05:40] [Rank 0] step:8741/10000 train_time:783662ms step_avg:89.65ms +[2025-08-22 17:05:40] [Rank 0] step:8741/10000 train_time:783662ms step_avg:89.65ms +[2025-08-22 17:05:42] [Rank 0] step:8761/10000 train_time:785561ms step_avg:89.67ms +[2025-08-22 17:05:42] [Rank 0] step:8761/10000 train_time:785561ms step_avg:89.67ms +[2025-08-22 17:05:44] [Rank 0] step:8781/10000 train_time:787465ms step_avg:89.68ms +[2025-08-22 17:05:44] [Rank 0] step:8781/10000 train_time:787465ms step_avg:89.68ms +[2025-08-22 17:05:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:05:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:05:59] [Rank 0] PRINT: step:8800/10000 val_loss:3.7014 svd_entropy: attn_qk:H=0.7167,top10E=0.32,eRank=128.6,q75/q25=73.45 attn_vo:H=0.6461,top10E=0.38,eRank=100.2,q75/q25=75.46 mlp_w1:H=0.8075,top10E=0.24,eRank=254.0,q75/q25=7.17 mlp_w2:H=0.9505,top10E=0.06,eRank=560.9,q75/q25=4.10 vo_prod:H=0.5301,top10E=0.58,eRank=49.2,q75/q25=5522.09 train_time:789557ms step_avg:89.72ms +[2025-08-22 17:05:59] [Rank 0] PRINT: step:8800/10000 val_loss:3.7014 svd_entropy: attn_qk:H=0.7167,top10E=0.32,eRank=128.6,q75/q25=73.45 attn_vo:H=0.6461,top10E=0.38,eRank=100.2,q75/q25=75.46 mlp_w1:H=0.8075,top10E=0.24,eRank=254.0,q75/q25=7.17 mlp_w2:H=0.9505,top10E=0.06,eRank=560.9,q75/q25=4.10 vo_prod:H=0.5301,top10E=0.58,eRank=49.2,q75/q25=5522.09 train_time:789557ms step_avg:89.72ms +[2025-08-22 17:05:59] [Rank 0] step:8801/10000 train_time:789570ms step_avg:89.71ms +[2025-08-22 17:05:59] [Rank 0] step:8801/10000 train_time:789570ms step_avg:89.71ms +[2025-08-22 17:06:01] [Rank 0] step:8821/10000 train_time:791270ms step_avg:89.70ms +[2025-08-22 17:06:01] [Rank 0] step:8821/10000 train_time:791270ms step_avg:89.70ms +[2025-08-22 17:06:03] [Rank 0] step:8841/10000 train_time:793181ms step_avg:89.72ms +[2025-08-22 17:06:03] [Rank 0] step:8841/10000 train_time:793181ms step_avg:89.72ms +[2025-08-22 17:06:05] [Rank 0] step:8861/10000 train_time:795073ms step_avg:89.73ms +[2025-08-22 17:06:05] [Rank 0] step:8861/10000 train_time:795073ms step_avg:89.73ms +[2025-08-22 17:06:07] [Rank 0] step:8881/10000 train_time:796968ms step_avg:89.74ms +[2025-08-22 17:06:07] [Rank 0] step:8881/10000 train_time:796968ms step_avg:89.74ms +[2025-08-22 17:06:09] [Rank 0] step:8901/10000 train_time:798910ms step_avg:89.76ms +[2025-08-22 17:06:09] [Rank 0] step:8901/10000 train_time:798910ms step_avg:89.76ms +[2025-08-22 17:06:11] [Rank 0] step:8921/10000 train_time:800813ms step_avg:89.77ms +[2025-08-22 17:06:11] [Rank 0] step:8921/10000 train_time:800813ms step_avg:89.77ms +[2025-08-22 17:06:13] 
[Rank 0] step:8941/10000 train_time:802721ms step_avg:89.78ms +[2025-08-22 17:06:13] [Rank 0] step:8941/10000 train_time:802721ms step_avg:89.78ms +[2025-08-22 17:06:15] [Rank 0] step:8961/10000 train_time:804617ms step_avg:89.79ms +[2025-08-22 17:06:15] [Rank 0] step:8961/10000 train_time:804617ms step_avg:89.79ms +[2025-08-22 17:06:17] [Rank 0] step:8981/10000 train_time:806514ms step_avg:89.80ms +[2025-08-22 17:06:17] [Rank 0] step:8981/10000 train_time:806514ms step_avg:89.80ms +[2025-08-22 17:06:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:06:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:06:32] [Rank 0] PRINT: step:9000/10000 val_loss:3.6907 svd_entropy: attn_qk:H=0.7171,top10E=0.32,eRank=128.9,q75/q25=73.47 attn_vo:H=0.6468,top10E=0.38,eRank=100.6,q75/q25=75.55 mlp_w1:H=0.8077,top10E=0.24,eRank=254.5,q75/q25=7.15 mlp_w2:H=0.9505,top10E=0.06,eRank=560.9,q75/q25=4.10 vo_prod:H=0.5310,top10E=0.57,eRank=49.4,q75/q25=5560.71 train_time:808601ms step_avg:89.84ms +[2025-08-22 17:06:32] [Rank 0] PRINT: step:9000/10000 val_loss:3.6907 svd_entropy: attn_qk:H=0.7171,top10E=0.32,eRank=128.9,q75/q25=73.47 attn_vo:H=0.6468,top10E=0.38,eRank=100.6,q75/q25=75.55 mlp_w1:H=0.8077,top10E=0.24,eRank=254.5,q75/q25=7.15 mlp_w2:H=0.9505,top10E=0.06,eRank=560.9,q75/q25=4.10 vo_prod:H=0.5310,top10E=0.57,eRank=49.4,q75/q25=5560.71 train_time:808601ms step_avg:89.84ms +[2025-08-22 17:06:32] [Rank 0] step:9001/10000 train_time:808614ms step_avg:89.84ms +[2025-08-22 17:06:32] [Rank 0] step:9001/10000 train_time:808614ms step_avg:89.84ms +[2025-08-22 17:06:34] [Rank 0] step:9021/10000 train_time:810334ms step_avg:89.83ms +[2025-08-22 17:06:34] [Rank 0] step:9021/10000 train_time:810334ms step_avg:89.83ms +[2025-08-22 17:06:36] [Rank 0] step:9041/10000 train_time:812239ms step_avg:89.84ms 
+[2025-08-22 17:06:36] [Rank 0] step:9041/10000 train_time:812239ms step_avg:89.84ms +[2025-08-22 17:06:38] [Rank 0] step:9061/10000 train_time:814147ms step_avg:89.85ms +[2025-08-22 17:06:38] [Rank 0] step:9061/10000 train_time:814147ms step_avg:89.85ms +[2025-08-22 17:06:40] [Rank 0] step:9081/10000 train_time:816052ms step_avg:89.86ms +[2025-08-22 17:06:40] [Rank 0] step:9081/10000 train_time:816052ms step_avg:89.86ms +[2025-08-22 17:06:42] [Rank 0] step:9101/10000 train_time:817967ms step_avg:89.88ms +[2025-08-22 17:06:42] [Rank 0] step:9101/10000 train_time:817967ms step_avg:89.88ms +[2025-08-22 17:06:44] [Rank 0] step:9121/10000 train_time:819868ms step_avg:89.89ms +[2025-08-22 17:06:44] [Rank 0] step:9121/10000 train_time:819868ms step_avg:89.89ms +[2025-08-22 17:06:46] [Rank 0] step:9141/10000 train_time:821754ms step_avg:89.90ms +[2025-08-22 17:06:46] [Rank 0] step:9141/10000 train_time:821754ms step_avg:89.90ms +[2025-08-22 17:06:48] [Rank 0] step:9161/10000 train_time:823641ms step_avg:89.91ms +[2025-08-22 17:06:48] [Rank 0] step:9161/10000 train_time:823641ms step_avg:89.91ms +[2025-08-22 17:06:50] [Rank 0] step:9181/10000 train_time:825572ms step_avg:89.92ms +[2025-08-22 17:06:50] [Rank 0] step:9181/10000 train_time:825572ms step_avg:89.92ms +[2025-08-22 17:06:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:06:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:07:05] [Rank 0] PRINT: step:9200/10000 val_loss:3.6820 svd_entropy: attn_qk:H=0.7174,top10E=0.32,eRank=129.1,q75/q25=73.40 attn_vo:H=0.6474,top10E=0.38,eRank=101.0,q75/q25=75.41 mlp_w1:H=0.8081,top10E=0.24,eRank=255.1,q75/q25=7.14 mlp_w2:H=0.9505,top10E=0.06,eRank=561.0,q75/q25=4.10 vo_prod:H=0.5316,top10E=0.57,eRank=49.6,q75/q25=5545.92 train_time:827659ms step_avg:89.96ms +[2025-08-22 17:07:05] [Rank 0] PRINT: step:9200/10000 val_loss:3.6820 svd_entropy: attn_qk:H=0.7174,top10E=0.32,eRank=129.1,q75/q25=73.40 attn_vo:H=0.6474,top10E=0.38,eRank=101.0,q75/q25=75.41 mlp_w1:H=0.8081,top10E=0.24,eRank=255.1,q75/q25=7.14 mlp_w2:H=0.9505,top10E=0.06,eRank=561.0,q75/q25=4.10 vo_prod:H=0.5316,top10E=0.57,eRank=49.6,q75/q25=5545.92 train_time:827659ms step_avg:89.96ms +[2025-08-22 17:07:05] [Rank 0] step:9201/10000 train_time:827672ms step_avg:89.95ms +[2025-08-22 17:07:05] [Rank 0] step:9201/10000 train_time:827672ms step_avg:89.95ms +[2025-08-22 17:07:07] [Rank 0] step:9221/10000 train_time:829395ms step_avg:89.95ms +[2025-08-22 17:07:07] [Rank 0] step:9221/10000 train_time:829395ms step_avg:89.95ms +[2025-08-22 17:07:09] [Rank 0] step:9241/10000 train_time:831301ms step_avg:89.96ms +[2025-08-22 17:07:09] [Rank 0] step:9241/10000 train_time:831301ms step_avg:89.96ms +[2025-08-22 17:07:11] [Rank 0] step:9261/10000 train_time:833208ms step_avg:89.97ms +[2025-08-22 17:07:11] [Rank 0] step:9261/10000 train_time:833208ms step_avg:89.97ms +[2025-08-22 17:07:13] [Rank 0] step:9281/10000 train_time:835094ms step_avg:89.98ms +[2025-08-22 17:07:13] [Rank 0] step:9281/10000 train_time:835094ms step_avg:89.98ms +[2025-08-22 17:07:15] [Rank 0] step:9301/10000 train_time:836988ms step_avg:89.99ms +[2025-08-22 17:07:15] [Rank 0] step:9301/10000 train_time:836988ms step_avg:89.99ms +[2025-08-22 17:07:17] [Rank 0] step:9321/10000 train_time:838891ms step_avg:90.00ms +[2025-08-22 17:07:17] [Rank 0] step:9321/10000 train_time:838891ms step_avg:90.00ms +[2025-08-22 17:07:19] 
[Rank 0] step:9341/10000 train_time:840788ms step_avg:90.01ms +[2025-08-22 17:07:19] [Rank 0] step:9341/10000 train_time:840788ms step_avg:90.01ms +[2025-08-22 17:07:21] [Rank 0] step:9361/10000 train_time:842689ms step_avg:90.02ms +[2025-08-22 17:07:21] [Rank 0] step:9361/10000 train_time:842689ms step_avg:90.02ms +[2025-08-22 17:07:23] [Rank 0] step:9381/10000 train_time:844607ms step_avg:90.03ms +[2025-08-22 17:07:23] [Rank 0] step:9381/10000 train_time:844607ms step_avg:90.03ms +[2025-08-22 17:07:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:07:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:07:38] [Rank 0] PRINT: step:9400/10000 val_loss:3.6724 svd_entropy: attn_qk:H=0.7176,top10E=0.31,eRank=129.3,q75/q25=73.21 attn_vo:H=0.6479,top10E=0.38,eRank=101.3,q75/q25=75.33 mlp_w1:H=0.8084,top10E=0.24,eRank=255.5,q75/q25=7.14 mlp_w2:H=0.9506,top10E=0.06,eRank=561.1,q75/q25=4.11 vo_prod:H=0.5320,top10E=0.57,eRank=49.7,q75/q25=5579.79 train_time:846705ms step_avg:90.08ms +[2025-08-22 17:07:38] [Rank 0] PRINT: step:9400/10000 val_loss:3.6724 svd_entropy: attn_qk:H=0.7176,top10E=0.31,eRank=129.3,q75/q25=73.21 attn_vo:H=0.6479,top10E=0.38,eRank=101.3,q75/q25=75.33 mlp_w1:H=0.8084,top10E=0.24,eRank=255.5,q75/q25=7.14 mlp_w2:H=0.9506,top10E=0.06,eRank=561.1,q75/q25=4.11 vo_prod:H=0.5320,top10E=0.57,eRank=49.7,q75/q25=5579.79 train_time:846705ms step_avg:90.08ms +[2025-08-22 17:07:38] [Rank 0] step:9401/10000 train_time:846717ms step_avg:90.07ms +[2025-08-22 17:07:38] [Rank 0] step:9401/10000 train_time:846717ms step_avg:90.07ms +[2025-08-22 17:07:40] [Rank 0] step:9421/10000 train_time:848457ms step_avg:90.06ms +[2025-08-22 17:07:40] [Rank 0] step:9421/10000 train_time:848457ms step_avg:90.06ms +[2025-08-22 17:07:42] [Rank 0] step:9441/10000 train_time:850355ms step_avg:90.07ms 
+[2025-08-22 17:07:42] [Rank 0] step:9441/10000 train_time:850355ms step_avg:90.07ms +[2025-08-22 17:07:44] [Rank 0] step:9461/10000 train_time:852263ms step_avg:90.08ms +[2025-08-22 17:07:44] [Rank 0] step:9461/10000 train_time:852263ms step_avg:90.08ms +[2025-08-22 17:07:46] [Rank 0] step:9481/10000 train_time:854168ms step_avg:90.09ms +[2025-08-22 17:07:46] [Rank 0] step:9481/10000 train_time:854168ms step_avg:90.09ms +[2025-08-22 17:07:48] [Rank 0] step:9501/10000 train_time:856084ms step_avg:90.10ms +[2025-08-22 17:07:48] [Rank 0] step:9501/10000 train_time:856084ms step_avg:90.10ms +[2025-08-22 17:07:50] [Rank 0] step:9521/10000 train_time:857982ms step_avg:90.11ms +[2025-08-22 17:07:50] [Rank 0] step:9521/10000 train_time:857982ms step_avg:90.11ms +[2025-08-22 17:07:52] [Rank 0] step:9541/10000 train_time:859884ms step_avg:90.13ms +[2025-08-22 17:07:52] [Rank 0] step:9541/10000 train_time:859884ms step_avg:90.13ms +[2025-08-22 17:07:54] [Rank 0] step:9561/10000 train_time:861784ms step_avg:90.14ms +[2025-08-22 17:07:54] [Rank 0] step:9561/10000 train_time:861784ms step_avg:90.14ms +[2025-08-22 17:07:56] [Rank 0] step:9581/10000 train_time:863689ms step_avg:90.15ms +[2025-08-22 17:07:56] [Rank 0] step:9581/10000 train_time:863689ms step_avg:90.15ms +[2025-08-22 17:07:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:07:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:08:11] [Rank 0] PRINT: step:9600/10000 val_loss:3.6642 svd_entropy: attn_qk:H=0.7178,top10E=0.31,eRank=129.5,q75/q25=73.25 attn_vo:H=0.6484,top10E=0.38,eRank=101.5,q75/q25=75.39 mlp_w1:H=0.8086,top10E=0.24,eRank=255.9,q75/q25=7.14 mlp_w2:H=0.9506,top10E=0.06,eRank=561.1,q75/q25=4.11 vo_prod:H=0.5325,top10E=0.57,eRank=49.9,q75/q25=5529.20 train_time:865796ms step_avg:90.19ms +[2025-08-22 17:08:11] [Rank 0] PRINT: step:9600/10000 val_loss:3.6642 svd_entropy: attn_qk:H=0.7178,top10E=0.31,eRank=129.5,q75/q25=73.25 attn_vo:H=0.6484,top10E=0.38,eRank=101.5,q75/q25=75.39 mlp_w1:H=0.8086,top10E=0.24,eRank=255.9,q75/q25=7.14 mlp_w2:H=0.9506,top10E=0.06,eRank=561.1,q75/q25=4.11 vo_prod:H=0.5325,top10E=0.57,eRank=49.9,q75/q25=5529.20 train_time:865796ms step_avg:90.19ms +[2025-08-22 17:08:11] [Rank 0] step:9601/10000 train_time:865808ms step_avg:90.18ms +[2025-08-22 17:08:11] [Rank 0] step:9601/10000 train_time:865808ms step_avg:90.18ms +[2025-08-22 17:08:13] [Rank 0] step:9621/10000 train_time:867543ms step_avg:90.17ms +[2025-08-22 17:08:13] [Rank 0] step:9621/10000 train_time:867543ms step_avg:90.17ms +[2025-08-22 17:08:15] [Rank 0] step:9641/10000 train_time:869445ms step_avg:90.18ms +[2025-08-22 17:08:15] [Rank 0] step:9641/10000 train_time:869445ms step_avg:90.18ms +[2025-08-22 17:08:17] [Rank 0] step:9661/10000 train_time:871375ms step_avg:90.20ms +[2025-08-22 17:08:17] [Rank 0] step:9661/10000 train_time:871375ms step_avg:90.20ms +[2025-08-22 17:08:19] [Rank 0] step:9681/10000 train_time:873295ms step_avg:90.21ms +[2025-08-22 17:08:19] [Rank 0] step:9681/10000 train_time:873295ms step_avg:90.21ms +[2025-08-22 17:08:21] [Rank 0] step:9701/10000 train_time:875236ms step_avg:90.22ms +[2025-08-22 17:08:21] [Rank 0] step:9701/10000 train_time:875236ms step_avg:90.22ms +[2025-08-22 17:08:23] [Rank 0] step:9721/10000 train_time:877158ms step_avg:90.23ms +[2025-08-22 17:08:23] [Rank 0] step:9721/10000 train_time:877158ms step_avg:90.23ms +[2025-08-22 17:08:25] 
[Rank 0] step:9741/10000 train_time:879107ms step_avg:90.25ms +[2025-08-22 17:08:25] [Rank 0] step:9741/10000 train_time:879107ms step_avg:90.25ms +[2025-08-22 17:08:27] [Rank 0] step:9761/10000 train_time:881035ms step_avg:90.26ms +[2025-08-22 17:08:27] [Rank 0] step:9761/10000 train_time:881035ms step_avg:90.26ms +[2025-08-22 17:08:28] [Rank 0] step:9781/10000 train_time:882977ms step_avg:90.27ms +[2025-08-22 17:08:28] [Rank 0] step:9781/10000 train_time:882977ms step_avg:90.27ms +[2025-08-22 17:08:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:08:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:08:44] [Rank 0] PRINT: step:9800/10000 val_loss:3.6557 svd_entropy: attn_qk:H=0.7180,top10E=0.31,eRank=129.6,q75/q25=73.18 attn_vo:H=0.6487,top10E=0.38,eRank=101.7,q75/q25=75.39 mlp_w1:H=0.8088,top10E=0.24,eRank=256.2,q75/q25=7.13 mlp_w2:H=0.9506,top10E=0.06,eRank=561.1,q75/q25=4.11 vo_prod:H=0.5328,top10E=0.57,eRank=50.0,q75/q25=5544.47 train_time:885114ms step_avg:90.32ms +[2025-08-22 17:08:44] [Rank 0] PRINT: step:9800/10000 val_loss:3.6557 svd_entropy: attn_qk:H=0.7180,top10E=0.31,eRank=129.6,q75/q25=73.18 attn_vo:H=0.6487,top10E=0.38,eRank=101.7,q75/q25=75.39 mlp_w1:H=0.8088,top10E=0.24,eRank=256.2,q75/q25=7.13 mlp_w2:H=0.9506,top10E=0.06,eRank=561.1,q75/q25=4.11 vo_prod:H=0.5328,top10E=0.57,eRank=50.0,q75/q25=5544.47 train_time:885114ms step_avg:90.32ms +[2025-08-22 17:08:44] [Rank 0] step:9801/10000 train_time:885127ms step_avg:90.31ms +[2025-08-22 17:08:44] [Rank 0] step:9801/10000 train_time:885127ms step_avg:90.31ms +[2025-08-22 17:08:46] [Rank 0] step:9821/10000 train_time:886866ms step_avg:90.30ms +[2025-08-22 17:08:46] [Rank 0] step:9821/10000 train_time:886866ms step_avg:90.30ms +[2025-08-22 17:08:48] [Rank 0] step:9841/10000 train_time:888806ms step_avg:90.32ms 
+[2025-08-22 17:08:48] [Rank 0] step:9841/10000 train_time:888806ms step_avg:90.32ms +[2025-08-22 17:08:50] [Rank 0] step:9861/10000 train_time:890720ms step_avg:90.33ms +[2025-08-22 17:08:50] [Rank 0] step:9861/10000 train_time:890720ms step_avg:90.33ms +[2025-08-22 17:08:52] [Rank 0] step:9881/10000 train_time:892635ms step_avg:90.34ms +[2025-08-22 17:08:52] [Rank 0] step:9881/10000 train_time:892635ms step_avg:90.34ms +[2025-08-22 17:08:54] [Rank 0] step:9901/10000 train_time:894566ms step_avg:90.35ms +[2025-08-22 17:08:54] [Rank 0] step:9901/10000 train_time:894566ms step_avg:90.35ms +[2025-08-22 17:08:56] [Rank 0] step:9921/10000 train_time:896488ms step_avg:90.36ms +[2025-08-22 17:08:56] [Rank 0] step:9921/10000 train_time:896488ms step_avg:90.36ms +[2025-08-22 17:08:58] [Rank 0] step:9941/10000 train_time:898417ms step_avg:90.37ms +[2025-08-22 17:08:58] [Rank 0] step:9941/10000 train_time:898417ms step_avg:90.37ms +[2025-08-22 17:08:59] [Rank 0] step:9961/10000 train_time:900341ms step_avg:90.39ms +[2025-08-22 17:08:59] [Rank 0] step:9961/10000 train_time:900341ms step_avg:90.39ms +[2025-08-22 17:09:01] [Rank 0] step:9981/10000 train_time:902267ms step_avg:90.40ms +[2025-08-22 17:09:01] [Rank 0] step:9981/10000 train_time:902267ms step_avg:90.40ms +[2025-08-22 17:09:03] [Rank 0] step:10000/10000 train_time:904104ms step_avg:90.41ms +[2025-08-22 17:09:03] [Rank 0] step:10000/10000 train_time:904104ms step_avg:90.41ms +[2025-08-22 17:09:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:09:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:09:17] [Rank 0] PRINT: step:10000/10000 val_loss:3.6483 svd_entropy: attn_qk:H=0.7181,top10E=0.31,eRank=129.7,q75/q25=73.20 attn_vo:H=0.6489,top10E=0.38,eRank=101.8,q75/q25=75.42 mlp_w1:H=0.8089,top10E=0.24,eRank=256.4,q75/q25=7.12 mlp_w2:H=0.9506,top10E=0.06,eRank=561.2,q75/q25=4.11 vo_prod:H=0.5331,top10E=0.57,eRank=50.1,q75/q25=5597.24 train_time:904398ms step_avg:90.44ms +[2025-08-22 17:09:17] [Rank 0] PRINT: step:10000/10000 val_loss:3.6483 svd_entropy: attn_qk:H=0.7181,top10E=0.31,eRank=129.7,q75/q25=73.20 attn_vo:H=0.6489,top10E=0.38,eRank=101.8,q75/q25=75.42 mlp_w1:H=0.8089,top10E=0.24,eRank=256.4,q75/q25=7.12 mlp_w2:H=0.9506,top10E=0.06,eRank=561.2,q75/q25=4.11 vo_prod:H=0.5331,top10E=0.57,eRank=50.1,q75/q25=5597.24 train_time:904398ms step_avg:90.44ms +[2025-08-22 17:09:17] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 17:09:17 2025 --- +[2025-08-22 17:09:17] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 17:09:17 2025 --- +[2025-08-22 17:09:17] [Rank 0] PRINT: Peak memory allocated: 11478 MiB reserved: 16356 MiB +[2025-08-22 17:09:17] [Rank 0] PRINT: Peak memory allocated: 11478 MiB reserved: 16356 MiB diff --git a/logs_svd_gated/mode_6_param_gated_seed_43/config.json b/logs_svd_gated/mode_6_param_gated_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..b28169d56000f23b8f9ee4f74a5c67cbda1c6383 --- /dev/null +++ b/logs_svd_gated/mode_6_param_gated_seed_43/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 6, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "c32de312-95f0-494c-b77b-ee17fd0d5f29", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_6_param_gated_seed_43/training_log_c32de312-95f0-494c-b77b-ee17fd0d5f29.txt b/logs_svd_gated/mode_6_param_gated_seed_43/training_log_c32de312-95f0-494c-b77b-ee17fd0d5f29.txt new file mode 100644 index 0000000000000000000000000000000000000000..95e619db92b1368fe5438afa400d9489c2430856 --- /dev/null +++ b/logs_svd_gated/mode_6_param_gated_seed_43/training_log_c32de312-95f0-494c-b77b-ee17fd0d5f29.txt @@ -0,0 +1,2926 @@ +[2025-08-22 21:55:10] [Rank 0] PRINT: --- Script Start: Fri Aug 22 21:55:10 2025 --- +[2025-08-22 21:55:10] [Rank 0] PRINT: --- Script Start: Fri Aug 22 21:55:10 2025 --- +[2025-08-22 21:55:10] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=6, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 21:55:10] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=6, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 21:55:10] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 21:55:10] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 21:55:10] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 21:55:10] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 21:55:10] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_6_param_gated_seed_43 +[2025-08-22 21:55:10] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_6_param_gated_seed_43 +[2025-08-22 21:55:10] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 21:55:10] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 21:55:10] [Rank 0] PRINT: Constructing model... +[2025-08-22 21:55:10] [Rank 0] PRINT: Constructing model... +[2025-08-22 21:55:12] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 21:55:12] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 21:55:12] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 21:55:12] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 21:55:12] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 21:55:12] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 21:55:12] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 6 +[2025-08-22 21:55:12] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 6 +[2025-08-22 21:55:12] [Rank 0] PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: 0.05). +[2025-08-22 21:55:12] [Rank 0] PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: 0.05). +[2025-08-22 21:55:12] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 21:55:12] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 21:55:12] [Rank 0] PRINT: Muon optimizer is active with 12 parameters. +[2025-08-22 21:55:12] [Rank 0] PRINT: Muon optimizer is active with 12 parameters. +[2025-08-22 21:55:12] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 21:55:12] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 21:55:12] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 21:55:12] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 21:55:12] [Rank 0] PRINT: Starting warmup... +[2025-08-22 21:55:12] [Rank 0] PRINT: Starting warmup... +[2025-08-22 21:55:55] [Rank 0] PRINT: Warmup complete. +[2025-08-22 21:55:55] [Rank 0] PRINT: Warmup complete. +[2025-08-22 21:55:56] [Rank 0] PRINT: Starting training... +[2025-08-22 21:55:56] [Rank 0] PRINT: Starting training... 
+[2025-08-22 21:55:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:55:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:56:13] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 21:56:13] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 21:56:15] [Rank 0] step:21/10000 train_time:1566ms step_avg:74.55ms +[2025-08-22 21:56:15] [Rank 0] step:21/10000 train_time:1566ms step_avg:74.55ms +[2025-08-22 21:56:17] [Rank 0] step:41/10000 train_time:3240ms step_avg:79.03ms +[2025-08-22 21:56:17] [Rank 0] step:41/10000 train_time:3240ms step_avg:79.03ms +[2025-08-22 21:56:18] [Rank 0] step:61/10000 train_time:4916ms step_avg:80.60ms +[2025-08-22 21:56:18] [Rank 0] step:61/10000 train_time:4916ms step_avg:80.60ms +[2025-08-22 21:56:20] [Rank 0] step:81/10000 train_time:6594ms step_avg:81.40ms +[2025-08-22 21:56:20] [Rank 0] step:81/10000 train_time:6594ms step_avg:81.40ms +[2025-08-22 21:56:22] [Rank 0] step:101/10000 train_time:8272ms step_avg:81.90ms +[2025-08-22 21:56:22] [Rank 0] step:101/10000 train_time:8272ms step_avg:81.90ms +[2025-08-22 21:56:23] [Rank 0] step:121/10000 train_time:9951ms step_avg:82.24ms +[2025-08-22 21:56:23] [Rank 0] step:121/10000 
train_time:9951ms step_avg:82.24ms +[2025-08-22 21:56:25] [Rank 0] step:141/10000 train_time:11631ms step_avg:82.49ms +[2025-08-22 21:56:25] [Rank 0] step:141/10000 train_time:11631ms step_avg:82.49ms +[2025-08-22 21:56:27] [Rank 0] step:161/10000 train_time:13314ms step_avg:82.70ms +[2025-08-22 21:56:27] [Rank 0] step:161/10000 train_time:13314ms step_avg:82.70ms +[2025-08-22 21:56:29] [Rank 0] step:181/10000 train_time:14999ms step_avg:82.87ms +[2025-08-22 21:56:29] [Rank 0] step:181/10000 train_time:14999ms step_avg:82.87ms +[2025-08-22 21:56:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:56:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:56:44] [Rank 0] PRINT: step:200/10000 val_loss:7.0230 svd_entropy: attn_qk:H=0.3482,top10E=0.86,eRank=14.9,q75/q25=18.48 attn_vo:H=0.1449,top10E=0.98,eRank=3.0,q75/q25=79.68 mlp_w1:H=0.3933,top10E=0.83,eRank=13.9,q75/q25=6.43 mlp_w2:H=0.6450,top10E=0.41,eRank=73.7,q75/q25=8.86 vo_prod:H=0.0436,top10E=1.00,eRank=1.4,q75/q25=561.74 train_time:16852ms step_avg:84.26ms +[2025-08-22 21:56:44] [Rank 0] PRINT: step:200/10000 val_loss:7.0230 svd_entropy: attn_qk:H=0.3482,top10E=0.86,eRank=14.9,q75/q25=18.48 attn_vo:H=0.1449,top10E=0.98,eRank=3.0,q75/q25=79.68 mlp_w1:H=0.3933,top10E=0.83,eRank=13.9,q75/q25=6.43 mlp_w2:H=0.6450,top10E=0.41,eRank=73.7,q75/q25=8.86 vo_prod:H=0.0436,top10E=1.00,eRank=1.4,q75/q25=561.74 train_time:16852ms step_avg:84.26ms +[2025-08-22 21:56:44] [Rank 0] step:201/10000 train_time:16866ms step_avg:83.91ms +[2025-08-22 21:56:44] [Rank 0] step:201/10000 train_time:16866ms step_avg:83.91ms +[2025-08-22 21:56:46] [Rank 0] step:221/10000 train_time:18383ms step_avg:83.18ms +[2025-08-22 21:56:46] [Rank 0] step:221/10000 train_time:18383ms step_avg:83.18ms +[2025-08-22 21:56:47] [Rank 0] step:241/10000 train_time:20065ms 
step_avg:83.26ms +[2025-08-22 21:56:47] [Rank 0] step:241/10000 train_time:20065ms step_avg:83.26ms +[2025-08-22 21:56:49] [Rank 0] step:261/10000 train_time:21749ms step_avg:83.33ms +[2025-08-22 21:56:49] [Rank 0] step:261/10000 train_time:21749ms step_avg:83.33ms +[2025-08-22 21:56:51] [Rank 0] step:281/10000 train_time:23434ms step_avg:83.39ms +[2025-08-22 21:56:51] [Rank 0] step:281/10000 train_time:23434ms step_avg:83.39ms +[2025-08-22 21:56:52] [Rank 0] step:301/10000 train_time:25119ms step_avg:83.45ms +[2025-08-22 21:56:52] [Rank 0] step:301/10000 train_time:25119ms step_avg:83.45ms +[2025-08-22 21:56:54] [Rank 0] step:321/10000 train_time:26805ms step_avg:83.51ms +[2025-08-22 21:56:54] [Rank 0] step:321/10000 train_time:26805ms step_avg:83.51ms +[2025-08-22 21:56:56] [Rank 0] step:341/10000 train_time:28492ms step_avg:83.56ms +[2025-08-22 21:56:56] [Rank 0] step:341/10000 train_time:28492ms step_avg:83.56ms +[2025-08-22 21:56:57] [Rank 0] step:361/10000 train_time:30181ms step_avg:83.60ms +[2025-08-22 21:56:57] [Rank 0] step:361/10000 train_time:30181ms step_avg:83.60ms +[2025-08-22 21:56:59] [Rank 0] step:381/10000 train_time:31868ms step_avg:83.64ms +[2025-08-22 21:56:59] [Rank 0] step:381/10000 train_time:31868ms step_avg:83.64ms +[2025-08-22 21:57:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:57:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:57:14] [Rank 0] PRINT: step:400/10000 val_loss:5.9851 svd_entropy: attn_qk:H=0.5190,top10E=0.64,eRank=39.4,q75/q25=48.06 attn_vo:H=0.2797,top10E=0.95,eRank=7.6,q75/q25=48.55 mlp_w1:H=0.5662,top10E=0.60,eRank=46.5,q75/q25=7.29 mlp_w2:H=0.8586,top10E=0.12,eRank=304.5,q75/q25=16.83 vo_prod:H=0.1470,top10E=1.00,eRank=3.3,q75/q25=422.91 train_time:33724ms step_avg:84.31ms +[2025-08-22 21:57:14] [Rank 0] PRINT: step:400/10000 val_loss:5.9851 svd_entropy: attn_qk:H=0.5190,top10E=0.64,eRank=39.4,q75/q25=48.06 attn_vo:H=0.2797,top10E=0.95,eRank=7.6,q75/q25=48.55 mlp_w1:H=0.5662,top10E=0.60,eRank=46.5,q75/q25=7.29 mlp_w2:H=0.8586,top10E=0.12,eRank=304.5,q75/q25=16.83 vo_prod:H=0.1470,top10E=1.00,eRank=3.3,q75/q25=422.91 train_time:33724ms step_avg:84.31ms +[2025-08-22 21:57:14] [Rank 0] step:401/10000 train_time:33737ms step_avg:84.13ms +[2025-08-22 21:57:14] [Rank 0] step:401/10000 train_time:33737ms step_avg:84.13ms +[2025-08-22 21:57:16] [Rank 0] step:421/10000 train_time:35255ms step_avg:83.74ms +[2025-08-22 21:57:16] [Rank 0] step:421/10000 train_time:35255ms step_avg:83.74ms +[2025-08-22 21:57:18] [Rank 0] step:441/10000 train_time:36936ms step_avg:83.76ms +[2025-08-22 21:57:18] [Rank 0] step:441/10000 train_time:36936ms step_avg:83.76ms +[2025-08-22 21:57:20] [Rank 0] step:461/10000 train_time:38616ms step_avg:83.77ms +[2025-08-22 21:57:20] [Rank 0] step:461/10000 train_time:38616ms step_avg:83.77ms +[2025-08-22 21:57:21] [Rank 0] step:481/10000 train_time:40297ms step_avg:83.78ms +[2025-08-22 21:57:21] [Rank 0] step:481/10000 train_time:40297ms step_avg:83.78ms +[2025-08-22 21:57:23] [Rank 0] step:501/10000 train_time:41977ms step_avg:83.79ms +[2025-08-22 21:57:23] [Rank 0] step:501/10000 train_time:41977ms step_avg:83.79ms +[2025-08-22 21:57:25] [Rank 0] step:521/10000 train_time:43658ms step_avg:83.80ms +[2025-08-22 21:57:25] [Rank 0] step:521/10000 train_time:43658ms step_avg:83.80ms +[2025-08-22 21:57:26] [Rank 0] step:541/10000 train_time:45338ms 
step_avg:83.80ms +[2025-08-22 21:57:26] [Rank 0] step:541/10000 train_time:45338ms step_avg:83.80ms +[2025-08-22 21:57:28] [Rank 0] step:561/10000 train_time:47038ms step_avg:83.85ms +[2025-08-22 21:57:28] [Rank 0] step:561/10000 train_time:47038ms step_avg:83.85ms +[2025-08-22 21:57:30] [Rank 0] step:581/10000 train_time:48703ms step_avg:83.83ms +[2025-08-22 21:57:30] [Rank 0] step:581/10000 train_time:48703ms step_avg:83.83ms +[2025-08-22 21:57:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:57:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:57:45] [Rank 0] PRINT: step:600/10000 val_loss:5.5165 svd_entropy: attn_qk:H=0.5874,top10E=0.53,eRank=53.2,q75/q25=60.43 attn_vo:H=0.3606,top10E=0.88,eRank=13.3,q75/q25=41.16 mlp_w1:H=0.6318,top10E=0.49,eRank=74.4,q75/q25=7.41 mlp_w2:H=0.9048,top10E=0.08,eRank=415.1,q75/q25=8.99 vo_prod:H=0.2244,top10E=0.98,eRank=5.4,q75/q25=479.35 train_time:50556ms step_avg:84.26ms +[2025-08-22 21:57:45] [Rank 0] PRINT: step:600/10000 val_loss:5.5165 svd_entropy: attn_qk:H=0.5874,top10E=0.53,eRank=53.2,q75/q25=60.43 attn_vo:H=0.3606,top10E=0.88,eRank=13.3,q75/q25=41.16 mlp_w1:H=0.6318,top10E=0.49,eRank=74.4,q75/q25=7.41 mlp_w2:H=0.9048,top10E=0.08,eRank=415.1,q75/q25=8.99 vo_prod:H=0.2244,top10E=0.98,eRank=5.4,q75/q25=479.35 train_time:50556ms step_avg:84.26ms +[2025-08-22 21:57:45] [Rank 0] step:601/10000 train_time:50568ms step_avg:84.14ms +[2025-08-22 21:57:45] [Rank 0] step:601/10000 train_time:50568ms step_avg:84.14ms +[2025-08-22 21:57:47] [Rank 0] step:621/10000 train_time:52098ms step_avg:83.89ms +[2025-08-22 21:57:47] [Rank 0] step:621/10000 train_time:52098ms step_avg:83.89ms +[2025-08-22 21:57:48] [Rank 0] step:641/10000 train_time:53776ms step_avg:83.89ms +[2025-08-22 21:57:48] [Rank 0] step:641/10000 train_time:53776ms 
step_avg:83.89ms +[2025-08-22 21:57:50] [Rank 0] step:661/10000 train_time:55454ms step_avg:83.89ms +[2025-08-22 21:57:50] [Rank 0] step:661/10000 train_time:55454ms step_avg:83.89ms +[2025-08-22 21:57:52] [Rank 0] step:681/10000 train_time:57134ms step_avg:83.90ms +[2025-08-22 21:57:52] [Rank 0] step:681/10000 train_time:57134ms step_avg:83.90ms +[2025-08-22 21:57:54] [Rank 0] step:701/10000 train_time:58815ms step_avg:83.90ms +[2025-08-22 21:57:54] [Rank 0] step:701/10000 train_time:58815ms step_avg:83.90ms +[2025-08-22 21:57:55] [Rank 0] step:721/10000 train_time:60497ms step_avg:83.91ms +[2025-08-22 21:57:55] [Rank 0] step:721/10000 train_time:60497ms step_avg:83.91ms +[2025-08-22 21:57:57] [Rank 0] step:741/10000 train_time:62181ms step_avg:83.91ms +[2025-08-22 21:57:57] [Rank 0] step:741/10000 train_time:62181ms step_avg:83.91ms +[2025-08-22 21:57:59] [Rank 0] step:761/10000 train_time:63876ms step_avg:83.94ms +[2025-08-22 21:57:59] [Rank 0] step:761/10000 train_time:63876ms step_avg:83.94ms +[2025-08-22 21:58:00] [Rank 0] step:781/10000 train_time:65574ms step_avg:83.96ms +[2025-08-22 21:58:00] [Rank 0] step:781/10000 train_time:65574ms step_avg:83.96ms +[2025-08-22 21:58:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:58:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:58:16] [Rank 0] PRINT: step:800/10000 val_loss:5.2073 svd_entropy: attn_qk:H=0.6026,top10E=0.50,eRank=58.2,q75/q25=62.71 attn_vo:H=0.4092,top10E=0.82,eRank=19.2,q75/q25=42.12 mlp_w1:H=0.6603,top10E=0.45,eRank=90.6,q75/q25=7.51 mlp_w2:H=0.9176,top10E=0.08,eRank=452.0,q75/q25=7.30 vo_prod:H=0.2854,top10E=0.95,eRank=8.0,q75/q25=556.79 train_time:67442ms step_avg:84.30ms +[2025-08-22 21:58:16] [Rank 0] PRINT: step:800/10000 val_loss:5.2073 svd_entropy: attn_qk:H=0.6026,top10E=0.50,eRank=58.2,q75/q25=62.71 attn_vo:H=0.4092,top10E=0.82,eRank=19.2,q75/q25=42.12 mlp_w1:H=0.6603,top10E=0.45,eRank=90.6,q75/q25=7.51 mlp_w2:H=0.9176,top10E=0.08,eRank=452.0,q75/q25=7.30 vo_prod:H=0.2854,top10E=0.95,eRank=8.0,q75/q25=556.79 train_time:67442ms step_avg:84.30ms +[2025-08-22 21:58:16] [Rank 0] step:801/10000 train_time:67454ms step_avg:84.21ms +[2025-08-22 21:58:16] [Rank 0] step:801/10000 train_time:67454ms step_avg:84.21ms +[2025-08-22 21:58:17] [Rank 0] step:821/10000 train_time:68995ms step_avg:84.04ms +[2025-08-22 21:58:17] [Rank 0] step:821/10000 train_time:68995ms step_avg:84.04ms +[2025-08-22 21:58:19] [Rank 0] step:841/10000 train_time:70688ms step_avg:84.05ms +[2025-08-22 21:58:19] [Rank 0] step:841/10000 train_time:70688ms step_avg:84.05ms +[2025-08-22 21:58:21] [Rank 0] step:861/10000 train_time:72383ms step_avg:84.07ms +[2025-08-22 21:58:21] [Rank 0] step:861/10000 train_time:72383ms step_avg:84.07ms +[2025-08-22 21:58:23] [Rank 0] step:881/10000 train_time:74083ms step_avg:84.09ms +[2025-08-22 21:58:23] [Rank 0] step:881/10000 train_time:74083ms step_avg:84.09ms +[2025-08-22 21:58:24] [Rank 0] step:901/10000 train_time:75781ms step_avg:84.11ms +[2025-08-22 21:58:24] [Rank 0] step:901/10000 train_time:75781ms step_avg:84.11ms +[2025-08-22 21:58:26] [Rank 0] step:921/10000 train_time:77481ms step_avg:84.13ms +[2025-08-22 21:58:26] [Rank 0] step:921/10000 train_time:77481ms step_avg:84.13ms +[2025-08-22 21:58:28] [Rank 0] step:941/10000 train_time:79180ms 
step_avg:84.14ms +[2025-08-22 21:58:28] [Rank 0] step:941/10000 train_time:79180ms step_avg:84.14ms +[2025-08-22 21:58:29] [Rank 0] step:961/10000 train_time:80881ms step_avg:84.16ms +[2025-08-22 21:58:29] [Rank 0] step:961/10000 train_time:80881ms step_avg:84.16ms +[2025-08-22 21:58:31] [Rank 0] step:981/10000 train_time:82704ms step_avg:84.31ms +[2025-08-22 21:58:31] [Rank 0] step:981/10000 train_time:82704ms step_avg:84.31ms +[2025-08-22 21:58:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:58:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:58:46] [Rank 0] PRINT: step:1000/10000 val_loss:4.9918 svd_entropy: attn_qk:H=0.6111,top10E=0.48,eRank=62.4,q75/q25=63.29 attn_vo:H=0.4379,top10E=0.77,eRank=24.3,q75/q25=44.06 mlp_w1:H=0.6750,top10E=0.42,eRank=102.1,q75/q25=7.72 mlp_w2:H=0.9251,top10E=0.07,eRank=475.1,q75/q25=6.46 vo_prod:H=0.3195,top10E=0.92,eRank=10.5,q75/q25=637.53 train_time:84454ms step_avg:84.45ms +[2025-08-22 21:58:46] [Rank 0] PRINT: step:1000/10000 val_loss:4.9918 svd_entropy: attn_qk:H=0.6111,top10E=0.48,eRank=62.4,q75/q25=63.29 attn_vo:H=0.4379,top10E=0.77,eRank=24.3,q75/q25=44.06 mlp_w1:H=0.6750,top10E=0.42,eRank=102.1,q75/q25=7.72 mlp_w2:H=0.9251,top10E=0.07,eRank=475.1,q75/q25=6.46 vo_prod:H=0.3195,top10E=0.92,eRank=10.5,q75/q25=637.53 train_time:84454ms step_avg:84.45ms +[2025-08-22 21:58:46] [Rank 0] step:1001/10000 train_time:84467ms step_avg:84.38ms +[2025-08-22 21:58:46] [Rank 0] step:1001/10000 train_time:84467ms step_avg:84.38ms +[2025-08-22 21:58:48] [Rank 0] step:1021/10000 train_time:85992ms step_avg:84.22ms +[2025-08-22 21:58:48] [Rank 0] step:1021/10000 train_time:85992ms step_avg:84.22ms +[2025-08-22 21:58:50] [Rank 0] step:1041/10000 train_time:87684ms step_avg:84.23ms +[2025-08-22 21:58:50] [Rank 0] step:1041/10000 train_time:87684ms 
step_avg:84.23ms +[2025-08-22 21:58:51] [Rank 0] step:1061/10000 train_time:89377ms step_avg:84.24ms +[2025-08-22 21:58:51] [Rank 0] step:1061/10000 train_time:89377ms step_avg:84.24ms +[2025-08-22 21:58:53] [Rank 0] step:1081/10000 train_time:91072ms step_avg:84.25ms +[2025-08-22 21:58:53] [Rank 0] step:1081/10000 train_time:91072ms step_avg:84.25ms +[2025-08-22 21:58:55] [Rank 0] step:1101/10000 train_time:92770ms step_avg:84.26ms +[2025-08-22 21:58:55] [Rank 0] step:1101/10000 train_time:92770ms step_avg:84.26ms +[2025-08-22 21:58:57] [Rank 0] step:1121/10000 train_time:94467ms step_avg:84.27ms +[2025-08-22 21:58:57] [Rank 0] step:1121/10000 train_time:94467ms step_avg:84.27ms +[2025-08-22 21:58:58] [Rank 0] step:1141/10000 train_time:96164ms step_avg:84.28ms +[2025-08-22 21:58:58] [Rank 0] step:1141/10000 train_time:96164ms step_avg:84.28ms +[2025-08-22 21:59:00] [Rank 0] step:1161/10000 train_time:97860ms step_avg:84.29ms +[2025-08-22 21:59:00] [Rank 0] step:1161/10000 train_time:97860ms step_avg:84.29ms +[2025-08-22 21:59:02] [Rank 0] step:1181/10000 train_time:99558ms step_avg:84.30ms +[2025-08-22 21:59:02] [Rank 0] step:1181/10000 train_time:99558ms step_avg:84.30ms +[2025-08-22 21:59:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:59:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 21:59:17] [Rank 0] PRINT: step:1200/10000 val_loss:4.8094 svd_entropy: attn_qk:H=0.6197,top10E=0.47,eRank=66.6,q75/q25=64.80 attn_vo:H=0.4605,top10E=0.73,eRank=29.1,q75/q25=46.18 mlp_w1:H=0.6930,top10E=0.40,eRank=114.1,q75/q25=7.89 mlp_w2:H=0.9300,top10E=0.07,eRank=491.4,q75/q25=5.94 vo_prod:H=0.3437,top10E=0.88,eRank=12.8,q75/q25=780.32 train_time:101425ms step_avg:84.52ms +[2025-08-22 21:59:17] [Rank 0] PRINT: step:1200/10000 val_loss:4.8094 svd_entropy: attn_qk:H=0.6197,top10E=0.47,eRank=66.6,q75/q25=64.80 attn_vo:H=0.4605,top10E=0.73,eRank=29.1,q75/q25=46.18 mlp_w1:H=0.6930,top10E=0.40,eRank=114.1,q75/q25=7.89 mlp_w2:H=0.9300,top10E=0.07,eRank=491.4,q75/q25=5.94 vo_prod:H=0.3437,top10E=0.88,eRank=12.8,q75/q25=780.32 train_time:101425ms step_avg:84.52ms +[2025-08-22 21:59:17] [Rank 0] step:1201/10000 train_time:101438ms step_avg:84.46ms +[2025-08-22 21:59:17] [Rank 0] step:1201/10000 train_time:101438ms step_avg:84.46ms +[2025-08-22 21:59:19] [Rank 0] step:1221/10000 train_time:102985ms step_avg:84.34ms +[2025-08-22 21:59:19] [Rank 0] step:1221/10000 train_time:102985ms step_avg:84.34ms +[2025-08-22 21:59:20] [Rank 0] step:1241/10000 train_time:104679ms step_avg:84.35ms +[2025-08-22 21:59:20] [Rank 0] step:1241/10000 train_time:104679ms step_avg:84.35ms +[2025-08-22 21:59:22] [Rank 0] step:1261/10000 train_time:106374ms step_avg:84.36ms +[2025-08-22 21:59:22] [Rank 0] step:1261/10000 train_time:106374ms step_avg:84.36ms +[2025-08-22 21:59:24] [Rank 0] step:1281/10000 train_time:108066ms step_avg:84.36ms +[2025-08-22 21:59:24] [Rank 0] step:1281/10000 train_time:108066ms step_avg:84.36ms +[2025-08-22 21:59:26] [Rank 0] step:1301/10000 train_time:109762ms step_avg:84.37ms +[2025-08-22 21:59:26] [Rank 0] step:1301/10000 train_time:109762ms step_avg:84.37ms +[2025-08-22 21:59:27] [Rank 0] step:1321/10000 train_time:111458ms step_avg:84.37ms +[2025-08-22 21:59:27] [Rank 0] step:1321/10000 train_time:111458ms step_avg:84.37ms +[2025-08-22 21:59:29] [Rank 
0] step:1341/10000 train_time:113153ms step_avg:84.38ms +[2025-08-22 21:59:29] [Rank 0] step:1341/10000 train_time:113153ms step_avg:84.38ms +[2025-08-22 21:59:31] [Rank 0] step:1361/10000 train_time:114849ms step_avg:84.39ms +[2025-08-22 21:59:31] [Rank 0] step:1361/10000 train_time:114849ms step_avg:84.39ms +[2025-08-22 21:59:32] [Rank 0] step:1381/10000 train_time:116544ms step_avg:84.39ms +[2025-08-22 21:59:32] [Rank 0] step:1381/10000 train_time:116544ms step_avg:84.39ms +[2025-08-22 21:59:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:59:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 21:59:47] [Rank 0] PRINT: step:1400/10000 val_loss:4.7088 svd_entropy: attn_qk:H=0.6267,top10E=0.45,eRank=70.3,q75/q25=66.80 attn_vo:H=0.4766,top10E=0.70,eRank=33.1,q75/q25=49.12 mlp_w1:H=0.7050,top10E=0.38,eRank=122.9,q75/q25=8.03 mlp_w2:H=0.9339,top10E=0.07,eRank=504.2,q75/q25=5.54 vo_prod:H=0.3592,top10E=0.86,eRank=15.0,q75/q25=997.10 train_time:118411ms step_avg:84.58ms +[2025-08-22 21:59:47] [Rank 0] PRINT: step:1400/10000 val_loss:4.7088 svd_entropy: attn_qk:H=0.6267,top10E=0.45,eRank=70.3,q75/q25=66.80 attn_vo:H=0.4766,top10E=0.70,eRank=33.1,q75/q25=49.12 mlp_w1:H=0.7050,top10E=0.38,eRank=122.9,q75/q25=8.03 mlp_w2:H=0.9339,top10E=0.07,eRank=504.2,q75/q25=5.54 vo_prod:H=0.3592,top10E=0.86,eRank=15.0,q75/q25=997.10 train_time:118411ms step_avg:84.58ms +[2025-08-22 21:59:48] [Rank 0] step:1401/10000 train_time:118423ms step_avg:84.53ms +[2025-08-22 21:59:48] [Rank 0] step:1401/10000 train_time:118423ms step_avg:84.53ms +[2025-08-22 21:59:49] [Rank 0] step:1421/10000 train_time:119963ms step_avg:84.42ms +[2025-08-22 21:59:49] [Rank 0] step:1421/10000 train_time:119963ms step_avg:84.42ms +[2025-08-22 21:59:51] [Rank 0] step:1441/10000 train_time:121653ms step_avg:84.42ms +[2025-08-22 
21:59:51] [Rank 0] step:1441/10000 train_time:121653ms step_avg:84.42ms +[2025-08-22 21:59:53] [Rank 0] step:1461/10000 train_time:123345ms step_avg:84.43ms +[2025-08-22 21:59:53] [Rank 0] step:1461/10000 train_time:123345ms step_avg:84.43ms +[2025-08-22 21:59:54] [Rank 0] step:1481/10000 train_time:125037ms step_avg:84.43ms +[2025-08-22 21:59:54] [Rank 0] step:1481/10000 train_time:125037ms step_avg:84.43ms +[2025-08-22 21:59:56] [Rank 0] step:1501/10000 train_time:126739ms step_avg:84.44ms +[2025-08-22 21:59:56] [Rank 0] step:1501/10000 train_time:126739ms step_avg:84.44ms +[2025-08-22 21:59:58] [Rank 0] step:1521/10000 train_time:128444ms step_avg:84.45ms +[2025-08-22 21:59:58] [Rank 0] step:1521/10000 train_time:128444ms step_avg:84.45ms +[2025-08-22 21:59:59] [Rank 0] step:1541/10000 train_time:130151ms step_avg:84.46ms +[2025-08-22 21:59:59] [Rank 0] step:1541/10000 train_time:130151ms step_avg:84.46ms +[2025-08-22 22:00:01] [Rank 0] step:1561/10000 train_time:131859ms step_avg:84.47ms +[2025-08-22 22:00:01] [Rank 0] step:1561/10000 train_time:131859ms step_avg:84.47ms +[2025-08-22 22:00:03] [Rank 0] step:1581/10000 train_time:133568ms step_avg:84.48ms +[2025-08-22 22:00:03] [Rank 0] step:1581/10000 train_time:133568ms step_avg:84.48ms +[2025-08-22 22:00:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:00:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:00:18] [Rank 0] PRINT: step:1600/10000 val_loss:4.5828 svd_entropy: attn_qk:H=0.6323,top10E=0.44,eRank=73.6,q75/q25=69.09 attn_vo:H=0.4890,top10E=0.68,eRank=36.5,q75/q25=52.24 mlp_w1:H=0.7143,top10E=0.37,eRank=130.7,q75/q25=8.11 mlp_w2:H=0.9368,top10E=0.07,eRank=514.0,q75/q25=5.26 vo_prod:H=0.3702,top10E=0.84,eRank=16.8,q75/q25=1235.99 train_time:135447ms step_avg:84.65ms +[2025-08-22 22:00:18] [Rank 0] PRINT: step:1600/10000 val_loss:4.5828 svd_entropy: attn_qk:H=0.6323,top10E=0.44,eRank=73.6,q75/q25=69.09 attn_vo:H=0.4890,top10E=0.68,eRank=36.5,q75/q25=52.24 mlp_w1:H=0.7143,top10E=0.37,eRank=130.7,q75/q25=8.11 mlp_w2:H=0.9368,top10E=0.07,eRank=514.0,q75/q25=5.26 vo_prod:H=0.3702,top10E=0.84,eRank=16.8,q75/q25=1235.99 train_time:135447ms step_avg:84.65ms +[2025-08-22 22:00:18] [Rank 0] step:1601/10000 train_time:135460ms step_avg:84.61ms +[2025-08-22 22:00:18] [Rank 0] step:1601/10000 train_time:135460ms step_avg:84.61ms +[2025-08-22 22:00:20] [Rank 0] step:1621/10000 train_time:136999ms step_avg:84.52ms +[2025-08-22 22:00:20] [Rank 0] step:1621/10000 train_time:136999ms step_avg:84.52ms +[2025-08-22 22:00:22] [Rank 0] step:1641/10000 train_time:138707ms step_avg:84.53ms +[2025-08-22 22:00:22] [Rank 0] step:1641/10000 train_time:138707ms step_avg:84.53ms +[2025-08-22 22:00:23] [Rank 0] step:1661/10000 train_time:140414ms step_avg:84.54ms +[2025-08-22 22:00:23] [Rank 0] step:1661/10000 train_time:140414ms step_avg:84.54ms +[2025-08-22 22:00:25] [Rank 0] step:1681/10000 train_time:142123ms step_avg:84.55ms +[2025-08-22 22:00:25] [Rank 0] step:1681/10000 train_time:142123ms step_avg:84.55ms +[2025-08-22 22:00:27] [Rank 0] step:1701/10000 train_time:143832ms step_avg:84.56ms +[2025-08-22 22:00:27] [Rank 0] step:1701/10000 train_time:143832ms step_avg:84.56ms +[2025-08-22 22:00:29] [Rank 0] step:1721/10000 train_time:145542ms step_avg:84.57ms +[2025-08-22 22:00:29] [Rank 0] step:1721/10000 train_time:145542ms step_avg:84.57ms +[2025-08-22 22:00:30] 
[Rank 0] step:1741/10000 train_time:147256ms step_avg:84.58ms +[2025-08-22 22:00:30] [Rank 0] step:1741/10000 train_time:147256ms step_avg:84.58ms +[2025-08-22 22:00:32] [Rank 0] step:1761/10000 train_time:148968ms step_avg:84.59ms +[2025-08-22 22:00:32] [Rank 0] step:1761/10000 train_time:148968ms step_avg:84.59ms +[2025-08-22 22:00:34] [Rank 0] step:1781/10000 train_time:150682ms step_avg:84.61ms +[2025-08-22 22:00:34] [Rank 0] step:1781/10000 train_time:150682ms step_avg:84.61ms +[2025-08-22 22:00:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:00:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:00:49] [Rank 0] PRINT: step:1800/10000 val_loss:4.4979 svd_entropy: attn_qk:H=0.6361,top10E=0.44,eRank=76.2,q75/q25=69.61 attn_vo:H=0.5000,top10E=0.66,eRank=39.7,q75/q25=55.16 mlp_w1:H=0.7223,top10E=0.36,eRank=138.0,q75/q25=8.12 mlp_w2:H=0.9387,top10E=0.07,eRank=520.9,q75/q25=5.06 vo_prod:H=0.3829,top10E=0.82,eRank=18.6,q75/q25=1557.55 train_time:152564ms step_avg:84.76ms +[2025-08-22 22:00:49] [Rank 0] PRINT: step:1800/10000 val_loss:4.4979 svd_entropy: attn_qk:H=0.6361,top10E=0.44,eRank=76.2,q75/q25=69.61 attn_vo:H=0.5000,top10E=0.66,eRank=39.7,q75/q25=55.16 mlp_w1:H=0.7223,top10E=0.36,eRank=138.0,q75/q25=8.12 mlp_w2:H=0.9387,top10E=0.07,eRank=520.9,q75/q25=5.06 vo_prod:H=0.3829,top10E=0.82,eRank=18.6,q75/q25=1557.55 train_time:152564ms step_avg:84.76ms +[2025-08-22 22:00:49] [Rank 0] step:1801/10000 train_time:152576ms step_avg:84.72ms +[2025-08-22 22:00:49] [Rank 0] step:1801/10000 train_time:152576ms step_avg:84.72ms +[2025-08-22 22:00:51] [Rank 0] step:1821/10000 train_time:154118ms step_avg:84.63ms +[2025-08-22 22:00:51] [Rank 0] step:1821/10000 train_time:154118ms step_avg:84.63ms +[2025-08-22 22:00:52] [Rank 0] step:1841/10000 train_time:155824ms step_avg:84.64ms 
+[2025-08-22 22:00:52] [Rank 0] step:1841/10000 train_time:155824ms step_avg:84.64ms +[2025-08-22 22:00:54] [Rank 0] step:1861/10000 train_time:157531ms step_avg:84.65ms +[2025-08-22 22:00:54] [Rank 0] step:1861/10000 train_time:157531ms step_avg:84.65ms +[2025-08-22 22:00:56] [Rank 0] step:1881/10000 train_time:159238ms step_avg:84.66ms +[2025-08-22 22:00:56] [Rank 0] step:1881/10000 train_time:159238ms step_avg:84.66ms +[2025-08-22 22:00:58] [Rank 0] step:1901/10000 train_time:160946ms step_avg:84.66ms +[2025-08-22 22:00:58] [Rank 0] step:1901/10000 train_time:160946ms step_avg:84.66ms +[2025-08-22 22:00:59] [Rank 0] step:1921/10000 train_time:162657ms step_avg:84.67ms +[2025-08-22 22:00:59] [Rank 0] step:1921/10000 train_time:162657ms step_avg:84.67ms +[2025-08-22 22:01:01] [Rank 0] step:1941/10000 train_time:164365ms step_avg:84.68ms +[2025-08-22 22:01:01] [Rank 0] step:1941/10000 train_time:164365ms step_avg:84.68ms +[2025-08-22 22:01:03] [Rank 0] step:1961/10000 train_time:166076ms step_avg:84.69ms +[2025-08-22 22:01:03] [Rank 0] step:1961/10000 train_time:166076ms step_avg:84.69ms +[2025-08-22 22:01:04] [Rank 0] step:1981/10000 train_time:167786ms step_avg:84.70ms +[2025-08-22 22:01:04] [Rank 0] step:1981/10000 train_time:167786ms step_avg:84.70ms +[2025-08-22 22:01:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:01:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:01:20] [Rank 0] PRINT: step:2000/10000 val_loss:4.4503 svd_entropy: attn_qk:H=0.6406,top10E=0.43,eRank=78.9,q75/q25=70.96 attn_vo:H=0.5109,top10E=0.64,eRank=43.1,q75/q25=58.48 mlp_w1:H=0.7295,top10E=0.34,eRank=144.7,q75/q25=8.11 mlp_w2:H=0.9404,top10E=0.07,eRank=526.6,q75/q25=4.91 vo_prod:H=0.3954,top10E=0.80,eRank=20.4,q75/q25=1958.48 train_time:169668ms step_avg:84.83ms +[2025-08-22 22:01:20] [Rank 0] PRINT: step:2000/10000 val_loss:4.4503 svd_entropy: attn_qk:H=0.6406,top10E=0.43,eRank=78.9,q75/q25=70.96 attn_vo:H=0.5109,top10E=0.64,eRank=43.1,q75/q25=58.48 mlp_w1:H=0.7295,top10E=0.34,eRank=144.7,q75/q25=8.11 mlp_w2:H=0.9404,top10E=0.07,eRank=526.6,q75/q25=4.91 vo_prod:H=0.3954,top10E=0.80,eRank=20.4,q75/q25=1958.48 train_time:169668ms step_avg:84.83ms +[2025-08-22 22:01:20] [Rank 0] step:2001/10000 train_time:169679ms step_avg:84.80ms +[2025-08-22 22:01:20] [Rank 0] step:2001/10000 train_time:169679ms step_avg:84.80ms +[2025-08-22 22:01:22] [Rank 0] step:2021/10000 train_time:171224ms step_avg:84.72ms +[2025-08-22 22:01:22] [Rank 0] step:2021/10000 train_time:171224ms step_avg:84.72ms +[2025-08-22 22:01:24] [Rank 0] step:2041/10000 train_time:173430ms step_avg:84.97ms +[2025-08-22 22:01:24] [Rank 0] step:2041/10000 train_time:173430ms step_avg:84.97ms +[2025-08-22 22:01:25] [Rank 0] step:2061/10000 train_time:175135ms step_avg:84.98ms +[2025-08-22 22:01:25] [Rank 0] step:2061/10000 train_time:175135ms step_avg:84.98ms +[2025-08-22 22:01:27] [Rank 0] step:2081/10000 train_time:176839ms step_avg:84.98ms +[2025-08-22 22:01:27] [Rank 0] step:2081/10000 train_time:176839ms step_avg:84.98ms +[2025-08-22 22:01:29] [Rank 0] step:2101/10000 train_time:178545ms step_avg:84.98ms +[2025-08-22 22:01:29] [Rank 0] step:2101/10000 train_time:178545ms step_avg:84.98ms +[2025-08-22 22:01:31] [Rank 0] step:2121/10000 train_time:180252ms step_avg:84.98ms +[2025-08-22 22:01:31] [Rank 0] step:2121/10000 train_time:180252ms step_avg:84.98ms +[2025-08-22 22:01:32] 
[Rank 0] step:2141/10000 train_time:181960ms step_avg:84.99ms +[2025-08-22 22:01:32] [Rank 0] step:2141/10000 train_time:181960ms step_avg:84.99ms +[2025-08-22 22:01:34] [Rank 0] step:2161/10000 train_time:183670ms step_avg:84.99ms +[2025-08-22 22:01:34] [Rank 0] step:2161/10000 train_time:183670ms step_avg:84.99ms +[2025-08-22 22:01:36] [Rank 0] step:2181/10000 train_time:185381ms step_avg:85.00ms +[2025-08-22 22:01:36] [Rank 0] step:2181/10000 train_time:185381ms step_avg:85.00ms +[2025-08-22 22:01:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:01:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:01:51] [Rank 0] PRINT: step:2200/10000 val_loss:4.3908 svd_entropy: attn_qk:H=0.6439,top10E=0.42,eRank=81.2,q75/q25=70.89 attn_vo:H=0.5202,top10E=0.62,eRank=46.0,q75/q25=61.37 mlp_w1:H=0.7355,top10E=0.33,eRank=150.9,q75/q25=8.08 mlp_w2:H=0.9415,top10E=0.07,eRank=530.5,q75/q25=4.81 vo_prod:H=0.4084,top10E=0.79,eRank=22.0,q75/q25=2356.95 train_time:187262ms step_avg:85.12ms +[2025-08-22 22:01:51] [Rank 0] PRINT: step:2200/10000 val_loss:4.3908 svd_entropy: attn_qk:H=0.6439,top10E=0.42,eRank=81.2,q75/q25=70.89 attn_vo:H=0.5202,top10E=0.62,eRank=46.0,q75/q25=61.37 mlp_w1:H=0.7355,top10E=0.33,eRank=150.9,q75/q25=8.08 mlp_w2:H=0.9415,top10E=0.07,eRank=530.5,q75/q25=4.81 vo_prod:H=0.4084,top10E=0.79,eRank=22.0,q75/q25=2356.95 train_time:187262ms step_avg:85.12ms +[2025-08-22 22:01:51] [Rank 0] step:2201/10000 train_time:187274ms step_avg:85.09ms +[2025-08-22 22:01:51] [Rank 0] step:2201/10000 train_time:187274ms step_avg:85.09ms +[2025-08-22 22:01:53] [Rank 0] step:2221/10000 train_time:188811ms step_avg:85.01ms +[2025-08-22 22:01:53] [Rank 0] step:2221/10000 train_time:188811ms step_avg:85.01ms +[2025-08-22 22:01:55] [Rank 0] step:2241/10000 train_time:190553ms step_avg:85.03ms 
+[2025-08-22 22:01:55] [Rank 0] step:2241/10000 train_time:190553ms step_avg:85.03ms +[2025-08-22 22:01:56] [Rank 0] step:2261/10000 train_time:192305ms step_avg:85.05ms +[2025-08-22 22:01:56] [Rank 0] step:2261/10000 train_time:192305ms step_avg:85.05ms +[2025-08-22 22:01:58] [Rank 0] step:2281/10000 train_time:194062ms step_avg:85.08ms +[2025-08-22 22:01:58] [Rank 0] step:2281/10000 train_time:194062ms step_avg:85.08ms +[2025-08-22 22:02:00] [Rank 0] step:2301/10000 train_time:195818ms step_avg:85.10ms +[2025-08-22 22:02:00] [Rank 0] step:2301/10000 train_time:195818ms step_avg:85.10ms +[2025-08-22 22:02:02] [Rank 0] step:2321/10000 train_time:197575ms step_avg:85.12ms +[2025-08-22 22:02:02] [Rank 0] step:2321/10000 train_time:197575ms step_avg:85.12ms +[2025-08-22 22:02:04] [Rank 0] step:2341/10000 train_time:199335ms step_avg:85.15ms +[2025-08-22 22:02:04] [Rank 0] step:2341/10000 train_time:199335ms step_avg:85.15ms +[2025-08-22 22:02:05] [Rank 0] step:2361/10000 train_time:201092ms step_avg:85.17ms +[2025-08-22 22:02:05] [Rank 0] step:2361/10000 train_time:201092ms step_avg:85.17ms +[2025-08-22 22:02:07] [Rank 0] step:2381/10000 train_time:202852ms step_avg:85.20ms +[2025-08-22 22:02:07] [Rank 0] step:2381/10000 train_time:202852ms step_avg:85.20ms +[2025-08-22 22:02:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:02:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:02:23] [Rank 0] PRINT: step:2400/10000 val_loss:4.3137 svd_entropy: attn_qk:H=0.6467,top10E=0.42,eRank=83.3,q75/q25=71.58 attn_vo:H=0.5280,top10E=0.60,eRank=48.7,q75/q25=65.14 mlp_w1:H=0.7388,top10E=0.33,eRank=156.2,q75/q25=8.08 mlp_w2:H=0.9421,top10E=0.07,eRank=533.4,q75/q25=4.71 vo_prod:H=0.4168,top10E=0.77,eRank=23.4,q75/q25=2791.21 train_time:204787ms step_avg:85.33ms +[2025-08-22 22:02:23] [Rank 0] PRINT: step:2400/10000 val_loss:4.3137 svd_entropy: attn_qk:H=0.6467,top10E=0.42,eRank=83.3,q75/q25=71.58 attn_vo:H=0.5280,top10E=0.60,eRank=48.7,q75/q25=65.14 mlp_w1:H=0.7388,top10E=0.33,eRank=156.2,q75/q25=8.08 mlp_w2:H=0.9421,top10E=0.07,eRank=533.4,q75/q25=4.71 vo_prod:H=0.4168,top10E=0.77,eRank=23.4,q75/q25=2791.21 train_time:204787ms step_avg:85.33ms +[2025-08-22 22:02:23] [Rank 0] step:2401/10000 train_time:204800ms step_avg:85.30ms +[2025-08-22 22:02:23] [Rank 0] step:2401/10000 train_time:204800ms step_avg:85.30ms +[2025-08-22 22:02:24] [Rank 0] step:2421/10000 train_time:206396ms step_avg:85.25ms +[2025-08-22 22:02:24] [Rank 0] step:2421/10000 train_time:206396ms step_avg:85.25ms +[2025-08-22 22:02:26] [Rank 0] step:2441/10000 train_time:208143ms step_avg:85.27ms +[2025-08-22 22:02:26] [Rank 0] step:2441/10000 train_time:208143ms step_avg:85.27ms +[2025-08-22 22:02:28] [Rank 0] step:2461/10000 train_time:209894ms step_avg:85.29ms +[2025-08-22 22:02:28] [Rank 0] step:2461/10000 train_time:209894ms step_avg:85.29ms +[2025-08-22 22:02:30] [Rank 0] step:2481/10000 train_time:211644ms step_avg:85.31ms +[2025-08-22 22:02:30] [Rank 0] step:2481/10000 train_time:211644ms step_avg:85.31ms +[2025-08-22 22:02:31] [Rank 0] step:2501/10000 train_time:213395ms step_avg:85.32ms +[2025-08-22 22:02:31] [Rank 0] step:2501/10000 train_time:213395ms step_avg:85.32ms +[2025-08-22 22:02:33] [Rank 0] step:2521/10000 train_time:215149ms step_avg:85.34ms +[2025-08-22 22:02:33] [Rank 0] step:2521/10000 train_time:215149ms step_avg:85.34ms +[2025-08-22 22:02:35] 
[Rank 0] step:2541/10000 train_time:216903ms step_avg:85.36ms +[2025-08-22 22:02:35] [Rank 0] step:2541/10000 train_time:216903ms step_avg:85.36ms +[2025-08-22 22:02:37] [Rank 0] step:2561/10000 train_time:218659ms step_avg:85.38ms +[2025-08-22 22:02:37] [Rank 0] step:2561/10000 train_time:218659ms step_avg:85.38ms +[2025-08-22 22:02:38] [Rank 0] step:2581/10000 train_time:220414ms step_avg:85.40ms +[2025-08-22 22:02:38] [Rank 0] step:2581/10000 train_time:220414ms step_avg:85.40ms +[2025-08-22 22:02:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:02:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:02:54] [Rank 0] PRINT: step:2600/10000 val_loss:4.2728 svd_entropy: attn_qk:H=0.6503,top10E=0.41,eRank=85.6,q75/q25=72.10 attn_vo:H=0.5357,top10E=0.59,eRank=51.3,q75/q25=67.71 mlp_w1:H=0.7457,top10E=0.32,eRank=162.2,q75/q25=8.03 mlp_w2:H=0.9428,top10E=0.07,eRank=535.9,q75/q25=4.60 vo_prod:H=0.4251,top10E=0.76,eRank=24.8,q75/q25=3225.99 train_time:222344ms step_avg:85.52ms +[2025-08-22 22:02:54] [Rank 0] PRINT: step:2600/10000 val_loss:4.2728 svd_entropy: attn_qk:H=0.6503,top10E=0.41,eRank=85.6,q75/q25=72.10 attn_vo:H=0.5357,top10E=0.59,eRank=51.3,q75/q25=67.71 mlp_w1:H=0.7457,top10E=0.32,eRank=162.2,q75/q25=8.03 mlp_w2:H=0.9428,top10E=0.07,eRank=535.9,q75/q25=4.60 vo_prod:H=0.4251,top10E=0.76,eRank=24.8,q75/q25=3225.99 train_time:222344ms step_avg:85.52ms +[2025-08-22 22:02:54] [Rank 0] step:2601/10000 train_time:222357ms step_avg:85.49ms +[2025-08-22 22:02:54] [Rank 0] step:2601/10000 train_time:222357ms step_avg:85.49ms +[2025-08-22 22:02:56] [Rank 0] step:2621/10000 train_time:223939ms step_avg:85.44ms +[2025-08-22 22:02:56] [Rank 0] step:2621/10000 train_time:223939ms step_avg:85.44ms +[2025-08-22 22:02:57] [Rank 0] step:2641/10000 train_time:225684ms step_avg:85.45ms 
+[2025-08-22 22:02:57] [Rank 0] step:2641/10000 train_time:225684ms step_avg:85.45ms +[2025-08-22 22:02:59] [Rank 0] step:2661/10000 train_time:227433ms step_avg:85.47ms +[2025-08-22 22:02:59] [Rank 0] step:2661/10000 train_time:227433ms step_avg:85.47ms +[2025-08-22 22:03:01] [Rank 0] step:2681/10000 train_time:229182ms step_avg:85.48ms +[2025-08-22 22:03:01] [Rank 0] step:2681/10000 train_time:229182ms step_avg:85.48ms +[2025-08-22 22:03:03] [Rank 0] step:2701/10000 train_time:230932ms step_avg:85.50ms +[2025-08-22 22:03:03] [Rank 0] step:2701/10000 train_time:230932ms step_avg:85.50ms +[2025-08-22 22:03:04] [Rank 0] step:2721/10000 train_time:232683ms step_avg:85.51ms +[2025-08-22 22:03:04] [Rank 0] step:2721/10000 train_time:232683ms step_avg:85.51ms +[2025-08-22 22:03:06] [Rank 0] step:2741/10000 train_time:234436ms step_avg:85.53ms +[2025-08-22 22:03:06] [Rank 0] step:2741/10000 train_time:234436ms step_avg:85.53ms +[2025-08-22 22:03:08] [Rank 0] step:2761/10000 train_time:236189ms step_avg:85.54ms +[2025-08-22 22:03:08] [Rank 0] step:2761/10000 train_time:236189ms step_avg:85.54ms +[2025-08-22 22:03:10] [Rank 0] step:2781/10000 train_time:237941ms step_avg:85.56ms +[2025-08-22 22:03:10] [Rank 0] step:2781/10000 train_time:237941ms step_avg:85.56ms +[2025-08-22 22:03:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:03:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:03:25] [Rank 0] PRINT: step:2800/10000 val_loss:4.2410 svd_entropy: attn_qk:H=0.6540,top10E=0.41,eRank=87.9,q75/q25=72.43 attn_vo:H=0.5422,top10E=0.58,eRank=53.7,q75/q25=70.92 mlp_w1:H=0.7500,top10E=0.31,eRank=167.4,q75/q25=7.94 mlp_w2:H=0.9434,top10E=0.07,eRank=538.1,q75/q25=4.54 vo_prod:H=0.4320,top10E=0.75,eRank=26.0,q75/q25=3748.76 train_time:239872ms step_avg:85.67ms +[2025-08-22 22:03:25] [Rank 0] PRINT: step:2800/10000 val_loss:4.2410 svd_entropy: attn_qk:H=0.6540,top10E=0.41,eRank=87.9,q75/q25=72.43 attn_vo:H=0.5422,top10E=0.58,eRank=53.7,q75/q25=70.92 mlp_w1:H=0.7500,top10E=0.31,eRank=167.4,q75/q25=7.94 mlp_w2:H=0.9434,top10E=0.07,eRank=538.1,q75/q25=4.54 vo_prod:H=0.4320,top10E=0.75,eRank=26.0,q75/q25=3748.76 train_time:239872ms step_avg:85.67ms +[2025-08-22 22:03:25] [Rank 0] step:2801/10000 train_time:239885ms step_avg:85.64ms +[2025-08-22 22:03:25] [Rank 0] step:2801/10000 train_time:239885ms step_avg:85.64ms +[2025-08-22 22:03:27] [Rank 0] step:2821/10000 train_time:241463ms step_avg:85.59ms +[2025-08-22 22:03:27] [Rank 0] step:2821/10000 train_time:241463ms step_avg:85.59ms +[2025-08-22 22:03:29] [Rank 0] step:2841/10000 train_time:243212ms step_avg:85.61ms +[2025-08-22 22:03:29] [Rank 0] step:2841/10000 train_time:243212ms step_avg:85.61ms +[2025-08-22 22:03:30] [Rank 0] step:2861/10000 train_time:244963ms step_avg:85.62ms +[2025-08-22 22:03:30] [Rank 0] step:2861/10000 train_time:244963ms step_avg:85.62ms +[2025-08-22 22:03:32] [Rank 0] step:2881/10000 train_time:246713ms step_avg:85.63ms +[2025-08-22 22:03:32] [Rank 0] step:2881/10000 train_time:246713ms step_avg:85.63ms +[2025-08-22 22:03:34] [Rank 0] step:2901/10000 train_time:248463ms step_avg:85.65ms +[2025-08-22 22:03:34] [Rank 0] step:2901/10000 train_time:248463ms step_avg:85.65ms +[2025-08-22 22:03:36] [Rank 0] step:2921/10000 train_time:250216ms step_avg:85.66ms +[2025-08-22 22:03:36] [Rank 0] step:2921/10000 train_time:250216ms step_avg:85.66ms +[2025-08-22 22:03:37] 
[Rank 0] step:2941/10000 train_time:251970ms step_avg:85.67ms +[2025-08-22 22:03:37] [Rank 0] step:2941/10000 train_time:251970ms step_avg:85.67ms +[2025-08-22 22:03:39] [Rank 0] step:2961/10000 train_time:253724ms step_avg:85.69ms +[2025-08-22 22:03:39] [Rank 0] step:2961/10000 train_time:253724ms step_avg:85.69ms +[2025-08-22 22:03:41] [Rank 0] step:2981/10000 train_time:255484ms step_avg:85.70ms +[2025-08-22 22:03:41] [Rank 0] step:2981/10000 train_time:255484ms step_avg:85.70ms +[2025-08-22 22:03:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:03:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:03:56] [Rank 0] PRINT: step:3000/10000 val_loss:4.1960 svd_entropy: attn_qk:H=0.6570,top10E=0.40,eRank=89.9,q75/q25=72.97 attn_vo:H=0.5480,top10E=0.56,eRank=56.0,q75/q25=73.90 mlp_w1:H=0.7523,top10E=0.31,eRank=171.6,q75/q25=7.85 mlp_w2:H=0.9437,top10E=0.07,eRank=539.6,q75/q25=4.49 vo_prod:H=0.4375,top10E=0.74,eRank=27.2,q75/q25=4311.23 train_time:257423ms step_avg:85.81ms +[2025-08-22 22:03:56] [Rank 0] PRINT: step:3000/10000 val_loss:4.1960 svd_entropy: attn_qk:H=0.6570,top10E=0.40,eRank=89.9,q75/q25=72.97 attn_vo:H=0.5480,top10E=0.56,eRank=56.0,q75/q25=73.90 mlp_w1:H=0.7523,top10E=0.31,eRank=171.6,q75/q25=7.85 mlp_w2:H=0.9437,top10E=0.07,eRank=539.6,q75/q25=4.49 vo_prod:H=0.4375,top10E=0.74,eRank=27.2,q75/q25=4311.23 train_time:257423ms step_avg:85.81ms +[2025-08-22 22:03:56] [Rank 0] step:3001/10000 train_time:257435ms step_avg:85.78ms +[2025-08-22 22:03:56] [Rank 0] step:3001/10000 train_time:257435ms step_avg:85.78ms +[2025-08-22 22:03:58] [Rank 0] step:3021/10000 train_time:259041ms step_avg:85.75ms +[2025-08-22 22:03:58] [Rank 0] step:3021/10000 train_time:259041ms step_avg:85.75ms +[2025-08-22 22:04:00] [Rank 0] step:3041/10000 train_time:260799ms step_avg:85.76ms 
+[2025-08-22 22:04:00] [Rank 0] step:3041/10000 train_time:260799ms step_avg:85.76ms +[2025-08-22 22:04:02] [Rank 0] step:3061/10000 train_time:262559ms step_avg:85.78ms +[2025-08-22 22:04:02] [Rank 0] step:3061/10000 train_time:262559ms step_avg:85.78ms +[2025-08-22 22:04:03] [Rank 0] step:3081/10000 train_time:264323ms step_avg:85.79ms +[2025-08-22 22:04:03] [Rank 0] step:3081/10000 train_time:264323ms step_avg:85.79ms +[2025-08-22 22:04:05] [Rank 0] step:3101/10000 train_time:266087ms step_avg:85.81ms +[2025-08-22 22:04:05] [Rank 0] step:3101/10000 train_time:266087ms step_avg:85.81ms +[2025-08-22 22:04:07] [Rank 0] step:3121/10000 train_time:267852ms step_avg:85.82ms +[2025-08-22 22:04:07] [Rank 0] step:3121/10000 train_time:267852ms step_avg:85.82ms +[2025-08-22 22:04:09] [Rank 0] step:3141/10000 train_time:269615ms step_avg:85.84ms +[2025-08-22 22:04:09] [Rank 0] step:3141/10000 train_time:269615ms step_avg:85.84ms +[2025-08-22 22:04:10] [Rank 0] step:3161/10000 train_time:271381ms step_avg:85.85ms +[2025-08-22 22:04:10] [Rank 0] step:3161/10000 train_time:271381ms step_avg:85.85ms +[2025-08-22 22:04:12] [Rank 0] step:3181/10000 train_time:273148ms step_avg:85.87ms +[2025-08-22 22:04:12] [Rank 0] step:3181/10000 train_time:273148ms step_avg:85.87ms +[2025-08-22 22:04:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:04:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:04:27] [Rank 0] PRINT: step:3200/10000 val_loss:4.1699 svd_entropy: attn_qk:H=0.6597,top10E=0.40,eRank=91.7,q75/q25=73.60 attn_vo:H=0.5536,top10E=0.55,eRank=58.2,q75/q25=76.05 mlp_w1:H=0.7541,top10E=0.31,eRank=175.5,q75/q25=7.79 mlp_w2:H=0.9439,top10E=0.07,eRank=540.6,q75/q25=4.47 vo_prod:H=0.4436,top10E=0.73,eRank=28.4,q75/q25=4813.57 train_time:275093ms step_avg:85.97ms +[2025-08-22 22:04:27] [Rank 0] PRINT: step:3200/10000 val_loss:4.1699 svd_entropy: attn_qk:H=0.6597,top10E=0.40,eRank=91.7,q75/q25=73.60 attn_vo:H=0.5536,top10E=0.55,eRank=58.2,q75/q25=76.05 mlp_w1:H=0.7541,top10E=0.31,eRank=175.5,q75/q25=7.79 mlp_w2:H=0.9439,top10E=0.07,eRank=540.6,q75/q25=4.47 vo_prod:H=0.4436,top10E=0.73,eRank=28.4,q75/q25=4813.57 train_time:275093ms step_avg:85.97ms +[2025-08-22 22:04:28] [Rank 0] step:3201/10000 train_time:275105ms step_avg:85.94ms +[2025-08-22 22:04:28] [Rank 0] step:3201/10000 train_time:275105ms step_avg:85.94ms +[2025-08-22 22:04:29] [Rank 0] step:3221/10000 train_time:276688ms step_avg:85.90ms +[2025-08-22 22:04:29] [Rank 0] step:3221/10000 train_time:276688ms step_avg:85.90ms +[2025-08-22 22:04:31] [Rank 0] step:3241/10000 train_time:278449ms step_avg:85.91ms +[2025-08-22 22:04:31] [Rank 0] step:3241/10000 train_time:278449ms step_avg:85.91ms +[2025-08-22 22:04:33] [Rank 0] step:3261/10000 train_time:280210ms step_avg:85.93ms +[2025-08-22 22:04:33] [Rank 0] step:3261/10000 train_time:280210ms step_avg:85.93ms +[2025-08-22 22:04:35] [Rank 0] step:3281/10000 train_time:281973ms step_avg:85.94ms +[2025-08-22 22:04:35] [Rank 0] step:3281/10000 train_time:281973ms step_avg:85.94ms +[2025-08-22 22:04:36] [Rank 0] step:3301/10000 train_time:283733ms step_avg:85.95ms +[2025-08-22 22:04:36] [Rank 0] step:3301/10000 train_time:283733ms step_avg:85.95ms +[2025-08-22 22:04:38] [Rank 0] step:3321/10000 train_time:285497ms step_avg:85.97ms +[2025-08-22 22:04:38] [Rank 0] step:3321/10000 train_time:285497ms step_avg:85.97ms +[2025-08-22 22:04:40] 
[Rank 0] step:3341/10000 train_time:287260ms step_avg:85.98ms +[2025-08-22 22:04:40] [Rank 0] step:3341/10000 train_time:287260ms step_avg:85.98ms +[2025-08-22 22:04:42] [Rank 0] step:3361/10000 train_time:289023ms step_avg:85.99ms +[2025-08-22 22:04:42] [Rank 0] step:3361/10000 train_time:289023ms step_avg:85.99ms +[2025-08-22 22:04:43] [Rank 0] step:3381/10000 train_time:290787ms step_avg:86.01ms +[2025-08-22 22:04:43] [Rank 0] step:3381/10000 train_time:290787ms step_avg:86.01ms +[2025-08-22 22:04:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:04:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:04:59] [Rank 0] PRINT: step:3400/10000 val_loss:4.1395 svd_entropy: attn_qk:H=0.6622,top10E=0.40,eRank=93.5,q75/q25=73.75 attn_vo:H=0.5591,top10E=0.54,eRank=60.3,q75/q25=78.15 mlp_w1:H=0.7559,top10E=0.31,eRank=179.4,q75/q25=7.72 mlp_w2:H=0.9441,top10E=0.07,eRank=541.8,q75/q25=4.41 vo_prod:H=0.4503,top10E=0.72,eRank=29.6,q75/q25=5223.39 train_time:292726ms step_avg:86.10ms +[2025-08-22 22:04:59] [Rank 0] PRINT: step:3400/10000 val_loss:4.1395 svd_entropy: attn_qk:H=0.6622,top10E=0.40,eRank=93.5,q75/q25=73.75 attn_vo:H=0.5591,top10E=0.54,eRank=60.3,q75/q25=78.15 mlp_w1:H=0.7559,top10E=0.31,eRank=179.4,q75/q25=7.72 mlp_w2:H=0.9441,top10E=0.07,eRank=541.8,q75/q25=4.41 vo_prod:H=0.4503,top10E=0.72,eRank=29.6,q75/q25=5223.39 train_time:292726ms step_avg:86.10ms +[2025-08-22 22:04:59] [Rank 0] step:3401/10000 train_time:292740ms step_avg:86.07ms +[2025-08-22 22:04:59] [Rank 0] step:3401/10000 train_time:292740ms step_avg:86.07ms +[2025-08-22 22:05:01] [Rank 0] step:3421/10000 train_time:294326ms step_avg:86.04ms +[2025-08-22 22:05:01] [Rank 0] step:3421/10000 train_time:294326ms step_avg:86.04ms +[2025-08-22 22:05:02] [Rank 0] step:3441/10000 train_time:296081ms step_avg:86.04ms 
+[2025-08-22 22:05:02] [Rank 0] step:3441/10000 train_time:296081ms step_avg:86.04ms +[2025-08-22 22:05:04] [Rank 0] step:3461/10000 train_time:297838ms step_avg:86.06ms +[2025-08-22 22:05:04] [Rank 0] step:3461/10000 train_time:297838ms step_avg:86.06ms +[2025-08-22 22:05:06] [Rank 0] step:3481/10000 train_time:299596ms step_avg:86.07ms +[2025-08-22 22:05:06] [Rank 0] step:3481/10000 train_time:299596ms step_avg:86.07ms +[2025-08-22 22:05:08] [Rank 0] step:3501/10000 train_time:301361ms step_avg:86.08ms +[2025-08-22 22:05:08] [Rank 0] step:3501/10000 train_time:301361ms step_avg:86.08ms +[2025-08-22 22:05:09] [Rank 0] step:3521/10000 train_time:303123ms step_avg:86.09ms +[2025-08-22 22:05:09] [Rank 0] step:3521/10000 train_time:303123ms step_avg:86.09ms +[2025-08-22 22:05:11] [Rank 0] step:3541/10000 train_time:304882ms step_avg:86.10ms +[2025-08-22 22:05:11] [Rank 0] step:3541/10000 train_time:304882ms step_avg:86.10ms +[2025-08-22 22:05:13] [Rank 0] step:3561/10000 train_time:306645ms step_avg:86.11ms +[2025-08-22 22:05:13] [Rank 0] step:3561/10000 train_time:306645ms step_avg:86.11ms +[2025-08-22 22:05:15] [Rank 0] step:3581/10000 train_time:308409ms step_avg:86.12ms +[2025-08-22 22:05:15] [Rank 0] step:3581/10000 train_time:308409ms step_avg:86.12ms +[2025-08-22 22:05:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:05:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:05:30] [Rank 0] PRINT: step:3600/10000 val_loss:4.1309 svd_entropy: attn_qk:H=0.6646,top10E=0.39,eRank=95.2,q75/q25=74.43 attn_vo:H=0.5638,top10E=0.53,eRank=62.3,q75/q25=80.45 mlp_w1:H=0.7569,top10E=0.31,eRank=182.8,q75/q25=7.64 mlp_w2:H=0.9443,top10E=0.07,eRank=542.6,q75/q25=4.40 vo_prod:H=0.4554,top10E=0.71,eRank=30.5,q75/q25=5617.76 train_time:310351ms step_avg:86.21ms +[2025-08-22 22:05:30] [Rank 0] PRINT: step:3600/10000 val_loss:4.1309 svd_entropy: attn_qk:H=0.6646,top10E=0.39,eRank=95.2,q75/q25=74.43 attn_vo:H=0.5638,top10E=0.53,eRank=62.3,q75/q25=80.45 mlp_w1:H=0.7569,top10E=0.31,eRank=182.8,q75/q25=7.64 mlp_w2:H=0.9443,top10E=0.07,eRank=542.6,q75/q25=4.40 vo_prod:H=0.4554,top10E=0.71,eRank=30.5,q75/q25=5617.76 train_time:310351ms step_avg:86.21ms +[2025-08-22 22:05:30] [Rank 0] step:3601/10000 train_time:310366ms step_avg:86.19ms +[2025-08-22 22:05:30] [Rank 0] step:3601/10000 train_time:310366ms step_avg:86.19ms +[2025-08-22 22:05:32] [Rank 0] step:3621/10000 train_time:311957ms step_avg:86.15ms +[2025-08-22 22:05:32] [Rank 0] step:3621/10000 train_time:311957ms step_avg:86.15ms +[2025-08-22 22:05:34] [Rank 0] step:3641/10000 train_time:313718ms step_avg:86.16ms +[2025-08-22 22:05:34] [Rank 0] step:3641/10000 train_time:313718ms step_avg:86.16ms +[2025-08-22 22:05:35] [Rank 0] step:3661/10000 train_time:315478ms step_avg:86.17ms +[2025-08-22 22:05:35] [Rank 0] step:3661/10000 train_time:315478ms step_avg:86.17ms +[2025-08-22 22:05:37] [Rank 0] step:3681/10000 train_time:317242ms step_avg:86.18ms +[2025-08-22 22:05:37] [Rank 0] step:3681/10000 train_time:317242ms step_avg:86.18ms +[2025-08-22 22:05:39] [Rank 0] step:3701/10000 train_time:319006ms step_avg:86.19ms +[2025-08-22 22:05:39] [Rank 0] step:3701/10000 train_time:319006ms step_avg:86.19ms +[2025-08-22 22:05:41] [Rank 0] step:3721/10000 train_time:320798ms step_avg:86.21ms +[2025-08-22 22:05:41] [Rank 0] step:3721/10000 train_time:320798ms step_avg:86.21ms +[2025-08-22 22:05:43] 
[Rank 0] step:3741/10000 train_time:322599ms step_avg:86.23ms +[2025-08-22 22:05:43] [Rank 0] step:3741/10000 train_time:322599ms step_avg:86.23ms +[2025-08-22 22:05:44] [Rank 0] step:3761/10000 train_time:324401ms step_avg:86.25ms +[2025-08-22 22:05:44] [Rank 0] step:3761/10000 train_time:324401ms step_avg:86.25ms +[2025-08-22 22:05:46] [Rank 0] step:3781/10000 train_time:326205ms step_avg:86.27ms +[2025-08-22 22:05:46] [Rank 0] step:3781/10000 train_time:326205ms step_avg:86.27ms +[2025-08-22 22:05:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:05:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:06:02] [Rank 0] PRINT: step:3800/10000 val_loss:4.0810 svd_entropy: attn_qk:H=0.6664,top10E=0.39,eRank=96.6,q75/q25=74.06 attn_vo:H=0.5678,top10E=0.52,eRank=64.0,q75/q25=82.13 mlp_w1:H=0.7588,top10E=0.30,eRank=186.3,q75/q25=7.59 mlp_w2:H=0.9444,top10E=0.06,eRank=543.3,q75/q25=4.35 vo_prod:H=0.4596,top10E=0.71,eRank=31.4,q75/q25=6021.30 train_time:328189ms step_avg:86.37ms +[2025-08-22 22:06:02] [Rank 0] PRINT: step:3800/10000 val_loss:4.0810 svd_entropy: attn_qk:H=0.6664,top10E=0.39,eRank=96.6,q75/q25=74.06 attn_vo:H=0.5678,top10E=0.52,eRank=64.0,q75/q25=82.13 mlp_w1:H=0.7588,top10E=0.30,eRank=186.3,q75/q25=7.59 mlp_w2:H=0.9444,top10E=0.06,eRank=543.3,q75/q25=4.35 vo_prod:H=0.4596,top10E=0.71,eRank=31.4,q75/q25=6021.30 train_time:328189ms step_avg:86.37ms +[2025-08-22 22:06:02] [Rank 0] step:3801/10000 train_time:328204ms step_avg:86.35ms +[2025-08-22 22:06:02] [Rank 0] step:3801/10000 train_time:328204ms step_avg:86.35ms +[2025-08-22 22:06:04] [Rank 0] step:3821/10000 train_time:329831ms step_avg:86.32ms +[2025-08-22 22:06:04] [Rank 0] step:3821/10000 train_time:329831ms step_avg:86.32ms +[2025-08-22 22:06:05] [Rank 0] step:3841/10000 train_time:331630ms step_avg:86.34ms 
+[2025-08-22 22:06:05] [Rank 0] step:3841/10000 train_time:331630ms step_avg:86.34ms +[2025-08-22 22:06:07] [Rank 0] step:3861/10000 train_time:333425ms step_avg:86.36ms +[2025-08-22 22:06:07] [Rank 0] step:3861/10000 train_time:333425ms step_avg:86.36ms +[2025-08-22 22:06:09] [Rank 0] step:3881/10000 train_time:335219ms step_avg:86.37ms +[2025-08-22 22:06:09] [Rank 0] step:3881/10000 train_time:335219ms step_avg:86.37ms +[2025-08-22 22:06:11] [Rank 0] step:3901/10000 train_time:337013ms step_avg:86.39ms +[2025-08-22 22:06:11] [Rank 0] step:3901/10000 train_time:337013ms step_avg:86.39ms +[2025-08-22 22:06:13] [Rank 0] step:3921/10000 train_time:338808ms step_avg:86.41ms +[2025-08-22 22:06:13] [Rank 0] step:3921/10000 train_time:338808ms step_avg:86.41ms +[2025-08-22 22:06:14] [Rank 0] step:3941/10000 train_time:340603ms step_avg:86.43ms +[2025-08-22 22:06:14] [Rank 0] step:3941/10000 train_time:340603ms step_avg:86.43ms +[2025-08-22 22:06:16] [Rank 0] step:3961/10000 train_time:342399ms step_avg:86.44ms +[2025-08-22 22:06:16] [Rank 0] step:3961/10000 train_time:342399ms step_avg:86.44ms +[2025-08-22 22:06:18] [Rank 0] step:3981/10000 train_time:344198ms step_avg:86.46ms +[2025-08-22 22:06:18] [Rank 0] step:3981/10000 train_time:344198ms step_avg:86.46ms +[2025-08-22 22:06:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:06:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:06:34] [Rank 0] PRINT: step:4000/10000 val_loss:4.0597 svd_entropy: attn_qk:H=0.6687,top10E=0.39,eRank=98.3,q75/q25=73.98 attn_vo:H=0.5721,top10E=0.52,eRank=65.8,q75/q25=83.07 mlp_w1:H=0.7603,top10E=0.30,eRank=189.7,q75/q25=7.54 mlp_w2:H=0.9445,top10E=0.06,eRank=544.0,q75/q25=4.33 vo_prod:H=0.4651,top10E=0.70,eRank=32.5,q75/q25=6290.20 train_time:346176ms step_avg:86.54ms +[2025-08-22 22:06:34] [Rank 0] PRINT: step:4000/10000 val_loss:4.0597 svd_entropy: attn_qk:H=0.6687,top10E=0.39,eRank=98.3,q75/q25=73.98 attn_vo:H=0.5721,top10E=0.52,eRank=65.8,q75/q25=83.07 mlp_w1:H=0.7603,top10E=0.30,eRank=189.7,q75/q25=7.54 mlp_w2:H=0.9445,top10E=0.06,eRank=544.0,q75/q25=4.33 vo_prod:H=0.4651,top10E=0.70,eRank=32.5,q75/q25=6290.20 train_time:346176ms step_avg:86.54ms +[2025-08-22 22:06:34] [Rank 0] step:4001/10000 train_time:346189ms step_avg:86.53ms +[2025-08-22 22:06:34] [Rank 0] step:4001/10000 train_time:346189ms step_avg:86.53ms +[2025-08-22 22:06:35] [Rank 0] step:4021/10000 train_time:347813ms step_avg:86.50ms +[2025-08-22 22:06:35] [Rank 0] step:4021/10000 train_time:347813ms step_avg:86.50ms +[2025-08-22 22:06:37] [Rank 0] step:4041/10000 train_time:349605ms step_avg:86.51ms +[2025-08-22 22:06:37] [Rank 0] step:4041/10000 train_time:349605ms step_avg:86.51ms +[2025-08-22 22:06:39] [Rank 0] step:4061/10000 train_time:351396ms step_avg:86.53ms +[2025-08-22 22:06:39] [Rank 0] step:4061/10000 train_time:351396ms step_avg:86.53ms +[2025-08-22 22:06:41] [Rank 0] step:4081/10000 train_time:353682ms step_avg:86.67ms +[2025-08-22 22:06:41] [Rank 0] step:4081/10000 train_time:353682ms step_avg:86.67ms +[2025-08-22 22:06:43] [Rank 0] step:4101/10000 train_time:355478ms step_avg:86.68ms +[2025-08-22 22:06:43] [Rank 0] step:4101/10000 train_time:355478ms step_avg:86.68ms +[2025-08-22 22:06:45] [Rank 0] step:4121/10000 train_time:357275ms step_avg:86.70ms +[2025-08-22 22:06:45] [Rank 0] step:4121/10000 train_time:357275ms step_avg:86.70ms +[2025-08-22 22:06:47] 
[Rank 0] step:4141/10000 train_time:359071ms step_avg:86.71ms +[2025-08-22 22:06:47] [Rank 0] step:4141/10000 train_time:359071ms step_avg:86.71ms +[2025-08-22 22:06:49] [Rank 0] step:4161/10000 train_time:360866ms step_avg:86.73ms +[2025-08-22 22:06:49] [Rank 0] step:4161/10000 train_time:360866ms step_avg:86.73ms +[2025-08-22 22:06:50] [Rank 0] step:4181/10000 train_time:362665ms step_avg:86.74ms +[2025-08-22 22:06:50] [Rank 0] step:4181/10000 train_time:362665ms step_avg:86.74ms +[2025-08-22 22:06:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:06:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:07:06] [Rank 0] PRINT: step:4200/10000 val_loss:4.0461 svd_entropy: attn_qk:H=0.6709,top10E=0.38,eRank=99.8,q75/q25=74.13 attn_vo:H=0.5762,top10E=0.51,eRank=67.6,q75/q25=84.29 mlp_w1:H=0.7618,top10E=0.30,eRank=193.0,q75/q25=7.46 mlp_w2:H=0.9446,top10E=0.06,eRank=544.5,q75/q25=4.31 vo_prod:H=0.4694,top10E=0.69,eRank=33.4,q75/q25=6436.08 train_time:364641ms step_avg:86.82ms +[2025-08-22 22:07:06] [Rank 0] PRINT: step:4200/10000 val_loss:4.0461 svd_entropy: attn_qk:H=0.6709,top10E=0.38,eRank=99.8,q75/q25=74.13 attn_vo:H=0.5762,top10E=0.51,eRank=67.6,q75/q25=84.29 mlp_w1:H=0.7618,top10E=0.30,eRank=193.0,q75/q25=7.46 mlp_w2:H=0.9446,top10E=0.06,eRank=544.5,q75/q25=4.31 vo_prod:H=0.4694,top10E=0.69,eRank=33.4,q75/q25=6436.08 train_time:364641ms step_avg:86.82ms +[2025-08-22 22:07:06] [Rank 0] step:4201/10000 train_time:364655ms step_avg:86.80ms +[2025-08-22 22:07:06] [Rank 0] step:4201/10000 train_time:364655ms step_avg:86.80ms +[2025-08-22 22:07:08] [Rank 0] step:4221/10000 train_time:366287ms step_avg:86.78ms +[2025-08-22 22:07:08] [Rank 0] step:4221/10000 train_time:366287ms step_avg:86.78ms +[2025-08-22 22:07:09] [Rank 0] step:4241/10000 train_time:368082ms step_avg:86.79ms 
+[2025-08-22 22:07:09] [Rank 0] step:4241/10000 train_time:368082ms step_avg:86.79ms +[2025-08-22 22:07:11] [Rank 0] step:4261/10000 train_time:369871ms step_avg:86.80ms +[2025-08-22 22:07:11] [Rank 0] step:4261/10000 train_time:369871ms step_avg:86.80ms +[2025-08-22 22:07:13] [Rank 0] step:4281/10000 train_time:371664ms step_avg:86.82ms +[2025-08-22 22:07:13] [Rank 0] step:4281/10000 train_time:371664ms step_avg:86.82ms +[2025-08-22 22:07:15] [Rank 0] step:4301/10000 train_time:373457ms step_avg:86.83ms +[2025-08-22 22:07:15] [Rank 0] step:4301/10000 train_time:373457ms step_avg:86.83ms +[2025-08-22 22:07:17] [Rank 0] step:4321/10000 train_time:375253ms step_avg:86.84ms +[2025-08-22 22:07:17] [Rank 0] step:4321/10000 train_time:375253ms step_avg:86.84ms +[2025-08-22 22:07:18] [Rank 0] step:4341/10000 train_time:377047ms step_avg:86.86ms +[2025-08-22 22:07:18] [Rank 0] step:4341/10000 train_time:377047ms step_avg:86.86ms +[2025-08-22 22:07:20] [Rank 0] step:4361/10000 train_time:378844ms step_avg:86.87ms +[2025-08-22 22:07:20] [Rank 0] step:4361/10000 train_time:378844ms step_avg:86.87ms +[2025-08-22 22:07:22] [Rank 0] step:4381/10000 train_time:380640ms step_avg:86.88ms +[2025-08-22 22:07:22] [Rank 0] step:4381/10000 train_time:380640ms step_avg:86.88ms +[2025-08-22 22:07:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:07:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:07:38] [Rank 0] PRINT: step:4400/10000 val_loss:4.0327 svd_entropy: attn_qk:H=0.6724,top10E=0.38,eRank=101.1,q75/q25=73.67 attn_vo:H=0.5800,top10E=0.50,eRank=69.3,q75/q25=84.88 mlp_w1:H=0.7632,top10E=0.30,eRank=196.1,q75/q25=7.38 mlp_w2:H=0.9447,top10E=0.06,eRank=545.1,q75/q25=4.31 vo_prod:H=0.4730,top10E=0.68,eRank=34.2,q75/q25=6693.11 train_time:382618ms step_avg:86.96ms +[2025-08-22 22:07:38] [Rank 0] PRINT: step:4400/10000 val_loss:4.0327 svd_entropy: attn_qk:H=0.6724,top10E=0.38,eRank=101.1,q75/q25=73.67 attn_vo:H=0.5800,top10E=0.50,eRank=69.3,q75/q25=84.88 mlp_w1:H=0.7632,top10E=0.30,eRank=196.1,q75/q25=7.38 mlp_w2:H=0.9447,top10E=0.06,eRank=545.1,q75/q25=4.31 vo_prod:H=0.4730,top10E=0.68,eRank=34.2,q75/q25=6693.11 train_time:382618ms step_avg:86.96ms +[2025-08-22 22:07:38] [Rank 0] step:4401/10000 train_time:382631ms step_avg:86.94ms +[2025-08-22 22:07:38] [Rank 0] step:4401/10000 train_time:382631ms step_avg:86.94ms +[2025-08-22 22:07:39] [Rank 0] step:4421/10000 train_time:384243ms step_avg:86.91ms +[2025-08-22 22:07:39] [Rank 0] step:4421/10000 train_time:384243ms step_avg:86.91ms +[2025-08-22 22:07:41] [Rank 0] step:4441/10000 train_time:386037ms step_avg:86.93ms +[2025-08-22 22:07:41] [Rank 0] step:4441/10000 train_time:386037ms step_avg:86.93ms +[2025-08-22 22:07:43] [Rank 0] step:4461/10000 train_time:387835ms step_avg:86.94ms +[2025-08-22 22:07:43] [Rank 0] step:4461/10000 train_time:387835ms step_avg:86.94ms +[2025-08-22 22:07:45] [Rank 0] step:4481/10000 train_time:389639ms step_avg:86.95ms +[2025-08-22 22:07:45] [Rank 0] step:4481/10000 train_time:389639ms step_avg:86.95ms +[2025-08-22 22:07:47] [Rank 0] step:4501/10000 train_time:391444ms step_avg:86.97ms +[2025-08-22 22:07:47] [Rank 0] step:4501/10000 train_time:391444ms step_avg:86.97ms +[2025-08-22 22:07:48] [Rank 0] step:4521/10000 train_time:393250ms step_avg:86.98ms +[2025-08-22 22:07:48] [Rank 0] step:4521/10000 train_time:393250ms step_avg:86.98ms +[2025-08-22 22:07:50] 
[Rank 0] step:4541/10000 train_time:395059ms step_avg:87.00ms +[2025-08-22 22:07:50] [Rank 0] step:4541/10000 train_time:395059ms step_avg:87.00ms +[2025-08-22 22:07:52] [Rank 0] step:4561/10000 train_time:396866ms step_avg:87.01ms +[2025-08-22 22:07:52] [Rank 0] step:4561/10000 train_time:396866ms step_avg:87.01ms +[2025-08-22 22:07:54] [Rank 0] step:4581/10000 train_time:398676ms step_avg:87.03ms +[2025-08-22 22:07:54] [Rank 0] step:4581/10000 train_time:398676ms step_avg:87.03ms +[2025-08-22 22:07:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:07:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:08:09] [Rank 0] PRINT: step:4600/10000 val_loss:4.0049 svd_entropy: attn_qk:H=0.6743,top10E=0.38,eRank=102.5,q75/q25=73.96 attn_vo:H=0.5836,top10E=0.49,eRank=70.9,q75/q25=86.40 mlp_w1:H=0.7649,top10E=0.29,eRank=199.3,q75/q25=7.33 mlp_w2:H=0.9448,top10E=0.06,eRank=545.6,q75/q25=4.29 vo_prod:H=0.4768,top10E=0.68,eRank=35.1,q75/q25=6920.09 train_time:400667ms step_avg:87.10ms +[2025-08-22 22:08:09] [Rank 0] PRINT: step:4600/10000 val_loss:4.0049 svd_entropy: attn_qk:H=0.6743,top10E=0.38,eRank=102.5,q75/q25=73.96 attn_vo:H=0.5836,top10E=0.49,eRank=70.9,q75/q25=86.40 mlp_w1:H=0.7649,top10E=0.29,eRank=199.3,q75/q25=7.33 mlp_w2:H=0.9448,top10E=0.06,eRank=545.6,q75/q25=4.29 vo_prod:H=0.4768,top10E=0.68,eRank=35.1,q75/q25=6920.09 train_time:400667ms step_avg:87.10ms +[2025-08-22 22:08:10] [Rank 0] step:4601/10000 train_time:400680ms step_avg:87.09ms +[2025-08-22 22:08:10] [Rank 0] step:4601/10000 train_time:400680ms step_avg:87.09ms +[2025-08-22 22:08:11] [Rank 0] step:4621/10000 train_time:402303ms step_avg:87.06ms +[2025-08-22 22:08:11] [Rank 0] step:4621/10000 train_time:402303ms step_avg:87.06ms +[2025-08-22 22:08:13] [Rank 0] step:4641/10000 train_time:404103ms step_avg:87.07ms 
+[2025-08-22 22:08:13] [Rank 0] step:4641/10000 train_time:404103ms step_avg:87.07ms +[2025-08-22 22:08:15] [Rank 0] step:4661/10000 train_time:405904ms step_avg:87.09ms +[2025-08-22 22:08:15] [Rank 0] step:4661/10000 train_time:405904ms step_avg:87.09ms +[2025-08-22 22:08:17] [Rank 0] step:4681/10000 train_time:407707ms step_avg:87.10ms +[2025-08-22 22:08:17] [Rank 0] step:4681/10000 train_time:407707ms step_avg:87.10ms +[2025-08-22 22:08:19] [Rank 0] step:4701/10000 train_time:409510ms step_avg:87.11ms +[2025-08-22 22:08:19] [Rank 0] step:4701/10000 train_time:409510ms step_avg:87.11ms +[2025-08-22 22:08:20] [Rank 0] step:4721/10000 train_time:411313ms step_avg:87.12ms +[2025-08-22 22:08:20] [Rank 0] step:4721/10000 train_time:411313ms step_avg:87.12ms +[2025-08-22 22:08:22] [Rank 0] step:4741/10000 train_time:413116ms step_avg:87.14ms +[2025-08-22 22:08:22] [Rank 0] step:4741/10000 train_time:413116ms step_avg:87.14ms +[2025-08-22 22:08:24] [Rank 0] step:4761/10000 train_time:414918ms step_avg:87.15ms +[2025-08-22 22:08:24] [Rank 0] step:4761/10000 train_time:414918ms step_avg:87.15ms +[2025-08-22 22:08:26] [Rank 0] step:4781/10000 train_time:416721ms step_avg:87.16ms +[2025-08-22 22:08:26] [Rank 0] step:4781/10000 train_time:416721ms step_avg:87.16ms +[2025-08-22 22:08:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:08:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:08:41] [Rank 0] PRINT: step:4800/10000 val_loss:3.9964 svd_entropy: attn_qk:H=0.6760,top10E=0.38,eRank=103.9,q75/q25=74.31 attn_vo:H=0.5870,top10E=0.49,eRank=72.5,q75/q25=87.61 mlp_w1:H=0.7674,top10E=0.29,eRank=202.5,q75/q25=7.26 mlp_w2:H=0.9449,top10E=0.06,eRank=546.2,q75/q25=4.26 vo_prod:H=0.4801,top10E=0.67,eRank=35.9,q75/q25=7180.85 train_time:418706ms step_avg:87.23ms +[2025-08-22 22:08:41] [Rank 0] PRINT: step:4800/10000 val_loss:3.9964 svd_entropy: attn_qk:H=0.6760,top10E=0.38,eRank=103.9,q75/q25=74.31 attn_vo:H=0.5870,top10E=0.49,eRank=72.5,q75/q25=87.61 mlp_w1:H=0.7674,top10E=0.29,eRank=202.5,q75/q25=7.26 mlp_w2:H=0.9449,top10E=0.06,eRank=546.2,q75/q25=4.26 vo_prod:H=0.4801,top10E=0.67,eRank=35.9,q75/q25=7180.85 train_time:418706ms step_avg:87.23ms +[2025-08-22 22:08:41] [Rank 0] step:4801/10000 train_time:418718ms step_avg:87.21ms +[2025-08-22 22:08:41] [Rank 0] step:4801/10000 train_time:418718ms step_avg:87.21ms +[2025-08-22 22:08:43] [Rank 0] step:4821/10000 train_time:420341ms step_avg:87.19ms +[2025-08-22 22:08:43] [Rank 0] step:4821/10000 train_time:420341ms step_avg:87.19ms +[2025-08-22 22:08:45] [Rank 0] step:4841/10000 train_time:422139ms step_avg:87.20ms +[2025-08-22 22:08:45] [Rank 0] step:4841/10000 train_time:422139ms step_avg:87.20ms +[2025-08-22 22:08:47] [Rank 0] step:4861/10000 train_time:423938ms step_avg:87.21ms +[2025-08-22 22:08:47] [Rank 0] step:4861/10000 train_time:423938ms step_avg:87.21ms +[2025-08-22 22:08:49] [Rank 0] step:4881/10000 train_time:425737ms step_avg:87.22ms +[2025-08-22 22:08:49] [Rank 0] step:4881/10000 train_time:425737ms step_avg:87.22ms +[2025-08-22 22:08:50] [Rank 0] step:4901/10000 train_time:427536ms step_avg:87.23ms +[2025-08-22 22:08:50] [Rank 0] step:4901/10000 train_time:427536ms step_avg:87.23ms +[2025-08-22 22:08:52] [Rank 0] step:4921/10000 train_time:429337ms step_avg:87.25ms +[2025-08-22 22:08:52] [Rank 0] step:4921/10000 train_time:429337ms step_avg:87.25ms +[2025-08-22 22:08:54] 
[Rank 0] step:4941/10000 train_time:431141ms step_avg:87.26ms +[2025-08-22 22:08:54] [Rank 0] step:4941/10000 train_time:431141ms step_avg:87.26ms +[2025-08-22 22:08:56] [Rank 0] step:4961/10000 train_time:432946ms step_avg:87.27ms +[2025-08-22 22:08:56] [Rank 0] step:4961/10000 train_time:432946ms step_avg:87.27ms +[2025-08-22 22:08:58] [Rank 0] step:4981/10000 train_time:434755ms step_avg:87.28ms +[2025-08-22 22:08:58] [Rank 0] step:4981/10000 train_time:434755ms step_avg:87.28ms +[2025-08-22 22:08:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:08:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:09:13] [Rank 0] PRINT: step:5000/10000 val_loss:3.9791 svd_entropy: attn_qk:H=0.6777,top10E=0.37,eRank=105.2,q75/q25=74.56 attn_vo:H=0.5903,top10E=0.48,eRank=74.0,q75/q25=88.36 mlp_w1:H=0.7693,top10E=0.29,eRank=205.4,q75/q25=7.22 mlp_w2:H=0.9450,top10E=0.06,eRank=546.7,q75/q25=4.25 vo_prod:H=0.4834,top10E=0.66,eRank=36.6,q75/q25=7430.48 train_time:436744ms step_avg:87.35ms +[2025-08-22 22:09:13] [Rank 0] PRINT: step:5000/10000 val_loss:3.9791 svd_entropy: attn_qk:H=0.6777,top10E=0.37,eRank=105.2,q75/q25=74.56 attn_vo:H=0.5903,top10E=0.48,eRank=74.0,q75/q25=88.36 mlp_w1:H=0.7693,top10E=0.29,eRank=205.4,q75/q25=7.22 mlp_w2:H=0.9450,top10E=0.06,eRank=546.7,q75/q25=4.25 vo_prod:H=0.4834,top10E=0.66,eRank=36.6,q75/q25=7430.48 train_time:436744ms step_avg:87.35ms +[2025-08-22 22:09:13] [Rank 0] step:5001/10000 train_time:436757ms step_avg:87.33ms +[2025-08-22 22:09:13] [Rank 0] step:5001/10000 train_time:436757ms step_avg:87.33ms +[2025-08-22 22:09:15] [Rank 0] step:5021/10000 train_time:438400ms step_avg:87.31ms +[2025-08-22 22:09:15] [Rank 0] step:5021/10000 train_time:438400ms step_avg:87.31ms +[2025-08-22 22:09:17] [Rank 0] step:5041/10000 train_time:440203ms step_avg:87.32ms 
+[2025-08-22 22:09:17] [Rank 0] step:5041/10000 train_time:440203ms step_avg:87.32ms +[2025-08-22 22:09:18] [Rank 0] step:5061/10000 train_time:442006ms step_avg:87.34ms +[2025-08-22 22:09:18] [Rank 0] step:5061/10000 train_time:442006ms step_avg:87.34ms +[2025-08-22 22:09:20] [Rank 0] step:5081/10000 train_time:443814ms step_avg:87.35ms +[2025-08-22 22:09:20] [Rank 0] step:5081/10000 train_time:443814ms step_avg:87.35ms +[2025-08-22 22:09:22] [Rank 0] step:5101/10000 train_time:445620ms step_avg:87.36ms +[2025-08-22 22:09:22] [Rank 0] step:5101/10000 train_time:445620ms step_avg:87.36ms +[2025-08-22 22:09:24] [Rank 0] step:5121/10000 train_time:447427ms step_avg:87.37ms +[2025-08-22 22:09:24] [Rank 0] step:5121/10000 train_time:447427ms step_avg:87.37ms +[2025-08-22 22:09:26] [Rank 0] step:5141/10000 train_time:449239ms step_avg:87.38ms +[2025-08-22 22:09:26] [Rank 0] step:5141/10000 train_time:449239ms step_avg:87.38ms +[2025-08-22 22:09:27] [Rank 0] step:5161/10000 train_time:451045ms step_avg:87.39ms +[2025-08-22 22:09:27] [Rank 0] step:5161/10000 train_time:451045ms step_avg:87.39ms +[2025-08-22 22:09:29] [Rank 0] step:5181/10000 train_time:452855ms step_avg:87.41ms +[2025-08-22 22:09:29] [Rank 0] step:5181/10000 train_time:452855ms step_avg:87.41ms +[2025-08-22 22:09:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:09:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:09:45] [Rank 0] PRINT: step:5200/10000 val_loss:3.9646 svd_entropy: attn_qk:H=0.6793,top10E=0.37,eRank=106.5,q75/q25=74.07 attn_vo:H=0.5934,top10E=0.47,eRank=75.5,q75/q25=88.52 mlp_w1:H=0.7705,top10E=0.29,eRank=208.0,q75/q25=7.17 mlp_w2:H=0.9451,top10E=0.06,eRank=547.0,q75/q25=4.24 vo_prod:H=0.4866,top10E=0.66,eRank=37.4,q75/q25=7384.29 train_time:454867ms step_avg:87.47ms +[2025-08-22 22:09:45] [Rank 0] PRINT: step:5200/10000 val_loss:3.9646 svd_entropy: attn_qk:H=0.6793,top10E=0.37,eRank=106.5,q75/q25=74.07 attn_vo:H=0.5934,top10E=0.47,eRank=75.5,q75/q25=88.52 mlp_w1:H=0.7705,top10E=0.29,eRank=208.0,q75/q25=7.17 mlp_w2:H=0.9451,top10E=0.06,eRank=547.0,q75/q25=4.24 vo_prod:H=0.4866,top10E=0.66,eRank=37.4,q75/q25=7384.29 train_time:454867ms step_avg:87.47ms +[2025-08-22 22:09:45] [Rank 0] step:5201/10000 train_time:454880ms step_avg:87.46ms +[2025-08-22 22:09:45] [Rank 0] step:5201/10000 train_time:454880ms step_avg:87.46ms +[2025-08-22 22:09:47] [Rank 0] step:5221/10000 train_time:456551ms step_avg:87.45ms +[2025-08-22 22:09:47] [Rank 0] step:5221/10000 train_time:456551ms step_avg:87.45ms +[2025-08-22 22:09:48] [Rank 0] step:5241/10000 train_time:458378ms step_avg:87.46ms +[2025-08-22 22:09:48] [Rank 0] step:5241/10000 train_time:458378ms step_avg:87.46ms +[2025-08-22 22:09:50] [Rank 0] step:5261/10000 train_time:460209ms step_avg:87.48ms +[2025-08-22 22:09:50] [Rank 0] step:5261/10000 train_time:460209ms step_avg:87.48ms +[2025-08-22 22:09:52] [Rank 0] step:5281/10000 train_time:462039ms step_avg:87.49ms +[2025-08-22 22:09:52] [Rank 0] step:5281/10000 train_time:462039ms step_avg:87.49ms +[2025-08-22 22:09:54] [Rank 0] step:5301/10000 train_time:463879ms step_avg:87.51ms +[2025-08-22 22:09:54] [Rank 0] step:5301/10000 train_time:463879ms step_avg:87.51ms +[2025-08-22 22:09:56] [Rank 0] step:5321/10000 train_time:465710ms step_avg:87.52ms +[2025-08-22 22:09:56] [Rank 0] step:5321/10000 train_time:465710ms step_avg:87.52ms +[2025-08-22 22:09:57] 
[Rank 0] step:5341/10000 train_time:467540ms step_avg:87.54ms +[2025-08-22 22:09:57] [Rank 0] step:5341/10000 train_time:467540ms step_avg:87.54ms +[2025-08-22 22:09:59] [Rank 0] step:5361/10000 train_time:469373ms step_avg:87.55ms +[2025-08-22 22:09:59] [Rank 0] step:5361/10000 train_time:469373ms step_avg:87.55ms +[2025-08-22 22:10:01] [Rank 0] step:5381/10000 train_time:471205ms step_avg:87.57ms +[2025-08-22 22:10:01] [Rank 0] step:5381/10000 train_time:471205ms step_avg:87.57ms +[2025-08-22 22:10:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:10:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:10:17] [Rank 0] PRINT: step:5400/10000 val_loss:3.9487 svd_entropy: attn_qk:H=0.6807,top10E=0.37,eRank=107.6,q75/q25=73.90 attn_vo:H=0.5964,top10E=0.47,eRank=76.9,q75/q25=89.23 mlp_w1:H=0.7724,top10E=0.28,eRank=210.8,q75/q25=7.13 mlp_w2:H=0.9452,top10E=0.06,eRank=547.4,q75/q25=4.23 vo_prod:H=0.4900,top10E=0.65,eRank=38.1,q75/q25=7627.59 train_time:473221ms step_avg:87.63ms +[2025-08-22 22:10:17] [Rank 0] PRINT: step:5400/10000 val_loss:3.9487 svd_entropy: attn_qk:H=0.6807,top10E=0.37,eRank=107.6,q75/q25=73.90 attn_vo:H=0.5964,top10E=0.47,eRank=76.9,q75/q25=89.23 mlp_w1:H=0.7724,top10E=0.28,eRank=210.8,q75/q25=7.13 mlp_w2:H=0.9452,top10E=0.06,eRank=547.4,q75/q25=4.23 vo_prod:H=0.4900,top10E=0.65,eRank=38.1,q75/q25=7627.59 train_time:473221ms step_avg:87.63ms +[2025-08-22 22:10:17] [Rank 0] step:5401/10000 train_time:473233ms step_avg:87.62ms +[2025-08-22 22:10:17] [Rank 0] step:5401/10000 train_time:473233ms step_avg:87.62ms +[2025-08-22 22:10:19] [Rank 0] step:5421/10000 train_time:474881ms step_avg:87.60ms +[2025-08-22 22:10:19] [Rank 0] step:5421/10000 train_time:474881ms step_avg:87.60ms +[2025-08-22 22:10:21] [Rank 0] step:5441/10000 train_time:476707ms step_avg:87.61ms 
+[2025-08-22 22:10:21] [Rank 0] step:5441/10000 train_time:476707ms step_avg:87.61ms +[2025-08-22 22:10:22] [Rank 0] step:5461/10000 train_time:478537ms step_avg:87.63ms +[2025-08-22 22:10:22] [Rank 0] step:5461/10000 train_time:478537ms step_avg:87.63ms +[2025-08-22 22:10:24] [Rank 0] step:5481/10000 train_time:480366ms step_avg:87.64ms +[2025-08-22 22:10:24] [Rank 0] step:5481/10000 train_time:480366ms step_avg:87.64ms +[2025-08-22 22:10:26] [Rank 0] step:5501/10000 train_time:482202ms step_avg:87.66ms +[2025-08-22 22:10:26] [Rank 0] step:5501/10000 train_time:482202ms step_avg:87.66ms +[2025-08-22 22:10:28] [Rank 0] step:5521/10000 train_time:484037ms step_avg:87.67ms +[2025-08-22 22:10:28] [Rank 0] step:5521/10000 train_time:484037ms step_avg:87.67ms +[2025-08-22 22:10:30] [Rank 0] step:5541/10000 train_time:485870ms step_avg:87.69ms +[2025-08-22 22:10:30] [Rank 0] step:5541/10000 train_time:485870ms step_avg:87.69ms +[2025-08-22 22:10:32] [Rank 0] step:5561/10000 train_time:487701ms step_avg:87.70ms +[2025-08-22 22:10:32] [Rank 0] step:5561/10000 train_time:487701ms step_avg:87.70ms +[2025-08-22 22:10:33] [Rank 0] step:5581/10000 train_time:489536ms step_avg:87.71ms +[2025-08-22 22:10:33] [Rank 0] step:5581/10000 train_time:489536ms step_avg:87.71ms +[2025-08-22 22:10:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:10:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:10:49] [Rank 0] PRINT: step:5600/10000 val_loss:3.9398 svd_entropy: attn_qk:H=0.6823,top10E=0.37,eRank=108.8,q75/q25=74.04 attn_vo:H=0.5992,top10E=0.46,eRank=78.2,q75/q25=89.36 mlp_w1:H=0.7744,top10E=0.28,eRank=213.6,q75/q25=7.05 mlp_w2:H=0.9453,top10E=0.06,eRank=547.8,q75/q25=4.22 vo_prod:H=0.4929,top10E=0.65,eRank=38.8,q75/q25=7813.10 train_time:491555ms step_avg:87.78ms +[2025-08-22 22:10:49] [Rank 0] PRINT: step:5600/10000 val_loss:3.9398 svd_entropy: attn_qk:H=0.6823,top10E=0.37,eRank=108.8,q75/q25=74.04 attn_vo:H=0.5992,top10E=0.46,eRank=78.2,q75/q25=89.36 mlp_w1:H=0.7744,top10E=0.28,eRank=213.6,q75/q25=7.05 mlp_w2:H=0.9453,top10E=0.06,eRank=547.8,q75/q25=4.22 vo_prod:H=0.4929,top10E=0.65,eRank=38.8,q75/q25=7813.10 train_time:491555ms step_avg:87.78ms +[2025-08-22 22:10:49] [Rank 0] step:5601/10000 train_time:491567ms step_avg:87.76ms +[2025-08-22 22:10:49] [Rank 0] step:5601/10000 train_time:491567ms step_avg:87.76ms +[2025-08-22 22:10:51] [Rank 0] step:5621/10000 train_time:493237ms step_avg:87.75ms +[2025-08-22 22:10:51] [Rank 0] step:5621/10000 train_time:493237ms step_avg:87.75ms +[2025-08-22 22:10:52] [Rank 0] step:5641/10000 train_time:495066ms step_avg:87.76ms +[2025-08-22 22:10:52] [Rank 0] step:5641/10000 train_time:495066ms step_avg:87.76ms +[2025-08-22 22:10:54] [Rank 0] step:5661/10000 train_time:496892ms step_avg:87.77ms +[2025-08-22 22:10:54] [Rank 0] step:5661/10000 train_time:496892ms step_avg:87.77ms +[2025-08-22 22:10:56] [Rank 0] step:5681/10000 train_time:498725ms step_avg:87.79ms +[2025-08-22 22:10:56] [Rank 0] step:5681/10000 train_time:498725ms step_avg:87.79ms +[2025-08-22 22:10:58] [Rank 0] step:5701/10000 train_time:500557ms step_avg:87.80ms +[2025-08-22 22:10:58] [Rank 0] step:5701/10000 train_time:500557ms step_avg:87.80ms +[2025-08-22 22:11:00] [Rank 0] step:5721/10000 train_time:502393ms step_avg:87.82ms +[2025-08-22 22:11:00] [Rank 0] step:5721/10000 train_time:502393ms step_avg:87.82ms +[2025-08-22 22:11:02] 
[Rank 0] step:5741/10000 train_time:504229ms step_avg:87.83ms +[2025-08-22 22:11:02] [Rank 0] step:5741/10000 train_time:504229ms step_avg:87.83ms +[2025-08-22 22:11:03] [Rank 0] step:5761/10000 train_time:506064ms step_avg:87.84ms +[2025-08-22 22:11:03] [Rank 0] step:5761/10000 train_time:506064ms step_avg:87.84ms +[2025-08-22 22:11:05] [Rank 0] step:5781/10000 train_time:507900ms step_avg:87.86ms +[2025-08-22 22:11:05] [Rank 0] step:5781/10000 train_time:507900ms step_avg:87.86ms +[2025-08-22 22:11:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:11:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:11:21] [Rank 0] PRINT: step:5800/10000 val_loss:3.9364 svd_entropy: attn_qk:H=0.6840,top10E=0.37,eRank=110.1,q75/q25=73.68 attn_vo:H=0.6019,top10E=0.46,eRank=79.5,q75/q25=89.71 mlp_w1:H=0.7758,top10E=0.28,eRank=216.0,q75/q25=7.01 mlp_w2:H=0.9453,top10E=0.06,eRank=548.1,q75/q25=4.22 vo_prod:H=0.4953,top10E=0.64,eRank=39.3,q75/q25=8042.41 train_time:509924ms step_avg:87.92ms +[2025-08-22 22:11:21] [Rank 0] PRINT: step:5800/10000 val_loss:3.9364 svd_entropy: attn_qk:H=0.6840,top10E=0.37,eRank=110.1,q75/q25=73.68 attn_vo:H=0.6019,top10E=0.46,eRank=79.5,q75/q25=89.71 mlp_w1:H=0.7758,top10E=0.28,eRank=216.0,q75/q25=7.01 mlp_w2:H=0.9453,top10E=0.06,eRank=548.1,q75/q25=4.22 vo_prod:H=0.4953,top10E=0.64,eRank=39.3,q75/q25=8042.41 train_time:509924ms step_avg:87.92ms +[2025-08-22 22:11:21] [Rank 0] step:5801/10000 train_time:509935ms step_avg:87.90ms +[2025-08-22 22:11:21] [Rank 0] step:5801/10000 train_time:509935ms step_avg:87.90ms +[2025-08-22 22:11:23] [Rank 0] step:5821/10000 train_time:511592ms step_avg:87.89ms +[2025-08-22 22:11:23] [Rank 0] step:5821/10000 train_time:511592ms step_avg:87.89ms +[2025-08-22 22:11:24] [Rank 0] step:5841/10000 train_time:513420ms step_avg:87.90ms 
+[2025-08-22 22:11:24] [Rank 0] step:5841/10000 train_time:513420ms step_avg:87.90ms +[2025-08-22 22:11:26] [Rank 0] step:5861/10000 train_time:515260ms step_avg:87.91ms +[2025-08-22 22:11:26] [Rank 0] step:5861/10000 train_time:515260ms step_avg:87.91ms +[2025-08-22 22:11:28] [Rank 0] step:5881/10000 train_time:517099ms step_avg:87.93ms +[2025-08-22 22:11:28] [Rank 0] step:5881/10000 train_time:517099ms step_avg:87.93ms +[2025-08-22 22:11:30] [Rank 0] step:5901/10000 train_time:518936ms step_avg:87.94ms +[2025-08-22 22:11:30] [Rank 0] step:5901/10000 train_time:518936ms step_avg:87.94ms +[2025-08-22 22:11:32] [Rank 0] step:5921/10000 train_time:520778ms step_avg:87.95ms +[2025-08-22 22:11:32] [Rank 0] step:5921/10000 train_time:520778ms step_avg:87.95ms +[2025-08-22 22:11:34] [Rank 0] step:5941/10000 train_time:522623ms step_avg:87.97ms +[2025-08-22 22:11:34] [Rank 0] step:5941/10000 train_time:522623ms step_avg:87.97ms +[2025-08-22 22:11:35] [Rank 0] step:5961/10000 train_time:524467ms step_avg:87.98ms +[2025-08-22 22:11:35] [Rank 0] step:5961/10000 train_time:524467ms step_avg:87.98ms +[2025-08-22 22:11:37] [Rank 0] step:5981/10000 train_time:526310ms step_avg:88.00ms +[2025-08-22 22:11:37] [Rank 0] step:5981/10000 train_time:526310ms step_avg:88.00ms +[2025-08-22 22:11:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:11:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:11:53] [Rank 0] PRINT: step:6000/10000 val_loss:3.9105 svd_entropy: attn_qk:H=0.6854,top10E=0.36,eRank=111.2,q75/q25=74.16 attn_vo:H=0.6046,top10E=0.45,eRank=80.8,q75/q25=90.78 mlp_w1:H=0.7772,top10E=0.28,eRank=218.4,q75/q25=6.97 mlp_w2:H=0.9455,top10E=0.06,eRank=548.5,q75/q25=4.21 vo_prod:H=0.4984,top10E=0.64,eRank=40.0,q75/q25=8359.63 train_time:528335ms step_avg:88.06ms +[2025-08-22 22:11:53] [Rank 0] PRINT: step:6000/10000 val_loss:3.9105 svd_entropy: attn_qk:H=0.6854,top10E=0.36,eRank=111.2,q75/q25=74.16 attn_vo:H=0.6046,top10E=0.45,eRank=80.8,q75/q25=90.78 mlp_w1:H=0.7772,top10E=0.28,eRank=218.4,q75/q25=6.97 mlp_w2:H=0.9455,top10E=0.06,eRank=548.5,q75/q25=4.21 vo_prod:H=0.4984,top10E=0.64,eRank=40.0,q75/q25=8359.63 train_time:528335ms step_avg:88.06ms +[2025-08-22 22:11:53] [Rank 0] step:6001/10000 train_time:528347ms step_avg:88.04ms +[2025-08-22 22:11:53] [Rank 0] step:6001/10000 train_time:528347ms step_avg:88.04ms +[2025-08-22 22:11:55] [Rank 0] step:6021/10000 train_time:530023ms step_avg:88.03ms +[2025-08-22 22:11:55] [Rank 0] step:6021/10000 train_time:530023ms step_avg:88.03ms +[2025-08-22 22:11:57] [Rank 0] step:6041/10000 train_time:531857ms step_avg:88.04ms +[2025-08-22 22:11:57] [Rank 0] step:6041/10000 train_time:531857ms step_avg:88.04ms +[2025-08-22 22:11:58] [Rank 0] step:6061/10000 train_time:533700ms step_avg:88.05ms +[2025-08-22 22:11:58] [Rank 0] step:6061/10000 train_time:533700ms step_avg:88.05ms +[2025-08-22 22:12:00] [Rank 0] step:6081/10000 train_time:535535ms step_avg:88.07ms +[2025-08-22 22:12:00] [Rank 0] step:6081/10000 train_time:535535ms step_avg:88.07ms +[2025-08-22 22:12:02] [Rank 0] step:6101/10000 train_time:537378ms step_avg:88.08ms +[2025-08-22 22:12:02] [Rank 0] step:6101/10000 train_time:537378ms step_avg:88.08ms +[2025-08-22 22:12:04] [Rank 0] step:6121/10000 train_time:539479ms step_avg:88.14ms +[2025-08-22 22:12:04] [Rank 0] step:6121/10000 train_time:539479ms step_avg:88.14ms +[2025-08-22 22:12:06] 
[Rank 0] step:6141/10000 train_time:541328ms step_avg:88.15ms +[2025-08-22 22:12:06] [Rank 0] step:6141/10000 train_time:541328ms step_avg:88.15ms +[2025-08-22 22:12:08] [Rank 0] step:6161/10000 train_time:543169ms step_avg:88.16ms +[2025-08-22 22:12:08] [Rank 0] step:6161/10000 train_time:543169ms step_avg:88.16ms +[2025-08-22 22:12:10] [Rank 0] step:6181/10000 train_time:545009ms step_avg:88.17ms +[2025-08-22 22:12:10] [Rank 0] step:6181/10000 train_time:545009ms step_avg:88.17ms +[2025-08-22 22:12:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:12:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:12:25] [Rank 0] PRINT: step:6200/10000 val_loss:3.8970 svd_entropy: attn_qk:H=0.6868,top10E=0.36,eRank=112.3,q75/q25=73.91 attn_vo:H=0.6068,top10E=0.45,eRank=82.0,q75/q25=90.88 mlp_w1:H=0.7788,top10E=0.28,eRank=220.8,q75/q25=6.94 mlp_w2:H=0.9456,top10E=0.06,eRank=548.9,q75/q25=4.20 vo_prod:H=0.5005,top10E=0.63,eRank=40.6,q75/q25=8482.62 train_time:547036ms step_avg:88.23ms +[2025-08-22 22:12:25] [Rank 0] PRINT: step:6200/10000 val_loss:3.8970 svd_entropy: attn_qk:H=0.6868,top10E=0.36,eRank=112.3,q75/q25=73.91 attn_vo:H=0.6068,top10E=0.45,eRank=82.0,q75/q25=90.88 mlp_w1:H=0.7788,top10E=0.28,eRank=220.8,q75/q25=6.94 mlp_w2:H=0.9456,top10E=0.06,eRank=548.9,q75/q25=4.20 vo_prod:H=0.5005,top10E=0.63,eRank=40.6,q75/q25=8482.62 train_time:547036ms step_avg:88.23ms +[2025-08-22 22:12:25] [Rank 0] step:6201/10000 train_time:547048ms step_avg:88.22ms +[2025-08-22 22:12:25] [Rank 0] step:6201/10000 train_time:547048ms step_avg:88.22ms +[2025-08-22 22:12:27] [Rank 0] step:6221/10000 train_time:548723ms step_avg:88.21ms +[2025-08-22 22:12:27] [Rank 0] step:6221/10000 train_time:548723ms step_avg:88.21ms +[2025-08-22 22:12:29] [Rank 0] step:6241/10000 train_time:550552ms step_avg:88.22ms 
+[2025-08-22 22:12:29] [Rank 0] step:6241/10000 train_time:550552ms step_avg:88.22ms +[2025-08-22 22:12:31] [Rank 0] step:6261/10000 train_time:552387ms step_avg:88.23ms +[2025-08-22 22:12:31] [Rank 0] step:6261/10000 train_time:552387ms step_avg:88.23ms +[2025-08-22 22:12:32] [Rank 0] step:6281/10000 train_time:554227ms step_avg:88.24ms +[2025-08-22 22:12:32] [Rank 0] step:6281/10000 train_time:554227ms step_avg:88.24ms +[2025-08-22 22:12:34] [Rank 0] step:6301/10000 train_time:556065ms step_avg:88.25ms +[2025-08-22 22:12:34] [Rank 0] step:6301/10000 train_time:556065ms step_avg:88.25ms +[2025-08-22 22:12:36] [Rank 0] step:6321/10000 train_time:557901ms step_avg:88.26ms +[2025-08-22 22:12:36] [Rank 0] step:6321/10000 train_time:557901ms step_avg:88.26ms +[2025-08-22 22:12:38] [Rank 0] step:6341/10000 train_time:559743ms step_avg:88.27ms +[2025-08-22 22:12:38] [Rank 0] step:6341/10000 train_time:559743ms step_avg:88.27ms +[2025-08-22 22:12:40] [Rank 0] step:6361/10000 train_time:561589ms step_avg:88.29ms +[2025-08-22 22:12:40] [Rank 0] step:6361/10000 train_time:561589ms step_avg:88.29ms +[2025-08-22 22:12:42] [Rank 0] step:6381/10000 train_time:563433ms step_avg:88.30ms +[2025-08-22 22:12:42] [Rank 0] step:6381/10000 train_time:563433ms step_avg:88.30ms +[2025-08-22 22:12:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:12:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:12:57] [Rank 0] PRINT: step:6400/10000 val_loss:3.8824 svd_entropy: attn_qk:H=0.6880,top10E=0.36,eRank=113.3,q75/q25=74.54 attn_vo:H=0.6091,top10E=0.44,eRank=83.0,q75/q25=91.34 mlp_w1:H=0.7800,top10E=0.28,eRank=222.7,q75/q25=6.90 mlp_w2:H=0.9457,top10E=0.06,eRank=549.3,q75/q25=4.21 vo_prod:H=0.5031,top10E=0.63,eRank=41.2,q75/q25=8755.73 train_time:565454ms step_avg:88.35ms +[2025-08-22 22:12:57] [Rank 0] PRINT: step:6400/10000 val_loss:3.8824 svd_entropy: attn_qk:H=0.6880,top10E=0.36,eRank=113.3,q75/q25=74.54 attn_vo:H=0.6091,top10E=0.44,eRank=83.0,q75/q25=91.34 mlp_w1:H=0.7800,top10E=0.28,eRank=222.7,q75/q25=6.90 mlp_w2:H=0.9457,top10E=0.06,eRank=549.3,q75/q25=4.21 vo_prod:H=0.5031,top10E=0.63,eRank=41.2,q75/q25=8755.73 train_time:565454ms step_avg:88.35ms +[2025-08-22 22:12:57] [Rank 0] step:6401/10000 train_time:565466ms step_avg:88.34ms +[2025-08-22 22:12:57] [Rank 0] step:6401/10000 train_time:565466ms step_avg:88.34ms +[2025-08-22 22:12:59] [Rank 0] step:6421/10000 train_time:567127ms step_avg:88.32ms +[2025-08-22 22:12:59] [Rank 0] step:6421/10000 train_time:567127ms step_avg:88.32ms +[2025-08-22 22:13:01] [Rank 0] step:6441/10000 train_time:568961ms step_avg:88.33ms +[2025-08-22 22:13:01] [Rank 0] step:6441/10000 train_time:568961ms step_avg:88.33ms +[2025-08-22 22:13:03] [Rank 0] step:6461/10000 train_time:570801ms step_avg:88.35ms +[2025-08-22 22:13:03] [Rank 0] step:6461/10000 train_time:570801ms step_avg:88.35ms +[2025-08-22 22:13:04] [Rank 0] step:6481/10000 train_time:572648ms step_avg:88.36ms +[2025-08-22 22:13:04] [Rank 0] step:6481/10000 train_time:572648ms step_avg:88.36ms +[2025-08-22 22:13:06] [Rank 0] step:6501/10000 train_time:574481ms step_avg:88.37ms +[2025-08-22 22:13:06] [Rank 0] step:6501/10000 train_time:574481ms step_avg:88.37ms +[2025-08-22 22:13:08] [Rank 0] step:6521/10000 train_time:576317ms step_avg:88.38ms +[2025-08-22 22:13:08] [Rank 0] step:6521/10000 train_time:576317ms step_avg:88.38ms +[2025-08-22 22:13:10] 
[Rank 0] step:6541/10000 train_time:578160ms step_avg:88.39ms +[2025-08-22 22:13:10] [Rank 0] step:6541/10000 train_time:578160ms step_avg:88.39ms +[2025-08-22 22:13:12] [Rank 0] step:6561/10000 train_time:580003ms step_avg:88.40ms +[2025-08-22 22:13:12] [Rank 0] step:6561/10000 train_time:580003ms step_avg:88.40ms +[2025-08-22 22:13:14] [Rank 0] step:6581/10000 train_time:581843ms step_avg:88.41ms +[2025-08-22 22:13:14] [Rank 0] step:6581/10000 train_time:581843ms step_avg:88.41ms +[2025-08-22 22:13:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:13:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:13:29] [Rank 0] PRINT: step:6600/10000 val_loss:3.8701 svd_entropy: attn_qk:H=0.6891,top10E=0.36,eRank=114.2,q75/q25=74.27 attn_vo:H=0.6111,top10E=0.44,eRank=84.1,q75/q25=92.20 mlp_w1:H=0.7813,top10E=0.27,eRank=224.6,q75/q25=6.86 mlp_w2:H=0.9458,top10E=0.06,eRank=549.7,q75/q25=4.20 vo_prod:H=0.5055,top10E=0.63,eRank=41.8,q75/q25=8993.25 train_time:583871ms step_avg:88.47ms +[2025-08-22 22:13:29] [Rank 0] PRINT: step:6600/10000 val_loss:3.8701 svd_entropy: attn_qk:H=0.6891,top10E=0.36,eRank=114.2,q75/q25=74.27 attn_vo:H=0.6111,top10E=0.44,eRank=84.1,q75/q25=92.20 mlp_w1:H=0.7813,top10E=0.27,eRank=224.6,q75/q25=6.86 mlp_w2:H=0.9458,top10E=0.06,eRank=549.7,q75/q25=4.20 vo_prod:H=0.5055,top10E=0.63,eRank=41.8,q75/q25=8993.25 train_time:583871ms step_avg:88.47ms +[2025-08-22 22:13:29] [Rank 0] step:6601/10000 train_time:583884ms step_avg:88.45ms +[2025-08-22 22:13:29] [Rank 0] step:6601/10000 train_time:583884ms step_avg:88.45ms +[2025-08-22 22:13:31] [Rank 0] step:6621/10000 train_time:585562ms step_avg:88.44ms +[2025-08-22 22:13:31] [Rank 0] step:6621/10000 train_time:585562ms step_avg:88.44ms +[2025-08-22 22:13:33] [Rank 0] step:6641/10000 train_time:587406ms step_avg:88.45ms 
+[2025-08-22 22:13:33] [Rank 0] step:6641/10000 train_time:587406ms step_avg:88.45ms +[2025-08-22 22:13:35] [Rank 0] step:6661/10000 train_time:589251ms step_avg:88.46ms +[2025-08-22 22:13:35] [Rank 0] step:6661/10000 train_time:589251ms step_avg:88.46ms +[2025-08-22 22:13:36] [Rank 0] step:6681/10000 train_time:591110ms step_avg:88.48ms +[2025-08-22 22:13:36] [Rank 0] step:6681/10000 train_time:591110ms step_avg:88.48ms +[2025-08-22 22:13:38] [Rank 0] step:6701/10000 train_time:592991ms step_avg:88.49ms +[2025-08-22 22:13:38] [Rank 0] step:6701/10000 train_time:592991ms step_avg:88.49ms +[2025-08-22 22:13:40] [Rank 0] step:6721/10000 train_time:594868ms step_avg:88.51ms +[2025-08-22 22:13:40] [Rank 0] step:6721/10000 train_time:594868ms step_avg:88.51ms +[2025-08-22 22:13:42] [Rank 0] step:6741/10000 train_time:596740ms step_avg:88.52ms +[2025-08-22 22:13:42] [Rank 0] step:6741/10000 train_time:596740ms step_avg:88.52ms +[2025-08-22 22:13:44] [Rank 0] step:6761/10000 train_time:598613ms step_avg:88.54ms +[2025-08-22 22:13:44] [Rank 0] step:6761/10000 train_time:598613ms step_avg:88.54ms +[2025-08-22 22:13:46] [Rank 0] step:6781/10000 train_time:600488ms step_avg:88.55ms +[2025-08-22 22:13:46] [Rank 0] step:6781/10000 train_time:600488ms step_avg:88.55ms +[2025-08-22 22:13:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:13:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:14:01] [Rank 0] PRINT: step:6800/10000 val_loss:3.8529 svd_entropy: attn_qk:H=0.6901,top10E=0.36,eRank=115.1,q75/q25=74.20 attn_vo:H=0.6131,top10E=0.44,eRank=85.1,q75/q25=91.80 mlp_w1:H=0.7824,top10E=0.27,eRank=226.5,q75/q25=6.83 mlp_w2:H=0.9459,top10E=0.06,eRank=550.0,q75/q25=4.18 vo_prod:H=0.5075,top10E=0.62,eRank=42.3,q75/q25=9173.53 train_time:602557ms step_avg:88.61ms +[2025-08-22 22:14:01] [Rank 0] PRINT: step:6800/10000 val_loss:3.8529 svd_entropy: attn_qk:H=0.6901,top10E=0.36,eRank=115.1,q75/q25=74.20 attn_vo:H=0.6131,top10E=0.44,eRank=85.1,q75/q25=91.80 mlp_w1:H=0.7824,top10E=0.27,eRank=226.5,q75/q25=6.83 mlp_w2:H=0.9459,top10E=0.06,eRank=550.0,q75/q25=4.18 vo_prod:H=0.5075,top10E=0.62,eRank=42.3,q75/q25=9173.53 train_time:602557ms step_avg:88.61ms +[2025-08-22 22:14:01] [Rank 0] step:6801/10000 train_time:602571ms step_avg:88.60ms +[2025-08-22 22:14:01] [Rank 0] step:6801/10000 train_time:602571ms step_avg:88.60ms +[2025-08-22 22:14:03] [Rank 0] step:6821/10000 train_time:604253ms step_avg:88.59ms +[2025-08-22 22:14:03] [Rank 0] step:6821/10000 train_time:604253ms step_avg:88.59ms +[2025-08-22 22:14:05] [Rank 0] step:6841/10000 train_time:606117ms step_avg:88.60ms +[2025-08-22 22:14:05] [Rank 0] step:6841/10000 train_time:606117ms step_avg:88.60ms +[2025-08-22 22:14:07] [Rank 0] step:6861/10000 train_time:607983ms step_avg:88.61ms +[2025-08-22 22:14:07] [Rank 0] step:6861/10000 train_time:607983ms step_avg:88.61ms +[2025-08-22 22:14:09] [Rank 0] step:6881/10000 train_time:609848ms step_avg:88.63ms +[2025-08-22 22:14:09] [Rank 0] step:6881/10000 train_time:609848ms step_avg:88.63ms +[2025-08-22 22:14:11] [Rank 0] step:6901/10000 train_time:611716ms step_avg:88.64ms +[2025-08-22 22:14:11] [Rank 0] step:6901/10000 train_time:611716ms step_avg:88.64ms +[2025-08-22 22:14:13] [Rank 0] step:6921/10000 train_time:613576ms step_avg:88.65ms +[2025-08-22 22:14:13] [Rank 0] step:6921/10000 train_time:613576ms step_avg:88.65ms +[2025-08-22 22:14:14] 
[Rank 0] step:6941/10000 train_time:615450ms step_avg:88.67ms +[2025-08-22 22:14:14] [Rank 0] step:6941/10000 train_time:615450ms step_avg:88.67ms +[2025-08-22 22:14:16] [Rank 0] step:6961/10000 train_time:617331ms step_avg:88.68ms +[2025-08-22 22:14:16] [Rank 0] step:6961/10000 train_time:617331ms step_avg:88.68ms +[2025-08-22 22:14:18] [Rank 0] step:6981/10000 train_time:619207ms step_avg:88.70ms +[2025-08-22 22:14:18] [Rank 0] step:6981/10000 train_time:619207ms step_avg:88.70ms +[2025-08-22 22:14:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:14:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:14:34] [Rank 0] PRINT: step:7000/10000 val_loss:3.8353 svd_entropy: attn_qk:H=0.6911,top10E=0.36,eRank=115.8,q75/q25=74.03 attn_vo:H=0.6148,top10E=0.43,eRank=86.0,q75/q25=92.28 mlp_w1:H=0.7833,top10E=0.27,eRank=228.0,q75/q25=6.80 mlp_w2:H=0.9461,top10E=0.06,eRank=550.4,q75/q25=4.18 vo_prod:H=0.5095,top10E=0.62,eRank=42.8,q75/q25=9349.19 train_time:621267ms step_avg:88.75ms +[2025-08-22 22:14:34] [Rank 0] PRINT: step:7000/10000 val_loss:3.8353 svd_entropy: attn_qk:H=0.6911,top10E=0.36,eRank=115.8,q75/q25=74.03 attn_vo:H=0.6148,top10E=0.43,eRank=86.0,q75/q25=92.28 mlp_w1:H=0.7833,top10E=0.27,eRank=228.0,q75/q25=6.80 mlp_w2:H=0.9461,top10E=0.06,eRank=550.4,q75/q25=4.18 vo_prod:H=0.5095,top10E=0.62,eRank=42.8,q75/q25=9349.19 train_time:621267ms step_avg:88.75ms +[2025-08-22 22:14:34] [Rank 0] step:7001/10000 train_time:621280ms step_avg:88.74ms +[2025-08-22 22:14:34] [Rank 0] step:7001/10000 train_time:621280ms step_avg:88.74ms +[2025-08-22 22:14:36] [Rank 0] step:7021/10000 train_time:622981ms step_avg:88.73ms +[2025-08-22 22:14:36] [Rank 0] step:7021/10000 train_time:622981ms step_avg:88.73ms +[2025-08-22 22:14:37] [Rank 0] step:7041/10000 train_time:624846ms step_avg:88.74ms 
+[2025-08-22 22:14:37] [Rank 0] step:7041/10000 train_time:624846ms step_avg:88.74ms +[2025-08-22 22:14:39] [Rank 0] step:7061/10000 train_time:626707ms step_avg:88.76ms +[2025-08-22 22:14:39] [Rank 0] step:7061/10000 train_time:626707ms step_avg:88.76ms +[2025-08-22 22:14:41] [Rank 0] step:7081/10000 train_time:628573ms step_avg:88.77ms +[2025-08-22 22:14:41] [Rank 0] step:7081/10000 train_time:628573ms step_avg:88.77ms +[2025-08-22 22:14:43] [Rank 0] step:7101/10000 train_time:630443ms step_avg:88.78ms +[2025-08-22 22:14:43] [Rank 0] step:7101/10000 train_time:630443ms step_avg:88.78ms +[2025-08-22 22:14:45] [Rank 0] step:7121/10000 train_time:632310ms step_avg:88.80ms +[2025-08-22 22:14:45] [Rank 0] step:7121/10000 train_time:632310ms step_avg:88.80ms +[2025-08-22 22:14:47] [Rank 0] step:7141/10000 train_time:634177ms step_avg:88.81ms +[2025-08-22 22:14:47] [Rank 0] step:7141/10000 train_time:634177ms step_avg:88.81ms +[2025-08-22 22:14:49] [Rank 0] step:7161/10000 train_time:636047ms step_avg:88.82ms +[2025-08-22 22:14:49] [Rank 0] step:7161/10000 train_time:636047ms step_avg:88.82ms +[2025-08-22 22:14:50] [Rank 0] step:7181/10000 train_time:637915ms step_avg:88.83ms +[2025-08-22 22:14:50] [Rank 0] step:7181/10000 train_time:637915ms step_avg:88.83ms +[2025-08-22 22:14:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:14:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:15:06] [Rank 0] PRINT: step:7200/10000 val_loss:3.8247 svd_entropy: attn_qk:H=0.6920,top10E=0.35,eRank=116.5,q75/q25=73.56 attn_vo:H=0.6165,top10E=0.43,eRank=86.9,q75/q25=92.60 mlp_w1:H=0.7844,top10E=0.27,eRank=229.6,q75/q25=6.77 mlp_w2:H=0.9462,top10E=0.06,eRank=550.7,q75/q25=4.19 vo_prod:H=0.5115,top10E=0.61,eRank=43.3,q75/q25=9484.36 train_time:639976ms step_avg:88.89ms +[2025-08-22 22:15:06] [Rank 0] PRINT: step:7200/10000 val_loss:3.8247 svd_entropy: attn_qk:H=0.6920,top10E=0.35,eRank=116.5,q75/q25=73.56 attn_vo:H=0.6165,top10E=0.43,eRank=86.9,q75/q25=92.60 mlp_w1:H=0.7844,top10E=0.27,eRank=229.6,q75/q25=6.77 mlp_w2:H=0.9462,top10E=0.06,eRank=550.7,q75/q25=4.19 vo_prod:H=0.5115,top10E=0.61,eRank=43.3,q75/q25=9484.36 train_time:639976ms step_avg:88.89ms +[2025-08-22 22:15:06] [Rank 0] step:7201/10000 train_time:639988ms step_avg:88.87ms +[2025-08-22 22:15:06] [Rank 0] step:7201/10000 train_time:639988ms step_avg:88.87ms +[2025-08-22 22:15:08] [Rank 0] step:7221/10000 train_time:641689ms step_avg:88.86ms +[2025-08-22 22:15:08] [Rank 0] step:7221/10000 train_time:641689ms step_avg:88.86ms +[2025-08-22 22:15:10] [Rank 0] step:7241/10000 train_time:643549ms step_avg:88.88ms +[2025-08-22 22:15:10] [Rank 0] step:7241/10000 train_time:643549ms step_avg:88.88ms +[2025-08-22 22:15:12] [Rank 0] step:7261/10000 train_time:645410ms step_avg:88.89ms +[2025-08-22 22:15:12] [Rank 0] step:7261/10000 train_time:645410ms step_avg:88.89ms +[2025-08-22 22:15:13] [Rank 0] step:7281/10000 train_time:647282ms step_avg:88.90ms +[2025-08-22 22:15:13] [Rank 0] step:7281/10000 train_time:647282ms step_avg:88.90ms +[2025-08-22 22:15:15] [Rank 0] step:7301/10000 train_time:649149ms step_avg:88.91ms +[2025-08-22 22:15:15] [Rank 0] step:7301/10000 train_time:649149ms step_avg:88.91ms +[2025-08-22 22:15:17] [Rank 0] step:7321/10000 train_time:651026ms step_avg:88.93ms +[2025-08-22 22:15:17] [Rank 0] step:7321/10000 train_time:651026ms step_avg:88.93ms +[2025-08-22 22:15:19] 
[Rank 0] step:7341/10000 train_time:652896ms step_avg:88.94ms +[2025-08-22 22:15:19] [Rank 0] step:7341/10000 train_time:652896ms step_avg:88.94ms +[2025-08-22 22:15:21] [Rank 0] step:7361/10000 train_time:654774ms step_avg:88.95ms +[2025-08-22 22:15:21] [Rank 0] step:7361/10000 train_time:654774ms step_avg:88.95ms +[2025-08-22 22:15:23] [Rank 0] step:7381/10000 train_time:656728ms step_avg:88.98ms +[2025-08-22 22:15:23] [Rank 0] step:7381/10000 train_time:656728ms step_avg:88.98ms +[2025-08-22 22:15:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:15:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:15:38] [Rank 0] PRINT: step:7400/10000 val_loss:3.8025 svd_entropy: attn_qk:H=0.6928,top10E=0.35,eRank=117.2,q75/q25=73.76 attn_vo:H=0.6180,top10E=0.43,eRank=87.7,q75/q25=91.31 mlp_w1:H=0.7852,top10E=0.27,eRank=230.9,q75/q25=6.75 mlp_w2:H=0.9462,top10E=0.06,eRank=550.9,q75/q25=4.19 vo_prod:H=0.5129,top10E=0.61,eRank=43.6,q75/q25=9241.65 train_time:658699ms step_avg:89.01ms +[2025-08-22 22:15:38] [Rank 0] PRINT: step:7400/10000 val_loss:3.8025 svd_entropy: attn_qk:H=0.6928,top10E=0.35,eRank=117.2,q75/q25=73.76 attn_vo:H=0.6180,top10E=0.43,eRank=87.7,q75/q25=91.31 mlp_w1:H=0.7852,top10E=0.27,eRank=230.9,q75/q25=6.75 mlp_w2:H=0.9462,top10E=0.06,eRank=550.9,q75/q25=4.19 vo_prod:H=0.5129,top10E=0.61,eRank=43.6,q75/q25=9241.65 train_time:658699ms step_avg:89.01ms +[2025-08-22 22:15:38] [Rank 0] step:7401/10000 train_time:658710ms step_avg:89.00ms +[2025-08-22 22:15:38] [Rank 0] step:7401/10000 train_time:658710ms step_avg:89.00ms +[2025-08-22 22:15:40] [Rank 0] step:7421/10000 train_time:660403ms step_avg:88.99ms +[2025-08-22 22:15:40] [Rank 0] step:7421/10000 train_time:660403ms step_avg:88.99ms +[2025-08-22 22:15:42] [Rank 0] step:7441/10000 train_time:662274ms step_avg:89.00ms 
+[2025-08-22 22:15:42] [Rank 0] step:7441/10000 train_time:662274ms step_avg:89.00ms +[2025-08-22 22:15:44] [Rank 0] step:7461/10000 train_time:664144ms step_avg:89.02ms +[2025-08-22 22:15:44] [Rank 0] step:7461/10000 train_time:664144ms step_avg:89.02ms +[2025-08-22 22:15:46] [Rank 0] step:7481/10000 train_time:666026ms step_avg:89.03ms +[2025-08-22 22:15:46] [Rank 0] step:7481/10000 train_time:666026ms step_avg:89.03ms +[2025-08-22 22:15:48] [Rank 0] step:7501/10000 train_time:667906ms step_avg:89.04ms +[2025-08-22 22:15:48] [Rank 0] step:7501/10000 train_time:667906ms step_avg:89.04ms +[2025-08-22 22:15:49] [Rank 0] step:7521/10000 train_time:669784ms step_avg:89.06ms +[2025-08-22 22:15:49] [Rank 0] step:7521/10000 train_time:669784ms step_avg:89.06ms +[2025-08-22 22:15:51] [Rank 0] step:7541/10000 train_time:671677ms step_avg:89.07ms +[2025-08-22 22:15:51] [Rank 0] step:7541/10000 train_time:671677ms step_avg:89.07ms +[2025-08-22 22:15:53] [Rank 0] step:7561/10000 train_time:673548ms step_avg:89.08ms +[2025-08-22 22:15:53] [Rank 0] step:7561/10000 train_time:673548ms step_avg:89.08ms +[2025-08-22 22:15:55] [Rank 0] step:7581/10000 train_time:675438ms step_avg:89.10ms +[2025-08-22 22:15:55] [Rank 0] step:7581/10000 train_time:675438ms step_avg:89.10ms +[2025-08-22 22:15:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:15:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:16:10] [Rank 0] PRINT: step:7600/10000 val_loss:3.7966 svd_entropy: attn_qk:H=0.6936,top10E=0.35,eRank=117.8,q75/q25=73.86 attn_vo:H=0.6195,top10E=0.42,eRank=88.4,q75/q25=90.42 mlp_w1:H=0.7860,top10E=0.27,eRank=232.2,q75/q25=6.73 mlp_w2:H=0.9463,top10E=0.06,eRank=551.1,q75/q25=4.19 vo_prod:H=0.5147,top10E=0.61,eRank=44.1,q75/q25=9117.75 train_time:677516ms step_avg:89.15ms +[2025-08-22 22:16:10] [Rank 0] PRINT: step:7600/10000 val_loss:3.7966 svd_entropy: attn_qk:H=0.6936,top10E=0.35,eRank=117.8,q75/q25=73.86 attn_vo:H=0.6195,top10E=0.42,eRank=88.4,q75/q25=90.42 mlp_w1:H=0.7860,top10E=0.27,eRank=232.2,q75/q25=6.73 mlp_w2:H=0.9463,top10E=0.06,eRank=551.1,q75/q25=4.19 vo_prod:H=0.5147,top10E=0.61,eRank=44.1,q75/q25=9117.75 train_time:677516ms step_avg:89.15ms +[2025-08-22 22:16:11] [Rank 0] step:7601/10000 train_time:677528ms step_avg:89.14ms +[2025-08-22 22:16:11] [Rank 0] step:7601/10000 train_time:677528ms step_avg:89.14ms +[2025-08-22 22:16:12] [Rank 0] step:7621/10000 train_time:679235ms step_avg:89.13ms +[2025-08-22 22:16:12] [Rank 0] step:7621/10000 train_time:679235ms step_avg:89.13ms +[2025-08-22 22:16:14] [Rank 0] step:7641/10000 train_time:681103ms step_avg:89.14ms +[2025-08-22 22:16:14] [Rank 0] step:7641/10000 train_time:681103ms step_avg:89.14ms +[2025-08-22 22:16:16] [Rank 0] step:7661/10000 train_time:682976ms step_avg:89.15ms +[2025-08-22 22:16:16] [Rank 0] step:7661/10000 train_time:682976ms step_avg:89.15ms +[2025-08-22 22:16:18] [Rank 0] step:7681/10000 train_time:684842ms step_avg:89.16ms +[2025-08-22 22:16:18] [Rank 0] step:7681/10000 train_time:684842ms step_avg:89.16ms +[2025-08-22 22:16:20] [Rank 0] step:7701/10000 train_time:686713ms step_avg:89.17ms +[2025-08-22 22:16:20] [Rank 0] step:7701/10000 train_time:686713ms step_avg:89.17ms +[2025-08-22 22:16:22] [Rank 0] step:7721/10000 train_time:688600ms step_avg:89.19ms +[2025-08-22 22:16:22] [Rank 0] step:7721/10000 train_time:688600ms step_avg:89.19ms +[2025-08-22 22:16:24] 
[Rank 0] step:7741/10000 train_time:690474ms step_avg:89.20ms +[2025-08-22 22:16:24] [Rank 0] step:7741/10000 train_time:690474ms step_avg:89.20ms +[2025-08-22 22:16:26] [Rank 0] step:7761/10000 train_time:692350ms step_avg:89.21ms +[2025-08-22 22:16:26] [Rank 0] step:7761/10000 train_time:692350ms step_avg:89.21ms +[2025-08-22 22:16:27] [Rank 0] step:7781/10000 train_time:694296ms step_avg:89.23ms +[2025-08-22 22:16:27] [Rank 0] step:7781/10000 train_time:694296ms step_avg:89.23ms +[2025-08-22 22:16:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:16:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:16:43] [Rank 0] PRINT: step:7800/10000 val_loss:3.7818 svd_entropy: attn_qk:H=0.6944,top10E=0.35,eRank=118.4,q75/q25=73.64 attn_vo:H=0.6209,top10E=0.42,eRank=89.2,q75/q25=90.64 mlp_w1:H=0.7866,top10E=0.27,eRank=233.3,q75/q25=6.71 mlp_w2:H=0.9464,top10E=0.06,eRank=551.3,q75/q25=4.20 vo_prod:H=0.5160,top10E=0.60,eRank=44.4,q75/q25=9140.88 train_time:696384ms step_avg:89.28ms +[2025-08-22 22:16:43] [Rank 0] PRINT: step:7800/10000 val_loss:3.7818 svd_entropy: attn_qk:H=0.6944,top10E=0.35,eRank=118.4,q75/q25=73.64 attn_vo:H=0.6209,top10E=0.42,eRank=89.2,q75/q25=90.64 mlp_w1:H=0.7866,top10E=0.27,eRank=233.3,q75/q25=6.71 mlp_w2:H=0.9464,top10E=0.06,eRank=551.3,q75/q25=4.20 vo_prod:H=0.5160,top10E=0.60,eRank=44.4,q75/q25=9140.88 train_time:696384ms step_avg:89.28ms +[2025-08-22 22:16:43] [Rank 0] step:7801/10000 train_time:696395ms step_avg:89.27ms +[2025-08-22 22:16:43] [Rank 0] step:7801/10000 train_time:696395ms step_avg:89.27ms +[2025-08-22 22:16:45] [Rank 0] step:7821/10000 train_time:698080ms step_avg:89.26ms +[2025-08-22 22:16:45] [Rank 0] step:7821/10000 train_time:698080ms step_avg:89.26ms +[2025-08-22 22:16:47] [Rank 0] step:7841/10000 train_time:699947ms step_avg:89.27ms 
+[2025-08-22 22:16:47] [Rank 0] step:7841/10000 train_time:699947ms step_avg:89.27ms +[2025-08-22 22:16:49] [Rank 0] step:7861/10000 train_time:701822ms step_avg:89.28ms +[2025-08-22 22:16:49] [Rank 0] step:7861/10000 train_time:701822ms step_avg:89.28ms +[2025-08-22 22:16:50] [Rank 0] step:7881/10000 train_time:703699ms step_avg:89.29ms +[2025-08-22 22:16:50] [Rank 0] step:7881/10000 train_time:703699ms step_avg:89.29ms +[2025-08-22 22:16:52] [Rank 0] step:7901/10000 train_time:705572ms step_avg:89.30ms +[2025-08-22 22:16:52] [Rank 0] step:7901/10000 train_time:705572ms step_avg:89.30ms +[2025-08-22 22:16:54] [Rank 0] step:7921/10000 train_time:707448ms step_avg:89.31ms +[2025-08-22 22:16:54] [Rank 0] step:7921/10000 train_time:707448ms step_avg:89.31ms +[2025-08-22 22:16:56] [Rank 0] step:7941/10000 train_time:709329ms step_avg:89.32ms +[2025-08-22 22:16:56] [Rank 0] step:7941/10000 train_time:709329ms step_avg:89.32ms +[2025-08-22 22:16:58] [Rank 0] step:7961/10000 train_time:711207ms step_avg:89.34ms +[2025-08-22 22:16:58] [Rank 0] step:7961/10000 train_time:711207ms step_avg:89.34ms +[2025-08-22 22:17:00] [Rank 0] step:7981/10000 train_time:713080ms step_avg:89.35ms +[2025-08-22 22:17:00] [Rank 0] step:7981/10000 train_time:713080ms step_avg:89.35ms +[2025-08-22 22:17:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:17:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:17:15] [Rank 0] PRINT: step:8000/10000 val_loss:3.7631 svd_entropy: attn_qk:H=0.6951,top10E=0.35,eRank=119.0,q75/q25=73.68 attn_vo:H=0.6221,top10E=0.42,eRank=89.9,q75/q25=90.65 mlp_w1:H=0.7873,top10E=0.27,eRank=234.3,q75/q25=6.69 mlp_w2:H=0.9465,top10E=0.06,eRank=551.6,q75/q25=4.19 vo_prod:H=0.5174,top10E=0.60,eRank=44.8,q75/q25=9134.28 train_time:715147ms step_avg:89.39ms +[2025-08-22 22:17:15] [Rank 0] PRINT: step:8000/10000 val_loss:3.7631 svd_entropy: attn_qk:H=0.6951,top10E=0.35,eRank=119.0,q75/q25=73.68 attn_vo:H=0.6221,top10E=0.42,eRank=89.9,q75/q25=90.65 mlp_w1:H=0.7873,top10E=0.27,eRank=234.3,q75/q25=6.69 mlp_w2:H=0.9465,top10E=0.06,eRank=551.6,q75/q25=4.19 vo_prod:H=0.5174,top10E=0.60,eRank=44.8,q75/q25=9134.28 train_time:715147ms step_avg:89.39ms +[2025-08-22 22:17:15] [Rank 0] step:8001/10000 train_time:715159ms step_avg:89.38ms +[2025-08-22 22:17:15] [Rank 0] step:8001/10000 train_time:715159ms step_avg:89.38ms +[2025-08-22 22:17:17] [Rank 0] step:8021/10000 train_time:716858ms step_avg:89.37ms +[2025-08-22 22:17:17] [Rank 0] step:8021/10000 train_time:716858ms step_avg:89.37ms +[2025-08-22 22:17:19] [Rank 0] step:8041/10000 train_time:718742ms step_avg:89.38ms +[2025-08-22 22:17:19] [Rank 0] step:8041/10000 train_time:718742ms step_avg:89.38ms +[2025-08-22 22:17:21] [Rank 0] step:8061/10000 train_time:720617ms step_avg:89.40ms +[2025-08-22 22:17:21] [Rank 0] step:8061/10000 train_time:720617ms step_avg:89.40ms +[2025-08-22 22:17:23] [Rank 0] step:8081/10000 train_time:722487ms step_avg:89.41ms +[2025-08-22 22:17:23] [Rank 0] step:8081/10000 train_time:722487ms step_avg:89.41ms +[2025-08-22 22:17:25] [Rank 0] step:8101/10000 train_time:724370ms step_avg:89.42ms +[2025-08-22 22:17:25] [Rank 0] step:8101/10000 train_time:724370ms step_avg:89.42ms +[2025-08-22 22:17:27] [Rank 0] step:8121/10000 train_time:726244ms step_avg:89.43ms +[2025-08-22 22:17:27] [Rank 0] step:8121/10000 train_time:726244ms step_avg:89.43ms +[2025-08-22 22:17:29] 
[Rank 0] step:8141/10000 train_time:728763ms step_avg:89.52ms +[2025-08-22 22:17:29] [Rank 0] step:8141/10000 train_time:728763ms step_avg:89.52ms +[2025-08-22 22:17:31] [Rank 0] step:8161/10000 train_time:730630ms step_avg:89.53ms +[2025-08-22 22:17:31] [Rank 0] step:8161/10000 train_time:730630ms step_avg:89.53ms +[2025-08-22 22:17:33] [Rank 0] step:8181/10000 train_time:732539ms step_avg:89.54ms +[2025-08-22 22:17:33] [Rank 0] step:8181/10000 train_time:732539ms step_avg:89.54ms +[2025-08-22 22:17:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:17:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:17:48] [Rank 0] PRINT: step:8200/10000 val_loss:3.7525 svd_entropy: attn_qk:H=0.6956,top10E=0.35,eRank=119.4,q75/q25=73.65 attn_vo:H=0.6233,top10E=0.42,eRank=90.5,q75/q25=90.88 mlp_w1:H=0.7878,top10E=0.27,eRank=235.3,q75/q25=6.67 mlp_w2:H=0.9466,top10E=0.06,eRank=551.8,q75/q25=4.20 vo_prod:H=0.5187,top10E=0.60,eRank=45.1,q75/q25=9244.61 train_time:734659ms step_avg:89.59ms +[2025-08-22 22:17:48] [Rank 0] PRINT: step:8200/10000 val_loss:3.7525 svd_entropy: attn_qk:H=0.6956,top10E=0.35,eRank=119.4,q75/q25=73.65 attn_vo:H=0.6233,top10E=0.42,eRank=90.5,q75/q25=90.88 mlp_w1:H=0.7878,top10E=0.27,eRank=235.3,q75/q25=6.67 mlp_w2:H=0.9466,top10E=0.06,eRank=551.8,q75/q25=4.20 vo_prod:H=0.5187,top10E=0.60,eRank=45.1,q75/q25=9244.61 train_time:734659ms step_avg:89.59ms +[2025-08-22 22:17:48] [Rank 0] step:8201/10000 train_time:734671ms step_avg:89.58ms +[2025-08-22 22:17:48] [Rank 0] step:8201/10000 train_time:734671ms step_avg:89.58ms +[2025-08-22 22:17:50] [Rank 0] step:8221/10000 train_time:736397ms step_avg:89.58ms +[2025-08-22 22:17:50] [Rank 0] step:8221/10000 train_time:736397ms step_avg:89.58ms +[2025-08-22 22:17:52] [Rank 0] step:8241/10000 train_time:738304ms step_avg:89.59ms 
+[2025-08-22 22:17:52] [Rank 0] step:8241/10000 train_time:738304ms step_avg:89.59ms +[2025-08-22 22:17:54] [Rank 0] step:8261/10000 train_time:740204ms step_avg:89.60ms +[2025-08-22 22:17:54] [Rank 0] step:8261/10000 train_time:740204ms step_avg:89.60ms +[2025-08-22 22:17:56] [Rank 0] step:8281/10000 train_time:742107ms step_avg:89.62ms +[2025-08-22 22:17:56] [Rank 0] step:8281/10000 train_time:742107ms step_avg:89.62ms +[2025-08-22 22:17:58] [Rank 0] step:8301/10000 train_time:744006ms step_avg:89.63ms +[2025-08-22 22:17:58] [Rank 0] step:8301/10000 train_time:744006ms step_avg:89.63ms +[2025-08-22 22:18:00] [Rank 0] step:8321/10000 train_time:745899ms step_avg:89.64ms +[2025-08-22 22:18:00] [Rank 0] step:8321/10000 train_time:745899ms step_avg:89.64ms +[2025-08-22 22:18:02] [Rank 0] step:8341/10000 train_time:747804ms step_avg:89.65ms +[2025-08-22 22:18:02] [Rank 0] step:8341/10000 train_time:747804ms step_avg:89.65ms +[2025-08-22 22:18:04] [Rank 0] step:8361/10000 train_time:749706ms step_avg:89.67ms +[2025-08-22 22:18:04] [Rank 0] step:8361/10000 train_time:749706ms step_avg:89.67ms +[2025-08-22 22:18:05] [Rank 0] step:8381/10000 train_time:751611ms step_avg:89.68ms +[2025-08-22 22:18:05] [Rank 0] step:8381/10000 train_time:751611ms step_avg:89.68ms +[2025-08-22 22:18:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:18:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:18:21] [Rank 0] PRINT: step:8400/10000 val_loss:3.7388 svd_entropy: attn_qk:H=0.6961,top10E=0.35,eRank=119.8,q75/q25=73.71 attn_vo:H=0.6244,top10E=0.41,eRank=91.1,q75/q25=90.63 mlp_w1:H=0.7884,top10E=0.27,eRank=236.1,q75/q25=6.65 mlp_w2:H=0.9466,top10E=0.06,eRank=552.0,q75/q25=4.21 vo_prod:H=0.5200,top10E=0.60,eRank=45.4,q75/q25=9300.18 train_time:753697ms step_avg:89.73ms +[2025-08-22 22:18:21] [Rank 0] PRINT: step:8400/10000 val_loss:3.7388 svd_entropy: attn_qk:H=0.6961,top10E=0.35,eRank=119.8,q75/q25=73.71 attn_vo:H=0.6244,top10E=0.41,eRank=91.1,q75/q25=90.63 mlp_w1:H=0.7884,top10E=0.27,eRank=236.1,q75/q25=6.65 mlp_w2:H=0.9466,top10E=0.06,eRank=552.0,q75/q25=4.21 vo_prod:H=0.5200,top10E=0.60,eRank=45.4,q75/q25=9300.18 train_time:753697ms step_avg:89.73ms +[2025-08-22 22:18:21] [Rank 0] step:8401/10000 train_time:753709ms step_avg:89.72ms +[2025-08-22 22:18:21] [Rank 0] step:8401/10000 train_time:753709ms step_avg:89.72ms +[2025-08-22 22:18:23] [Rank 0] step:8421/10000 train_time:755424ms step_avg:89.71ms +[2025-08-22 22:18:23] [Rank 0] step:8421/10000 train_time:755424ms step_avg:89.71ms +[2025-08-22 22:18:25] [Rank 0] step:8441/10000 train_time:757315ms step_avg:89.72ms +[2025-08-22 22:18:25] [Rank 0] step:8441/10000 train_time:757315ms step_avg:89.72ms +[2025-08-22 22:18:27] [Rank 0] step:8461/10000 train_time:759208ms step_avg:89.73ms +[2025-08-22 22:18:27] [Rank 0] step:8461/10000 train_time:759208ms step_avg:89.73ms +[2025-08-22 22:18:29] [Rank 0] step:8481/10000 train_time:761110ms step_avg:89.74ms +[2025-08-22 22:18:29] [Rank 0] step:8481/10000 train_time:761110ms step_avg:89.74ms +[2025-08-22 22:18:31] [Rank 0] step:8501/10000 train_time:763030ms step_avg:89.76ms +[2025-08-22 22:18:31] [Rank 0] step:8501/10000 train_time:763030ms step_avg:89.76ms +[2025-08-22 22:18:32] [Rank 0] step:8521/10000 train_time:765018ms step_avg:89.78ms +[2025-08-22 22:18:32] [Rank 0] step:8521/10000 train_time:765018ms step_avg:89.78ms +[2025-08-22 22:18:34] 
[Rank 0] step:8541/10000 train_time:766849ms step_avg:89.78ms +[2025-08-22 22:18:34] [Rank 0] step:8541/10000 train_time:766849ms step_avg:89.78ms +[2025-08-22 22:18:36] [Rank 0] step:8561/10000 train_time:768754ms step_avg:89.80ms +[2025-08-22 22:18:36] [Rank 0] step:8561/10000 train_time:768754ms step_avg:89.80ms +[2025-08-22 22:18:38] [Rank 0] step:8581/10000 train_time:770660ms step_avg:89.81ms +[2025-08-22 22:18:38] [Rank 0] step:8581/10000 train_time:770660ms step_avg:89.81ms +[2025-08-22 22:18:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:18:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:18:54] [Rank 0] PRINT: step:8600/10000 val_loss:3.7289 svd_entropy: attn_qk:H=0.6965,top10E=0.35,eRank=120.1,q75/q25=73.60 attn_vo:H=0.6252,top10E=0.41,eRank=91.5,q75/q25=90.76 mlp_w1:H=0.7889,top10E=0.27,eRank=236.9,q75/q25=6.64 mlp_w2:H=0.9467,top10E=0.06,eRank=552.2,q75/q25=4.21 vo_prod:H=0.5210,top10E=0.59,eRank=45.7,q75/q25=9439.92 train_time:772743ms step_avg:89.85ms +[2025-08-22 22:18:54] [Rank 0] PRINT: step:8600/10000 val_loss:3.7289 svd_entropy: attn_qk:H=0.6965,top10E=0.35,eRank=120.1,q75/q25=73.60 attn_vo:H=0.6252,top10E=0.41,eRank=91.5,q75/q25=90.76 mlp_w1:H=0.7889,top10E=0.27,eRank=236.9,q75/q25=6.64 mlp_w2:H=0.9467,top10E=0.06,eRank=552.2,q75/q25=4.21 vo_prod:H=0.5210,top10E=0.59,eRank=45.7,q75/q25=9439.92 train_time:772743ms step_avg:89.85ms +[2025-08-22 22:18:54] [Rank 0] step:8601/10000 train_time:772756ms step_avg:89.84ms +[2025-08-22 22:18:54] [Rank 0] step:8601/10000 train_time:772756ms step_avg:89.84ms +[2025-08-22 22:18:56] [Rank 0] step:8621/10000 train_time:774490ms step_avg:89.84ms +[2025-08-22 22:18:56] [Rank 0] step:8621/10000 train_time:774490ms step_avg:89.84ms +[2025-08-22 22:18:57] [Rank 0] step:8641/10000 train_time:776381ms step_avg:89.85ms 
+[2025-08-22 22:18:57] [Rank 0] step:8641/10000 train_time:776381ms step_avg:89.85ms +[2025-08-22 22:18:59] [Rank 0] step:8661/10000 train_time:778277ms step_avg:89.86ms +[2025-08-22 22:18:59] [Rank 0] step:8661/10000 train_time:778277ms step_avg:89.86ms +[2025-08-22 22:19:01] [Rank 0] step:8681/10000 train_time:780175ms step_avg:89.87ms +[2025-08-22 22:19:01] [Rank 0] step:8681/10000 train_time:780175ms step_avg:89.87ms +[2025-08-22 22:19:03] [Rank 0] step:8701/10000 train_time:782070ms step_avg:89.88ms +[2025-08-22 22:19:03] [Rank 0] step:8701/10000 train_time:782070ms step_avg:89.88ms +[2025-08-22 22:19:05] [Rank 0] step:8721/10000 train_time:783971ms step_avg:89.89ms +[2025-08-22 22:19:05] [Rank 0] step:8721/10000 train_time:783971ms step_avg:89.89ms +[2025-08-22 22:19:07] [Rank 0] step:8741/10000 train_time:785866ms step_avg:89.91ms +[2025-08-22 22:19:07] [Rank 0] step:8741/10000 train_time:785866ms step_avg:89.91ms +[2025-08-22 22:19:09] [Rank 0] step:8761/10000 train_time:787763ms step_avg:89.92ms +[2025-08-22 22:19:09] [Rank 0] step:8761/10000 train_time:787763ms step_avg:89.92ms +[2025-08-22 22:19:11] [Rank 0] step:8781/10000 train_time:789668ms step_avg:89.93ms +[2025-08-22 22:19:11] [Rank 0] step:8781/10000 train_time:789668ms step_avg:89.93ms +[2025-08-22 22:19:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:19:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:19:26] [Rank 0] PRINT: step:8800/10000 val_loss:3.7165 svd_entropy: attn_qk:H=0.6970,top10E=0.35,eRank=120.5,q75/q25=73.45 attn_vo:H=0.6261,top10E=0.41,eRank=92.0,q75/q25=90.66 mlp_w1:H=0.7893,top10E=0.27,eRank=237.5,q75/q25=6.62 mlp_w2:H=0.9468,top10E=0.06,eRank=552.4,q75/q25=4.21 vo_prod:H=0.5219,top10E=0.59,eRank=45.9,q75/q25=9389.15 train_time:791759ms step_avg:89.97ms +[2025-08-22 22:19:26] [Rank 0] PRINT: step:8800/10000 val_loss:3.7165 svd_entropy: attn_qk:H=0.6970,top10E=0.35,eRank=120.5,q75/q25=73.45 attn_vo:H=0.6261,top10E=0.41,eRank=92.0,q75/q25=90.66 mlp_w1:H=0.7893,top10E=0.27,eRank=237.5,q75/q25=6.62 mlp_w2:H=0.9468,top10E=0.06,eRank=552.4,q75/q25=4.21 vo_prod:H=0.5219,top10E=0.59,eRank=45.9,q75/q25=9389.15 train_time:791759ms step_avg:89.97ms +[2025-08-22 22:19:27] [Rank 0] step:8801/10000 train_time:791772ms step_avg:89.96ms +[2025-08-22 22:19:27] [Rank 0] step:8801/10000 train_time:791772ms step_avg:89.96ms +[2025-08-22 22:19:28] [Rank 0] step:8821/10000 train_time:793505ms step_avg:89.96ms +[2025-08-22 22:19:28] [Rank 0] step:8821/10000 train_time:793505ms step_avg:89.96ms +[2025-08-22 22:19:30] [Rank 0] step:8841/10000 train_time:795426ms step_avg:89.97ms +[2025-08-22 22:19:30] [Rank 0] step:8841/10000 train_time:795426ms step_avg:89.97ms +[2025-08-22 22:19:32] [Rank 0] step:8861/10000 train_time:797323ms step_avg:89.98ms +[2025-08-22 22:19:32] [Rank 0] step:8861/10000 train_time:797323ms step_avg:89.98ms +[2025-08-22 22:19:34] [Rank 0] step:8881/10000 train_time:799221ms step_avg:89.99ms +[2025-08-22 22:19:34] [Rank 0] step:8881/10000 train_time:799221ms step_avg:89.99ms +[2025-08-22 22:19:36] [Rank 0] step:8901/10000 train_time:801128ms step_avg:90.00ms +[2025-08-22 22:19:36] [Rank 0] step:8901/10000 train_time:801128ms step_avg:90.00ms +[2025-08-22 22:19:38] [Rank 0] step:8921/10000 train_time:803039ms step_avg:90.02ms +[2025-08-22 22:19:38] [Rank 0] step:8921/10000 train_time:803039ms step_avg:90.02ms +[2025-08-22 22:19:40] 
[Rank 0] step:8941/10000 train_time:804957ms step_avg:90.03ms +[2025-08-22 22:19:40] [Rank 0] step:8941/10000 train_time:804957ms step_avg:90.03ms +[2025-08-22 22:19:42] [Rank 0] step:8961/10000 train_time:806861ms step_avg:90.04ms +[2025-08-22 22:19:42] [Rank 0] step:8961/10000 train_time:806861ms step_avg:90.04ms +[2025-08-22 22:19:44] [Rank 0] step:8981/10000 train_time:808764ms step_avg:90.05ms +[2025-08-22 22:19:44] [Rank 0] step:8981/10000 train_time:808764ms step_avg:90.05ms +[2025-08-22 22:19:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:19:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:19:59] [Rank 0] PRINT: step:9000/10000 val_loss:3.7054 svd_entropy: attn_qk:H=0.6974,top10E=0.35,eRank=120.8,q75/q25=73.78 attn_vo:H=0.6269,top10E=0.41,eRank=92.4,q75/q25=90.63 mlp_w1:H=0.7897,top10E=0.27,eRank=238.1,q75/q25=6.61 mlp_w2:H=0.9468,top10E=0.06,eRank=552.5,q75/q25=4.21 vo_prod:H=0.5228,top10E=0.59,eRank=46.1,q75/q25=9492.85 train_time:810857ms step_avg:90.10ms +[2025-08-22 22:19:59] [Rank 0] PRINT: step:9000/10000 val_loss:3.7054 svd_entropy: attn_qk:H=0.6974,top10E=0.35,eRank=120.8,q75/q25=73.78 attn_vo:H=0.6269,top10E=0.41,eRank=92.4,q75/q25=90.63 mlp_w1:H=0.7897,top10E=0.27,eRank=238.1,q75/q25=6.61 mlp_w2:H=0.9468,top10E=0.06,eRank=552.5,q75/q25=4.21 vo_prod:H=0.5228,top10E=0.59,eRank=46.1,q75/q25=9492.85 train_time:810857ms step_avg:90.10ms +[2025-08-22 22:19:59] [Rank 0] step:9001/10000 train_time:810869ms step_avg:90.09ms +[2025-08-22 22:19:59] [Rank 0] step:9001/10000 train_time:810869ms step_avg:90.09ms +[2025-08-22 22:20:01] [Rank 0] step:9021/10000 train_time:812592ms step_avg:90.08ms +[2025-08-22 22:20:01] [Rank 0] step:9021/10000 train_time:812592ms step_avg:90.08ms +[2025-08-22 22:20:03] [Rank 0] step:9041/10000 train_time:814488ms step_avg:90.09ms 
+[2025-08-22 22:20:03] [Rank 0] step:9041/10000 train_time:814488ms step_avg:90.09ms +[2025-08-22 22:20:05] [Rank 0] step:9061/10000 train_time:816401ms step_avg:90.10ms +[2025-08-22 22:20:05] [Rank 0] step:9061/10000 train_time:816401ms step_avg:90.10ms +[2025-08-22 22:20:07] [Rank 0] step:9081/10000 train_time:818308ms step_avg:90.11ms +[2025-08-22 22:20:07] [Rank 0] step:9081/10000 train_time:818308ms step_avg:90.11ms +[2025-08-22 22:20:09] [Rank 0] step:9101/10000 train_time:820228ms step_avg:90.13ms +[2025-08-22 22:20:09] [Rank 0] step:9101/10000 train_time:820228ms step_avg:90.13ms +[2025-08-22 22:20:11] [Rank 0] step:9121/10000 train_time:822133ms step_avg:90.14ms +[2025-08-22 22:20:11] [Rank 0] step:9121/10000 train_time:822133ms step_avg:90.14ms +[2025-08-22 22:20:13] [Rank 0] step:9141/10000 train_time:824024ms step_avg:90.15ms +[2025-08-22 22:20:13] [Rank 0] step:9141/10000 train_time:824024ms step_avg:90.15ms +[2025-08-22 22:20:15] [Rank 0] step:9161/10000 train_time:825916ms step_avg:90.16ms +[2025-08-22 22:20:15] [Rank 0] step:9161/10000 train_time:825916ms step_avg:90.16ms +[2025-08-22 22:20:17] [Rank 0] step:9181/10000 train_time:827847ms step_avg:90.17ms +[2025-08-22 22:20:17] [Rank 0] step:9181/10000 train_time:827847ms step_avg:90.17ms +[2025-08-22 22:20:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:20:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:20:32] [Rank 0] PRINT: step:9200/10000 val_loss:3.6959 svd_entropy: attn_qk:H=0.6977,top10E=0.35,eRank=121.0,q75/q25=73.67 attn_vo:H=0.6276,top10E=0.41,eRank=92.7,q75/q25=90.99 mlp_w1:H=0.7900,top10E=0.27,eRank=238.5,q75/q25=6.60 mlp_w2:H=0.9468,top10E=0.06,eRank=552.7,q75/q25=4.20 vo_prod:H=0.5234,top10E=0.59,eRank=46.2,q75/q25=9538.51 train_time:829939ms step_avg:90.21ms +[2025-08-22 22:20:32] [Rank 0] PRINT: step:9200/10000 val_loss:3.6959 svd_entropy: attn_qk:H=0.6977,top10E=0.35,eRank=121.0,q75/q25=73.67 attn_vo:H=0.6276,top10E=0.41,eRank=92.7,q75/q25=90.99 mlp_w1:H=0.7900,top10E=0.27,eRank=238.5,q75/q25=6.60 mlp_w2:H=0.9468,top10E=0.06,eRank=552.7,q75/q25=4.20 vo_prod:H=0.5234,top10E=0.59,eRank=46.2,q75/q25=9538.51 train_time:829939ms step_avg:90.21ms +[2025-08-22 22:20:32] [Rank 0] step:9201/10000 train_time:829951ms step_avg:90.20ms +[2025-08-22 22:20:32] [Rank 0] step:9201/10000 train_time:829951ms step_avg:90.20ms +[2025-08-22 22:20:34] [Rank 0] step:9221/10000 train_time:831684ms step_avg:90.19ms +[2025-08-22 22:20:34] [Rank 0] step:9221/10000 train_time:831684ms step_avg:90.19ms +[2025-08-22 22:20:36] [Rank 0] step:9241/10000 train_time:833592ms step_avg:90.21ms +[2025-08-22 22:20:36] [Rank 0] step:9241/10000 train_time:833592ms step_avg:90.21ms +[2025-08-22 22:20:38] [Rank 0] step:9261/10000 train_time:835502ms step_avg:90.22ms +[2025-08-22 22:20:38] [Rank 0] step:9261/10000 train_time:835502ms step_avg:90.22ms +[2025-08-22 22:20:40] [Rank 0] step:9281/10000 train_time:837387ms step_avg:90.23ms +[2025-08-22 22:20:40] [Rank 0] step:9281/10000 train_time:837387ms step_avg:90.23ms +[2025-08-22 22:20:42] [Rank 0] step:9301/10000 train_time:839283ms step_avg:90.24ms +[2025-08-22 22:20:42] [Rank 0] step:9301/10000 train_time:839283ms step_avg:90.24ms +[2025-08-22 22:20:44] [Rank 0] step:9321/10000 train_time:841188ms step_avg:90.25ms +[2025-08-22 22:20:44] [Rank 0] step:9321/10000 train_time:841188ms step_avg:90.25ms +[2025-08-22 22:20:46] 
[Rank 0] step:9341/10000 train_time:843088ms step_avg:90.26ms +[2025-08-22 22:20:46] [Rank 0] step:9341/10000 train_time:843088ms step_avg:90.26ms +[2025-08-22 22:20:48] [Rank 0] step:9361/10000 train_time:844994ms step_avg:90.27ms +[2025-08-22 22:20:48] [Rank 0] step:9361/10000 train_time:844994ms step_avg:90.27ms +[2025-08-22 22:20:50] [Rank 0] step:9381/10000 train_time:846913ms step_avg:90.28ms +[2025-08-22 22:20:50] [Rank 0] step:9381/10000 train_time:846913ms step_avg:90.28ms +[2025-08-22 22:20:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:20:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:21:05] [Rank 0] PRINT: step:9400/10000 val_loss:3.6869 svd_entropy: attn_qk:H=0.6980,top10E=0.35,eRank=121.2,q75/q25=73.59 attn_vo:H=0.6281,top10E=0.41,eRank=93.0,q75/q25=91.14 mlp_w1:H=0.7903,top10E=0.27,eRank=239.0,q75/q25=6.59 mlp_w2:H=0.9469,top10E=0.06,eRank=552.8,q75/q25=4.21 vo_prod:H=0.5242,top10E=0.59,eRank=46.4,q75/q25=9683.97 train_time:849013ms step_avg:90.32ms +[2025-08-22 22:21:05] [Rank 0] PRINT: step:9400/10000 val_loss:3.6869 svd_entropy: attn_qk:H=0.6980,top10E=0.35,eRank=121.2,q75/q25=73.59 attn_vo:H=0.6281,top10E=0.41,eRank=93.0,q75/q25=91.14 mlp_w1:H=0.7903,top10E=0.27,eRank=239.0,q75/q25=6.59 mlp_w2:H=0.9469,top10E=0.06,eRank=552.8,q75/q25=4.21 vo_prod:H=0.5242,top10E=0.59,eRank=46.4,q75/q25=9683.97 train_time:849013ms step_avg:90.32ms +[2025-08-22 22:21:05] [Rank 0] step:9401/10000 train_time:849025ms step_avg:90.31ms +[2025-08-22 22:21:05] [Rank 0] step:9401/10000 train_time:849025ms step_avg:90.31ms +[2025-08-22 22:21:07] [Rank 0] step:9421/10000 train_time:850736ms step_avg:90.30ms +[2025-08-22 22:21:07] [Rank 0] step:9421/10000 train_time:850736ms step_avg:90.30ms +[2025-08-22 22:21:09] [Rank 0] step:9441/10000 train_time:852639ms step_avg:90.31ms 
+[2025-08-22 22:21:09] [Rank 0] step:9441/10000 train_time:852639ms step_avg:90.31ms +[2025-08-22 22:21:11] [Rank 0] step:9461/10000 train_time:854542ms step_avg:90.32ms +[2025-08-22 22:21:11] [Rank 0] step:9461/10000 train_time:854542ms step_avg:90.32ms +[2025-08-22 22:21:13] [Rank 0] step:9481/10000 train_time:856446ms step_avg:90.33ms +[2025-08-22 22:21:13] [Rank 0] step:9481/10000 train_time:856446ms step_avg:90.33ms +[2025-08-22 22:21:15] [Rank 0] step:9501/10000 train_time:858360ms step_avg:90.34ms +[2025-08-22 22:21:15] [Rank 0] step:9501/10000 train_time:858360ms step_avg:90.34ms +[2025-08-22 22:21:17] [Rank 0] step:9521/10000 train_time:860259ms step_avg:90.35ms +[2025-08-22 22:21:17] [Rank 0] step:9521/10000 train_time:860259ms step_avg:90.35ms +[2025-08-22 22:21:19] [Rank 0] step:9541/10000 train_time:862161ms step_avg:90.36ms +[2025-08-22 22:21:19] [Rank 0] step:9541/10000 train_time:862161ms step_avg:90.36ms +[2025-08-22 22:21:21] [Rank 0] step:9561/10000 train_time:864060ms step_avg:90.37ms +[2025-08-22 22:21:21] [Rank 0] step:9561/10000 train_time:864060ms step_avg:90.37ms +[2025-08-22 22:21:23] [Rank 0] step:9581/10000 train_time:865968ms step_avg:90.38ms +[2025-08-22 22:21:23] [Rank 0] step:9581/10000 train_time:865968ms step_avg:90.38ms +[2025-08-22 22:21:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:21:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:21:38] [Rank 0] PRINT: step:9600/10000 val_loss:3.6784 svd_entropy: attn_qk:H=0.6983,top10E=0.35,eRank=121.4,q75/q25=73.57 attn_vo:H=0.6286,top10E=0.41,eRank=93.3,q75/q25=91.06 mlp_w1:H=0.7905,top10E=0.26,eRank=239.3,q75/q25=6.57 mlp_w2:H=0.9469,top10E=0.06,eRank=552.9,q75/q25=4.20 vo_prod:H=0.5246,top10E=0.59,eRank=46.5,q75/q25=9694.21 train_time:868076ms step_avg:90.42ms +[2025-08-22 22:21:38] [Rank 0] PRINT: step:9600/10000 val_loss:3.6784 svd_entropy: attn_qk:H=0.6983,top10E=0.35,eRank=121.4,q75/q25=73.57 attn_vo:H=0.6286,top10E=0.41,eRank=93.3,q75/q25=91.06 mlp_w1:H=0.7905,top10E=0.26,eRank=239.3,q75/q25=6.57 mlp_w2:H=0.9469,top10E=0.06,eRank=552.9,q75/q25=4.20 vo_prod:H=0.5246,top10E=0.59,eRank=46.5,q75/q25=9694.21 train_time:868076ms step_avg:90.42ms +[2025-08-22 22:21:38] [Rank 0] step:9601/10000 train_time:868088ms step_avg:90.42ms +[2025-08-22 22:21:38] [Rank 0] step:9601/10000 train_time:868088ms step_avg:90.42ms +[2025-08-22 22:21:40] [Rank 0] step:9621/10000 train_time:869816ms step_avg:90.41ms +[2025-08-22 22:21:40] [Rank 0] step:9621/10000 train_time:869816ms step_avg:90.41ms +[2025-08-22 22:21:42] [Rank 0] step:9641/10000 train_time:871790ms step_avg:90.43ms +[2025-08-22 22:21:42] [Rank 0] step:9641/10000 train_time:871790ms step_avg:90.43ms +[2025-08-22 22:21:44] [Rank 0] step:9661/10000 train_time:873653ms step_avg:90.43ms +[2025-08-22 22:21:44] [Rank 0] step:9661/10000 train_time:873653ms step_avg:90.43ms +[2025-08-22 22:21:46] [Rank 0] step:9681/10000 train_time:875580ms step_avg:90.44ms +[2025-08-22 22:21:46] [Rank 0] step:9681/10000 train_time:875580ms step_avg:90.44ms +[2025-08-22 22:21:48] [Rank 0] step:9701/10000 train_time:877526ms step_avg:90.46ms +[2025-08-22 22:21:48] [Rank 0] step:9701/10000 train_time:877526ms step_avg:90.46ms +[2025-08-22 22:21:50] [Rank 0] step:9721/10000 train_time:879451ms step_avg:90.47ms +[2025-08-22 22:21:50] [Rank 0] step:9721/10000 train_time:879451ms step_avg:90.47ms +[2025-08-22 22:21:52] 
[Rank 0] step:9741/10000 train_time:881406ms step_avg:90.48ms +[2025-08-22 22:21:52] [Rank 0] step:9741/10000 train_time:881406ms step_avg:90.48ms +[2025-08-22 22:21:54] [Rank 0] step:9761/10000 train_time:883340ms step_avg:90.50ms +[2025-08-22 22:21:54] [Rank 0] step:9761/10000 train_time:883340ms step_avg:90.50ms +[2025-08-22 22:21:56] [Rank 0] step:9781/10000 train_time:885292ms step_avg:90.51ms +[2025-08-22 22:21:56] [Rank 0] step:9781/10000 train_time:885292ms step_avg:90.51ms +[2025-08-22 22:21:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:21:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:22:11] [Rank 0] PRINT: step:9800/10000 val_loss:3.6701 svd_entropy: attn_qk:H=0.6984,top10E=0.35,eRank=121.5,q75/q25=73.64 attn_vo:H=0.6290,top10E=0.41,eRank=93.5,q75/q25=91.22 mlp_w1:H=0.7907,top10E=0.26,eRank=239.6,q75/q25=6.56 mlp_w2:H=0.9469,top10E=0.06,eRank=552.9,q75/q25=4.21 vo_prod:H=0.5250,top10E=0.59,eRank=46.6,q75/q25=9665.22 train_time:887434ms step_avg:90.55ms +[2025-08-22 22:22:11] [Rank 0] PRINT: step:9800/10000 val_loss:3.6701 svd_entropy: attn_qk:H=0.6984,top10E=0.35,eRank=121.5,q75/q25=73.64 attn_vo:H=0.6290,top10E=0.41,eRank=93.5,q75/q25=91.22 mlp_w1:H=0.7907,top10E=0.26,eRank=239.6,q75/q25=6.56 mlp_w2:H=0.9469,top10E=0.06,eRank=552.9,q75/q25=4.21 vo_prod:H=0.5250,top10E=0.59,eRank=46.6,q75/q25=9665.22 train_time:887434ms step_avg:90.55ms +[2025-08-22 22:22:12] [Rank 0] step:9801/10000 train_time:887446ms step_avg:90.55ms +[2025-08-22 22:22:12] [Rank 0] step:9801/10000 train_time:887446ms step_avg:90.55ms +[2025-08-22 22:22:13] [Rank 0] step:9821/10000 train_time:889180ms step_avg:90.54ms +[2025-08-22 22:22:13] [Rank 0] step:9821/10000 train_time:889180ms step_avg:90.54ms +[2025-08-22 22:22:15] [Rank 0] step:9841/10000 train_time:891123ms step_avg:90.55ms 
+[2025-08-22 22:22:15] [Rank 0] step:9841/10000 train_time:891123ms step_avg:90.55ms +[2025-08-22 22:22:17] [Rank 0] step:9861/10000 train_time:893040ms step_avg:90.56ms +[2025-08-22 22:22:17] [Rank 0] step:9861/10000 train_time:893040ms step_avg:90.56ms +[2025-08-22 22:22:19] [Rank 0] step:9881/10000 train_time:894958ms step_avg:90.57ms +[2025-08-22 22:22:19] [Rank 0] step:9881/10000 train_time:894958ms step_avg:90.57ms +[2025-08-22 22:22:21] [Rank 0] step:9901/10000 train_time:896896ms step_avg:90.59ms +[2025-08-22 22:22:21] [Rank 0] step:9901/10000 train_time:896896ms step_avg:90.59ms +[2025-08-22 22:22:23] [Rank 0] step:9921/10000 train_time:898821ms step_avg:90.60ms +[2025-08-22 22:22:23] [Rank 0] step:9921/10000 train_time:898821ms step_avg:90.60ms +[2025-08-22 22:22:25] [Rank 0] step:9941/10000 train_time:900753ms step_avg:90.61ms +[2025-08-22 22:22:25] [Rank 0] step:9941/10000 train_time:900753ms step_avg:90.61ms +[2025-08-22 22:22:27] [Rank 0] step:9961/10000 train_time:902681ms step_avg:90.62ms +[2025-08-22 22:22:27] [Rank 0] step:9961/10000 train_time:902681ms step_avg:90.62ms +[2025-08-22 22:22:29] [Rank 0] step:9981/10000 train_time:904609ms step_avg:90.63ms +[2025-08-22 22:22:29] [Rank 0] step:9981/10000 train_time:904609ms step_avg:90.63ms +[2025-08-22 22:22:31] [Rank 0] step:10000/10000 train_time:906447ms step_avg:90.64ms +[2025-08-22 22:22:31] [Rank 0] step:10000/10000 train_time:906447ms step_avg:90.64ms +[2025-08-22 22:22:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:22:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:22:45] [Rank 0] PRINT: step:10000/10000 val_loss:3.6625 svd_entropy: attn_qk:H=0.6985,top10E=0.35,eRank=121.6,q75/q25=73.48 attn_vo:H=0.6292,top10E=0.41,eRank=93.6,q75/q25=91.22 mlp_w1:H=0.7908,top10E=0.26,eRank=239.7,q75/q25=6.56 mlp_w2:H=0.9470,top10E=0.06,eRank=553.0,q75/q25=4.20 vo_prod:H=0.5253,top10E=0.59,eRank=46.7,q75/q25=9787.10 train_time:906743ms step_avg:90.67ms +[2025-08-22 22:22:45] [Rank 0] PRINT: step:10000/10000 val_loss:3.6625 svd_entropy: attn_qk:H=0.6985,top10E=0.35,eRank=121.6,q75/q25=73.48 attn_vo:H=0.6292,top10E=0.41,eRank=93.6,q75/q25=91.22 mlp_w1:H=0.7908,top10E=0.26,eRank=239.7,q75/q25=6.56 mlp_w2:H=0.9470,top10E=0.06,eRank=553.0,q75/q25=4.20 vo_prod:H=0.5253,top10E=0.59,eRank=46.7,q75/q25=9787.10 train_time:906743ms step_avg:90.67ms +[2025-08-22 22:22:45] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 22:22:45 2025 --- +[2025-08-22 22:22:45] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 22:22:45 2025 --- +[2025-08-22 22:22:45] [Rank 0] PRINT: Peak memory allocated: 11478 MiB reserved: 16356 MiB +[2025-08-22 22:22:45] [Rank 0] PRINT: Peak memory allocated: 11478 MiB reserved: 16356 MiB diff --git a/logs_svd_gated/mode_7_param_gated_seed_41/config.json b/logs_svd_gated/mode_7_param_gated_seed_41/config.json new file mode 100644 index 0000000000000000000000000000000000000000..36cd08a3252799be1ae0a4ba6c54daad6cd453f0 --- /dev/null +++ b/logs_svd_gated/mode_7_param_gated_seed_41/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 41, + "optimizer_mode": 7, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "97089cfe-8607-4161-a6f0-0636786fcda7", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_7_param_gated_seed_41/training_log_97089cfe-8607-4161-a6f0-0636786fcda7.txt b/logs_svd_gated/mode_7_param_gated_seed_41/training_log_97089cfe-8607-4161-a6f0-0636786fcda7.txt new file mode 100644 index 0000000000000000000000000000000000000000..96d595fad2291e5981b30322de8715597fbb54e5 --- /dev/null +++ b/logs_svd_gated/mode_7_param_gated_seed_41/training_log_97089cfe-8607-4161-a6f0-0636786fcda7.txt @@ -0,0 +1,2926 @@ +[2025-08-22 11:55:38] [Rank 0] PRINT: --- Script Start: Fri Aug 22 11:55:38 2025 --- +[2025-08-22 11:55:38] [Rank 0] PRINT: --- Script Start: Fri Aug 22 11:55:38 2025 --- +[2025-08-22 11:55:38] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=7, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 11:55:38] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=7, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 11:55:38] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 11:55:38] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 11:55:38] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 11:55:38] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 11:55:38] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_7_param_gated_seed_41 +[2025-08-22 11:55:38] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_7_param_gated_seed_41 +[2025-08-22 11:55:38] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 11:55:38] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 11:55:38] [Rank 0] PRINT: Constructing model... +[2025-08-22 11:55:38] [Rank 0] PRINT: Constructing model... +[2025-08-22 11:55:40] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 11:55:40] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 11:55:40] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 11:55:40] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 11:55:40] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 11:55:40] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 11:55:40] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 7 +[2025-08-22 11:55:40] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 7 +[2025-08-22 11:55:40] [Rank 0] PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: 0.05). +[2025-08-22 11:55:40] [Rank 0] PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: 0.05). +[2025-08-22 11:55:40] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 11:55:40] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 11:55:40] [Rank 0] PRINT: Muon optimizer is active with 58 parameters. +[2025-08-22 11:55:40] [Rank 0] PRINT: Muon optimizer is active with 58 parameters. +[2025-08-22 11:55:40] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 11:55:40] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 11:55:41] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 11:55:41] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 11:55:41] [Rank 0] PRINT: Starting warmup... +[2025-08-22 11:55:41] [Rank 0] PRINT: Starting warmup... +[2025-08-22 11:56:26] [Rank 0] PRINT: Warmup complete. +[2025-08-22 11:56:26] [Rank 0] PRINT: Warmup complete. +[2025-08-22 11:56:26] [Rank 0] PRINT: Starting training... +[2025-08-22 11:56:26] [Rank 0] PRINT: Starting training... 
+[2025-08-22 11:56:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:56:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:56:44] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 11:56:44] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 11:56:46] [Rank 0] step:21/10000 train_time:1883ms step_avg:89.66ms +[2025-08-22 11:56:46] [Rank 0] step:21/10000 train_time:1883ms step_avg:89.66ms +[2025-08-22 11:56:48] [Rank 0] step:41/10000 train_time:3673ms step_avg:89.58ms +[2025-08-22 11:56:48] [Rank 0] step:41/10000 train_time:3673ms step_avg:89.58ms +[2025-08-22 11:56:50] [Rank 0] step:61/10000 train_time:5491ms step_avg:90.01ms +[2025-08-22 11:56:50] [Rank 0] step:61/10000 train_time:5491ms step_avg:90.01ms +[2025-08-22 11:56:52] [Rank 0] step:81/10000 train_time:7310ms step_avg:90.25ms +[2025-08-22 11:56:52] [Rank 0] step:81/10000 train_time:7310ms step_avg:90.25ms +[2025-08-22 11:56:54] [Rank 0] step:101/10000 train_time:9129ms step_avg:90.39ms +[2025-08-22 11:56:54] [Rank 0] step:101/10000 train_time:9129ms step_avg:90.39ms +[2025-08-22 11:56:56] [Rank 0] step:121/10000 train_time:10949ms step_avg:90.49ms +[2025-08-22 11:56:56] [Rank 0] step:121/10000 
train_time:10949ms step_avg:90.49ms +[2025-08-22 11:56:57] [Rank 0] step:141/10000 train_time:12769ms step_avg:90.56ms +[2025-08-22 11:56:57] [Rank 0] step:141/10000 train_time:12769ms step_avg:90.56ms +[2025-08-22 11:56:59] [Rank 0] step:161/10000 train_time:14589ms step_avg:90.62ms +[2025-08-22 11:56:59] [Rank 0] step:161/10000 train_time:14589ms step_avg:90.62ms +[2025-08-22 11:57:01] [Rank 0] step:181/10000 train_time:16411ms step_avg:90.67ms +[2025-08-22 11:57:01] [Rank 0] step:181/10000 train_time:16411ms step_avg:90.67ms +[2025-08-22 11:57:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:57:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:57:16] [Rank 0] PRINT: step:200/10000 val_loss:5.3112 svd_entropy: attn_qk:H=0.7100,top10E=0.36,eRank=152.9,q75/q25=34.30 attn_vo:H=0.8365,top10E=0.05,eRank=410.8,q75/q25=inf mlp_w1:H=0.9737,top10E=0.04,eRank=644.8,q75/q25=2.78 mlp_w2:H=0.9729,top10E=0.04,eRank=641.4,q75/q25=2.84 vo_prod:H=0.6916,top10E=0.10,eRank=225.4,q75/q25=inf train_time:18236ms step_avg:91.18ms +[2025-08-22 11:57:16] [Rank 0] PRINT: step:200/10000 val_loss:5.3112 svd_entropy: attn_qk:H=0.7100,top10E=0.36,eRank=152.9,q75/q25=34.30 attn_vo:H=0.8365,top10E=0.05,eRank=410.8,q75/q25=inf mlp_w1:H=0.9737,top10E=0.04,eRank=644.8,q75/q25=2.78 mlp_w2:H=0.9729,top10E=0.04,eRank=641.4,q75/q25=2.84 vo_prod:H=0.6916,top10E=0.10,eRank=225.4,q75/q25=inf train_time:18236ms step_avg:91.18ms +[2025-08-22 11:57:17] [Rank 0] step:201/10000 train_time:18260ms step_avg:90.85ms +[2025-08-22 11:57:17] [Rank 0] step:201/10000 train_time:18260ms step_avg:90.85ms +[2025-08-22 11:57:18] [Rank 0] step:221/10000 train_time:20082ms step_avg:90.87ms +[2025-08-22 11:57:18] [Rank 0] step:221/10000 train_time:20082ms step_avg:90.87ms +[2025-08-22 11:57:20] [Rank 0] step:241/10000 
train_time:21901ms step_avg:90.88ms +[2025-08-22 11:57:20] [Rank 0] step:241/10000 train_time:21901ms step_avg:90.88ms +[2025-08-22 11:57:22] [Rank 0] step:261/10000 train_time:23721ms step_avg:90.88ms +[2025-08-22 11:57:22] [Rank 0] step:261/10000 train_time:23721ms step_avg:90.88ms +[2025-08-22 11:57:24] [Rank 0] step:281/10000 train_time:25541ms step_avg:90.89ms +[2025-08-22 11:57:24] [Rank 0] step:281/10000 train_time:25541ms step_avg:90.89ms +[2025-08-22 11:57:26] [Rank 0] step:301/10000 train_time:27360ms step_avg:90.90ms +[2025-08-22 11:57:26] [Rank 0] step:301/10000 train_time:27360ms step_avg:90.90ms +[2025-08-22 11:57:28] [Rank 0] step:321/10000 train_time:29180ms step_avg:90.90ms +[2025-08-22 11:57:28] [Rank 0] step:321/10000 train_time:29180ms step_avg:90.90ms +[2025-08-22 11:57:29] [Rank 0] step:341/10000 train_time:31000ms step_avg:90.91ms +[2025-08-22 11:57:29] [Rank 0] step:341/10000 train_time:31000ms step_avg:90.91ms +[2025-08-22 11:57:31] [Rank 0] step:361/10000 train_time:32820ms step_avg:90.92ms +[2025-08-22 11:57:31] [Rank 0] step:361/10000 train_time:32820ms step_avg:90.92ms +[2025-08-22 11:57:33] [Rank 0] step:381/10000 train_time:34642ms step_avg:90.92ms +[2025-08-22 11:57:33] [Rank 0] step:381/10000 train_time:34642ms step_avg:90.92ms +[2025-08-22 11:57:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:57:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:57:48] [Rank 0] PRINT: step:400/10000 val_loss:4.9706 svd_entropy: attn_qk:H=0.7269,top10E=0.32,eRank=162.4,q75/q25=40.79 attn_vo:H=0.8383,top10E=0.05,eRank=416.4,q75/q25=inf mlp_w1:H=0.9729,top10E=0.04,eRank=641.5,q75/q25=2.81 mlp_w2:H=0.9717,top10E=0.04,eRank=636.4,q75/q25=2.87 vo_prod:H=0.6957,top10E=0.10,eRank=233.1,q75/q25=inf train_time:36465ms step_avg:91.16ms +[2025-08-22 11:57:48] [Rank 0] PRINT: step:400/10000 val_loss:4.9706 svd_entropy: attn_qk:H=0.7269,top10E=0.32,eRank=162.4,q75/q25=40.79 attn_vo:H=0.8383,top10E=0.05,eRank=416.4,q75/q25=inf mlp_w1:H=0.9729,top10E=0.04,eRank=641.5,q75/q25=2.81 mlp_w2:H=0.9717,top10E=0.04,eRank=636.4,q75/q25=2.87 vo_prod:H=0.6957,top10E=0.10,eRank=233.1,q75/q25=inf train_time:36465ms step_avg:91.16ms +[2025-08-22 11:57:49] [Rank 0] step:401/10000 train_time:36489ms step_avg:90.99ms +[2025-08-22 11:57:49] [Rank 0] step:401/10000 train_time:36489ms step_avg:90.99ms +[2025-08-22 11:57:50] [Rank 0] step:421/10000 train_time:38299ms step_avg:90.97ms +[2025-08-22 11:57:50] [Rank 0] step:421/10000 train_time:38299ms step_avg:90.97ms +[2025-08-22 11:57:52] [Rank 0] step:441/10000 train_time:40117ms step_avg:90.97ms +[2025-08-22 11:57:52] [Rank 0] step:441/10000 train_time:40117ms step_avg:90.97ms +[2025-08-22 11:57:54] [Rank 0] step:461/10000 train_time:41935ms step_avg:90.97ms +[2025-08-22 11:57:54] [Rank 0] step:461/10000 train_time:41935ms step_avg:90.97ms +[2025-08-22 11:57:56] [Rank 0] step:481/10000 train_time:43756ms step_avg:90.97ms +[2025-08-22 11:57:56] [Rank 0] step:481/10000 train_time:43756ms step_avg:90.97ms +[2025-08-22 11:57:58] [Rank 0] step:501/10000 train_time:45575ms step_avg:90.97ms +[2025-08-22 11:57:58] [Rank 0] step:501/10000 train_time:45575ms step_avg:90.97ms +[2025-08-22 11:57:59] [Rank 0] step:521/10000 train_time:47396ms step_avg:90.97ms +[2025-08-22 11:57:59] [Rank 0] step:521/10000 train_time:47396ms step_avg:90.97ms +[2025-08-22 11:58:01] [Rank 0] step:541/10000 train_time:49218ms 
step_avg:90.98ms +[2025-08-22 11:58:01] [Rank 0] step:541/10000 train_time:49218ms step_avg:90.98ms +[2025-08-22 11:58:03] [Rank 0] step:561/10000 train_time:51042ms step_avg:90.98ms +[2025-08-22 11:58:03] [Rank 0] step:561/10000 train_time:51042ms step_avg:90.98ms +[2025-08-22 11:58:05] [Rank 0] step:581/10000 train_time:52866ms step_avg:90.99ms +[2025-08-22 11:58:05] [Rank 0] step:581/10000 train_time:52866ms step_avg:90.99ms +[2025-08-22 11:58:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:58:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:58:20] [Rank 0] PRINT: step:600/10000 val_loss:4.7618 svd_entropy: attn_qk:H=0.7318,top10E=0.31,eRank=165.5,q75/q25=43.10 attn_vo:H=0.8366,top10E=0.05,eRank=411.2,q75/q25=inf mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.82 mlp_w2:H=0.9707,top10E=0.04,eRank=632.3,q75/q25=2.89 vo_prod:H=0.6929,top10E=0.10,eRank=228.2,q75/q25=inf train_time:54693ms step_avg:91.16ms +[2025-08-22 11:58:20] [Rank 0] PRINT: step:600/10000 val_loss:4.7618 svd_entropy: attn_qk:H=0.7318,top10E=0.31,eRank=165.5,q75/q25=43.10 attn_vo:H=0.8366,top10E=0.05,eRank=411.2,q75/q25=inf mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.82 mlp_w2:H=0.9707,top10E=0.04,eRank=632.3,q75/q25=2.89 vo_prod:H=0.6929,top10E=0.10,eRank=228.2,q75/q25=inf train_time:54693ms step_avg:91.16ms +[2025-08-22 11:58:21] [Rank 0] step:601/10000 train_time:54717ms step_avg:91.04ms +[2025-08-22 11:58:21] [Rank 0] step:601/10000 train_time:54717ms step_avg:91.04ms +[2025-08-22 11:58:22] [Rank 0] step:621/10000 train_time:56548ms step_avg:91.06ms +[2025-08-22 11:58:22] [Rank 0] step:621/10000 train_time:56548ms step_avg:91.06ms +[2025-08-22 11:58:24] [Rank 0] step:641/10000 train_time:58367ms step_avg:91.06ms +[2025-08-22 11:58:24] [Rank 0] step:641/10000 train_time:58367ms 
step_avg:91.06ms +[2025-08-22 11:58:26] [Rank 0] step:661/10000 train_time:60189ms step_avg:91.06ms +[2025-08-22 11:58:26] [Rank 0] step:661/10000 train_time:60189ms step_avg:91.06ms +[2025-08-22 11:58:28] [Rank 0] step:681/10000 train_time:62012ms step_avg:91.06ms +[2025-08-22 11:58:28] [Rank 0] step:681/10000 train_time:62012ms step_avg:91.06ms +[2025-08-22 11:58:30] [Rank 0] step:701/10000 train_time:63836ms step_avg:91.06ms +[2025-08-22 11:58:30] [Rank 0] step:701/10000 train_time:63836ms step_avg:91.06ms +[2025-08-22 11:58:31] [Rank 0] step:721/10000 train_time:65661ms step_avg:91.07ms +[2025-08-22 11:58:31] [Rank 0] step:721/10000 train_time:65661ms step_avg:91.07ms +[2025-08-22 11:58:33] [Rank 0] step:741/10000 train_time:67486ms step_avg:91.07ms +[2025-08-22 11:58:33] [Rank 0] step:741/10000 train_time:67486ms step_avg:91.07ms +[2025-08-22 11:58:35] [Rank 0] step:761/10000 train_time:69326ms step_avg:91.10ms +[2025-08-22 11:58:35] [Rank 0] step:761/10000 train_time:69326ms step_avg:91.10ms +[2025-08-22 11:58:37] [Rank 0] step:781/10000 train_time:71166ms step_avg:91.12ms +[2025-08-22 11:58:37] [Rank 0] step:781/10000 train_time:71166ms step_avg:91.12ms +[2025-08-22 11:58:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:58:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:58:52] [Rank 0] PRINT: step:800/10000 val_loss:4.4849 svd_entropy: attn_qk:H=0.7350,top10E=0.30,eRank=167.7,q75/q25=43.44 attn_vo:H=0.8355,top10E=0.05,eRank=407.9,q75/q25=inf mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.81 mlp_w2:H=0.9702,top10E=0.04,eRank=630.0,q75/q25=2.89 vo_prod:H=0.6912,top10E=0.10,eRank=225.2,q75/q25=inf train_time:73008ms step_avg:91.26ms +[2025-08-22 11:58:52] [Rank 0] PRINT: step:800/10000 val_loss:4.4849 svd_entropy: attn_qk:H=0.7350,top10E=0.30,eRank=167.7,q75/q25=43.44 attn_vo:H=0.8355,top10E=0.05,eRank=407.9,q75/q25=inf mlp_w1:H=0.9725,top10E=0.04,eRank=639.8,q75/q25=2.81 mlp_w2:H=0.9702,top10E=0.04,eRank=630.0,q75/q25=2.89 vo_prod:H=0.6912,top10E=0.10,eRank=225.2,q75/q25=inf train_time:73008ms step_avg:91.26ms +[2025-08-22 11:58:53] [Rank 0] step:801/10000 train_time:73031ms step_avg:91.17ms +[2025-08-22 11:58:53] [Rank 0] step:801/10000 train_time:73031ms step_avg:91.17ms +[2025-08-22 11:58:54] [Rank 0] step:821/10000 train_time:74856ms step_avg:91.18ms +[2025-08-22 11:58:54] [Rank 0] step:821/10000 train_time:74856ms step_avg:91.18ms +[2025-08-22 11:58:56] [Rank 0] step:841/10000 train_time:76690ms step_avg:91.19ms +[2025-08-22 11:58:56] [Rank 0] step:841/10000 train_time:76690ms step_avg:91.19ms +[2025-08-22 11:58:58] [Rank 0] step:861/10000 train_time:78526ms step_avg:91.20ms +[2025-08-22 11:58:58] [Rank 0] step:861/10000 train_time:78526ms step_avg:91.20ms +[2025-08-22 11:59:00] [Rank 0] step:881/10000 train_time:80359ms step_avg:91.21ms +[2025-08-22 11:59:00] [Rank 0] step:881/10000 train_time:80359ms step_avg:91.21ms +[2025-08-22 11:59:02] [Rank 0] step:901/10000 train_time:82195ms step_avg:91.23ms +[2025-08-22 11:59:02] [Rank 0] step:901/10000 train_time:82195ms step_avg:91.23ms +[2025-08-22 11:59:04] [Rank 0] step:921/10000 train_time:84031ms step_avg:91.24ms +[2025-08-22 11:59:04] [Rank 0] step:921/10000 train_time:84031ms step_avg:91.24ms +[2025-08-22 11:59:05] [Rank 0] step:941/10000 train_time:85866ms 
step_avg:91.25ms +[2025-08-22 11:59:05] [Rank 0] step:941/10000 train_time:85866ms step_avg:91.25ms +[2025-08-22 11:59:07] [Rank 0] step:961/10000 train_time:87700ms step_avg:91.26ms +[2025-08-22 11:59:07] [Rank 0] step:961/10000 train_time:87700ms step_avg:91.26ms +[2025-08-22 11:59:09] [Rank 0] step:981/10000 train_time:89536ms step_avg:91.27ms +[2025-08-22 11:59:09] [Rank 0] step:981/10000 train_time:89536ms step_avg:91.27ms +[2025-08-22 11:59:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:59:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:59:25] [Rank 0] PRINT: step:1000/10000 val_loss:4.3595 svd_entropy: attn_qk:H=0.7377,top10E=0.30,eRank=169.6,q75/q25=43.44 attn_vo:H=0.8350,top10E=0.05,eRank=406.5,q75/q25=inf mlp_w1:H=0.9726,top10E=0.04,eRank=640.3,q75/q25=2.80 mlp_w2:H=0.9698,top10E=0.04,eRank=628.7,q75/q25=2.88 vo_prod:H=0.6909,top10E=0.10,eRank=224.6,q75/q25=inf train_time:91377ms step_avg:91.38ms +[2025-08-22 11:59:25] [Rank 0] PRINT: step:1000/10000 val_loss:4.3595 svd_entropy: attn_qk:H=0.7377,top10E=0.30,eRank=169.6,q75/q25=43.44 attn_vo:H=0.8350,top10E=0.05,eRank=406.5,q75/q25=inf mlp_w1:H=0.9726,top10E=0.04,eRank=640.3,q75/q25=2.80 mlp_w2:H=0.9698,top10E=0.04,eRank=628.7,q75/q25=2.88 vo_prod:H=0.6909,top10E=0.10,eRank=224.6,q75/q25=inf train_time:91377ms step_avg:91.38ms +[2025-08-22 11:59:25] [Rank 0] step:1001/10000 train_time:91400ms step_avg:91.31ms +[2025-08-22 11:59:25] [Rank 0] step:1001/10000 train_time:91400ms step_avg:91.31ms +[2025-08-22 11:59:26] [Rank 0] step:1021/10000 train_time:93226ms step_avg:91.31ms +[2025-08-22 11:59:26] [Rank 0] step:1021/10000 train_time:93226ms step_avg:91.31ms +[2025-08-22 11:59:28] [Rank 0] step:1041/10000 train_time:95057ms step_avg:91.31ms +[2025-08-22 11:59:28] [Rank 0] step:1041/10000 train_time:95057ms 
step_avg:91.31ms +[2025-08-22 11:59:30] [Rank 0] step:1061/10000 train_time:96891ms step_avg:91.32ms +[2025-08-22 11:59:30] [Rank 0] step:1061/10000 train_time:96891ms step_avg:91.32ms +[2025-08-22 11:59:32] [Rank 0] step:1081/10000 train_time:98724ms step_avg:91.33ms +[2025-08-22 11:59:32] [Rank 0] step:1081/10000 train_time:98724ms step_avg:91.33ms +[2025-08-22 11:59:34] [Rank 0] step:1101/10000 train_time:100561ms step_avg:91.34ms +[2025-08-22 11:59:34] [Rank 0] step:1101/10000 train_time:100561ms step_avg:91.34ms +[2025-08-22 11:59:36] [Rank 0] step:1121/10000 train_time:102396ms step_avg:91.34ms +[2025-08-22 11:59:36] [Rank 0] step:1121/10000 train_time:102396ms step_avg:91.34ms +[2025-08-22 11:59:37] [Rank 0] step:1141/10000 train_time:104234ms step_avg:91.35ms +[2025-08-22 11:59:37] [Rank 0] step:1141/10000 train_time:104234ms step_avg:91.35ms +[2025-08-22 11:59:39] [Rank 0] step:1161/10000 train_time:106074ms step_avg:91.36ms +[2025-08-22 11:59:39] [Rank 0] step:1161/10000 train_time:106074ms step_avg:91.36ms +[2025-08-22 11:59:41] [Rank 0] step:1181/10000 train_time:107912ms step_avg:91.37ms +[2025-08-22 11:59:41] [Rank 0] step:1181/10000 train_time:107912ms step_avg:91.37ms +[2025-08-22 11:59:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 11:59:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 11:59:57] [Rank 0] PRINT: step:1200/10000 val_loss:4.2691 svd_entropy: attn_qk:H=0.7398,top10E=0.29,eRank=171.0,q75/q25=44.28 attn_vo:H=0.8347,top10E=0.05,eRank=405.6,q75/q25=inf mlp_w1:H=0.9726,top10E=0.04,eRank=640.3,q75/q25=2.79 mlp_w2:H=0.9696,top10E=0.05,eRank=627.6,q75/q25=2.87 vo_prod:H=0.6907,top10E=0.10,eRank=224.3,q75/q25=inf train_time:109754ms step_avg:91.46ms +[2025-08-22 11:59:57] [Rank 0] PRINT: step:1200/10000 val_loss:4.2691 svd_entropy: attn_qk:H=0.7398,top10E=0.29,eRank=171.0,q75/q25=44.28 attn_vo:H=0.8347,top10E=0.05,eRank=405.6,q75/q25=inf mlp_w1:H=0.9726,top10E=0.04,eRank=640.3,q75/q25=2.79 mlp_w2:H=0.9696,top10E=0.05,eRank=627.6,q75/q25=2.87 vo_prod:H=0.6907,top10E=0.10,eRank=224.3,q75/q25=inf train_time:109754ms step_avg:91.46ms +[2025-08-22 11:59:57] [Rank 0] step:1201/10000 train_time:109778ms step_avg:91.41ms +[2025-08-22 11:59:57] [Rank 0] step:1201/10000 train_time:109778ms step_avg:91.41ms +[2025-08-22 11:59:59] [Rank 0] step:1221/10000 train_time:111607ms step_avg:91.41ms +[2025-08-22 11:59:59] [Rank 0] step:1221/10000 train_time:111607ms step_avg:91.41ms +[2025-08-22 12:00:00] [Rank 0] step:1241/10000 train_time:113445ms step_avg:91.41ms +[2025-08-22 12:00:00] [Rank 0] step:1241/10000 train_time:113445ms step_avg:91.41ms +[2025-08-22 12:00:02] [Rank 0] step:1261/10000 train_time:115281ms step_avg:91.42ms +[2025-08-22 12:00:02] [Rank 0] step:1261/10000 train_time:115281ms step_avg:91.42ms +[2025-08-22 12:00:04] [Rank 0] step:1281/10000 train_time:117120ms step_avg:91.43ms +[2025-08-22 12:00:04] [Rank 0] step:1281/10000 train_time:117120ms step_avg:91.43ms +[2025-08-22 12:00:06] [Rank 0] step:1301/10000 train_time:118959ms step_avg:91.44ms +[2025-08-22 12:00:06] [Rank 0] step:1301/10000 train_time:118959ms step_avg:91.44ms +[2025-08-22 12:00:08] [Rank 0] step:1321/10000 train_time:120800ms step_avg:91.45ms +[2025-08-22 12:00:08] [Rank 0] step:1321/10000 train_time:120800ms step_avg:91.45ms +[2025-08-22 12:00:10] [Rank 0] 
step:1341/10000 train_time:122643ms step_avg:91.46ms +[2025-08-22 12:00:10] [Rank 0] step:1341/10000 train_time:122643ms step_avg:91.46ms +[2025-08-22 12:00:11] [Rank 0] step:1361/10000 train_time:124487ms step_avg:91.47ms +[2025-08-22 12:00:11] [Rank 0] step:1361/10000 train_time:124487ms step_avg:91.47ms +[2025-08-22 12:00:13] [Rank 0] step:1381/10000 train_time:126329ms step_avg:91.48ms +[2025-08-22 12:00:13] [Rank 0] step:1381/10000 train_time:126329ms step_avg:91.48ms +[2025-08-22 12:00:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:00:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:00:29] [Rank 0] PRINT: step:1400/10000 val_loss:4.2162 svd_entropy: attn_qk:H=0.7416,top10E=0.29,eRank=172.3,q75/q25=44.94 attn_vo:H=0.8345,top10E=0.05,eRank=405.1,q75/q25=inf mlp_w1:H=0.9726,top10E=0.04,eRank=640.5,q75/q25=2.78 mlp_w2:H=0.9694,top10E=0.05,eRank=626.8,q75/q25=2.87 vo_prod:H=0.6906,top10E=0.10,eRank=224.2,q75/q25=inf train_time:128174ms step_avg:91.55ms +[2025-08-22 12:00:29] [Rank 0] PRINT: step:1400/10000 val_loss:4.2162 svd_entropy: attn_qk:H=0.7416,top10E=0.29,eRank=172.3,q75/q25=44.94 attn_vo:H=0.8345,top10E=0.05,eRank=405.1,q75/q25=inf mlp_w1:H=0.9726,top10E=0.04,eRank=640.5,q75/q25=2.78 mlp_w2:H=0.9694,top10E=0.05,eRank=626.8,q75/q25=2.87 vo_prod:H=0.6906,top10E=0.10,eRank=224.2,q75/q25=inf train_time:128174ms step_avg:91.55ms +[2025-08-22 12:00:29] [Rank 0] step:1401/10000 train_time:128198ms step_avg:91.50ms +[2025-08-22 12:00:29] [Rank 0] step:1401/10000 train_time:128198ms step_avg:91.50ms +[2025-08-22 12:00:31] [Rank 0] step:1421/10000 train_time:130020ms step_avg:91.50ms +[2025-08-22 12:00:31] [Rank 0] step:1421/10000 train_time:130020ms step_avg:91.50ms +[2025-08-22 12:00:33] [Rank 0] step:1441/10000 train_time:131853ms step_avg:91.50ms +[2025-08-22 
12:00:33] [Rank 0] step:1441/10000 train_time:131853ms step_avg:91.50ms +[2025-08-22 12:00:34] [Rank 0] step:1461/10000 train_time:133689ms step_avg:91.50ms +[2025-08-22 12:00:34] [Rank 0] step:1461/10000 train_time:133689ms step_avg:91.50ms +[2025-08-22 12:00:36] [Rank 0] step:1481/10000 train_time:135525ms step_avg:91.51ms +[2025-08-22 12:00:36] [Rank 0] step:1481/10000 train_time:135525ms step_avg:91.51ms +[2025-08-22 12:00:38] [Rank 0] step:1501/10000 train_time:137370ms step_avg:91.52ms +[2025-08-22 12:00:38] [Rank 0] step:1501/10000 train_time:137370ms step_avg:91.52ms +[2025-08-22 12:00:40] [Rank 0] step:1521/10000 train_time:139219ms step_avg:91.53ms +[2025-08-22 12:00:40] [Rank 0] step:1521/10000 train_time:139219ms step_avg:91.53ms +[2025-08-22 12:00:42] [Rank 0] step:1541/10000 train_time:141067ms step_avg:91.54ms +[2025-08-22 12:00:42] [Rank 0] step:1541/10000 train_time:141067ms step_avg:91.54ms +[2025-08-22 12:00:44] [Rank 0] step:1561/10000 train_time:142916ms step_avg:91.55ms +[2025-08-22 12:00:44] [Rank 0] step:1561/10000 train_time:142916ms step_avg:91.55ms +[2025-08-22 12:00:45] [Rank 0] step:1581/10000 train_time:144766ms step_avg:91.57ms +[2025-08-22 12:00:45] [Rank 0] step:1581/10000 train_time:144766ms step_avg:91.57ms +[2025-08-22 12:00:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:00:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:01:01] [Rank 0] PRINT: step:1600/10000 val_loss:4.1291 svd_entropy: attn_qk:H=0.7432,top10E=0.29,eRank=173.5,q75/q25=45.50 attn_vo:H=0.8343,top10E=0.05,eRank=404.6,q75/q25=inf mlp_w1:H=0.9727,top10E=0.04,eRank=640.6,q75/q25=2.77 mlp_w2:H=0.9692,top10E=0.05,eRank=626.1,q75/q25=2.86 vo_prod:H=0.6903,top10E=0.10,eRank=223.7,q75/q25=inf train_time:146618ms step_avg:91.64ms +[2025-08-22 12:01:01] [Rank 0] PRINT: step:1600/10000 val_loss:4.1291 svd_entropy: attn_qk:H=0.7432,top10E=0.29,eRank=173.5,q75/q25=45.50 attn_vo:H=0.8343,top10E=0.05,eRank=404.6,q75/q25=inf mlp_w1:H=0.9727,top10E=0.04,eRank=640.6,q75/q25=2.77 mlp_w2:H=0.9692,top10E=0.05,eRank=626.1,q75/q25=2.86 vo_prod:H=0.6903,top10E=0.10,eRank=223.7,q75/q25=inf train_time:146618ms step_avg:91.64ms +[2025-08-22 12:01:01] [Rank 0] step:1601/10000 train_time:146642ms step_avg:91.59ms +[2025-08-22 12:01:01] [Rank 0] step:1601/10000 train_time:146642ms step_avg:91.59ms +[2025-08-22 12:01:03] [Rank 0] step:1621/10000 train_time:148486ms step_avg:91.60ms +[2025-08-22 12:01:03] [Rank 0] step:1621/10000 train_time:148486ms step_avg:91.60ms +[2025-08-22 12:01:05] [Rank 0] step:1641/10000 train_time:150333ms step_avg:91.61ms +[2025-08-22 12:01:05] [Rank 0] step:1641/10000 train_time:150333ms step_avg:91.61ms +[2025-08-22 12:01:07] [Rank 0] step:1661/10000 train_time:152178ms step_avg:91.62ms +[2025-08-22 12:01:07] [Rank 0] step:1661/10000 train_time:152178ms step_avg:91.62ms +[2025-08-22 12:01:08] [Rank 0] step:1681/10000 train_time:154025ms step_avg:91.63ms +[2025-08-22 12:01:08] [Rank 0] step:1681/10000 train_time:154025ms step_avg:91.63ms +[2025-08-22 12:01:10] [Rank 0] step:1701/10000 train_time:155870ms step_avg:91.63ms +[2025-08-22 12:01:10] [Rank 0] step:1701/10000 train_time:155870ms step_avg:91.63ms +[2025-08-22 12:01:12] [Rank 0] step:1721/10000 train_time:157717ms step_avg:91.64ms +[2025-08-22 12:01:12] [Rank 0] step:1721/10000 train_time:157717ms step_avg:91.64ms +[2025-08-22 12:01:14] [Rank 0] 
step:1741/10000 train_time:159564ms step_avg:91.65ms +[2025-08-22 12:01:14] [Rank 0] step:1741/10000 train_time:159564ms step_avg:91.65ms +[2025-08-22 12:01:16] [Rank 0] step:1761/10000 train_time:161408ms step_avg:91.66ms +[2025-08-22 12:01:16] [Rank 0] step:1761/10000 train_time:161408ms step_avg:91.66ms +[2025-08-22 12:01:18] [Rank 0] step:1781/10000 train_time:163254ms step_avg:91.66ms +[2025-08-22 12:01:18] [Rank 0] step:1781/10000 train_time:163254ms step_avg:91.66ms +[2025-08-22 12:01:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:01:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:01:33] [Rank 0] PRINT: step:1800/10000 val_loss:4.0739 svd_entropy: attn_qk:H=0.7447,top10E=0.28,eRank=174.6,q75/q25=46.20 attn_vo:H=0.8343,top10E=0.05,eRank=404.4,q75/q25=inf mlp_w1:H=0.9727,top10E=0.04,eRank=640.8,q75/q25=2.77 mlp_w2:H=0.9691,top10E=0.05,eRank=625.5,q75/q25=2.86 vo_prod:H=0.6902,top10E=0.10,eRank=223.7,q75/q25=inf train_time:165106ms step_avg:91.73ms +[2025-08-22 12:01:33] [Rank 0] PRINT: step:1800/10000 val_loss:4.0739 svd_entropy: attn_qk:H=0.7447,top10E=0.28,eRank=174.6,q75/q25=46.20 attn_vo:H=0.8343,top10E=0.05,eRank=404.4,q75/q25=inf mlp_w1:H=0.9727,top10E=0.04,eRank=640.8,q75/q25=2.77 mlp_w2:H=0.9691,top10E=0.05,eRank=625.5,q75/q25=2.86 vo_prod:H=0.6902,top10E=0.10,eRank=223.7,q75/q25=inf train_time:165106ms step_avg:91.73ms +[2025-08-22 12:01:33] [Rank 0] step:1801/10000 train_time:165128ms step_avg:91.69ms +[2025-08-22 12:01:33] [Rank 0] step:1801/10000 train_time:165128ms step_avg:91.69ms +[2025-08-22 12:01:35] [Rank 0] step:1821/10000 train_time:166973ms step_avg:91.69ms +[2025-08-22 12:01:35] [Rank 0] step:1821/10000 train_time:166973ms step_avg:91.69ms +[2025-08-22 12:01:37] [Rank 0] step:1841/10000 train_time:168816ms step_avg:91.70ms +[2025-08-22 
12:01:37] [Rank 0] step:1841/10000 train_time:168816ms step_avg:91.70ms +[2025-08-22 12:01:39] [Rank 0] step:1861/10000 train_time:170660ms step_avg:91.70ms +[2025-08-22 12:01:39] [Rank 0] step:1861/10000 train_time:170660ms step_avg:91.70ms +[2025-08-22 12:01:41] [Rank 0] step:1881/10000 train_time:172505ms step_avg:91.71ms +[2025-08-22 12:01:41] [Rank 0] step:1881/10000 train_time:172505ms step_avg:91.71ms +[2025-08-22 12:01:43] [Rank 0] step:1901/10000 train_time:174351ms step_avg:91.72ms +[2025-08-22 12:01:43] [Rank 0] step:1901/10000 train_time:174351ms step_avg:91.72ms +[2025-08-22 12:01:44] [Rank 0] step:1921/10000 train_time:176197ms step_avg:91.72ms +[2025-08-22 12:01:44] [Rank 0] step:1921/10000 train_time:176197ms step_avg:91.72ms +[2025-08-22 12:01:46] [Rank 0] step:1941/10000 train_time:178043ms step_avg:91.73ms +[2025-08-22 12:01:46] [Rank 0] step:1941/10000 train_time:178043ms step_avg:91.73ms +[2025-08-22 12:01:48] [Rank 0] step:1961/10000 train_time:179891ms step_avg:91.73ms +[2025-08-22 12:01:48] [Rank 0] step:1961/10000 train_time:179891ms step_avg:91.73ms +[2025-08-22 12:01:50] [Rank 0] step:1981/10000 train_time:181738ms step_avg:91.74ms +[2025-08-22 12:01:50] [Rank 0] step:1981/10000 train_time:181738ms step_avg:91.74ms +[2025-08-22 12:01:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:01:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:02:05] [Rank 0] PRINT: step:2000/10000 val_loss:4.0448 svd_entropy: attn_qk:H=0.7461,top10E=0.28,eRank=175.7,q75/q25=46.70 attn_vo:H=0.8342,top10E=0.05,eRank=404.2,q75/q25=inf mlp_w1:H=0.9728,top10E=0.04,eRank=641.0,q75/q25=2.76 mlp_w2:H=0.9690,top10E=0.05,eRank=625.1,q75/q25=2.85 vo_prod:H=0.6902,top10E=0.10,eRank=223.8,q75/q25=inf train_time:183589ms step_avg:91.79ms +[2025-08-22 12:02:05] [Rank 0] PRINT: step:2000/10000 val_loss:4.0448 svd_entropy: attn_qk:H=0.7461,top10E=0.28,eRank=175.7,q75/q25=46.70 attn_vo:H=0.8342,top10E=0.05,eRank=404.2,q75/q25=inf mlp_w1:H=0.9728,top10E=0.04,eRank=641.0,q75/q25=2.76 mlp_w2:H=0.9690,top10E=0.05,eRank=625.1,q75/q25=2.85 vo_prod:H=0.6902,top10E=0.10,eRank=223.8,q75/q25=inf train_time:183589ms step_avg:91.79ms +[2025-08-22 12:02:06] [Rank 0] step:2001/10000 train_time:183613ms step_avg:91.76ms +[2025-08-22 12:02:06] [Rank 0] step:2001/10000 train_time:183613ms step_avg:91.76ms +[2025-08-22 12:02:07] [Rank 0] step:2021/10000 train_time:185466ms step_avg:91.77ms +[2025-08-22 12:02:07] [Rank 0] step:2021/10000 train_time:185466ms step_avg:91.77ms +[2025-08-22 12:02:09] [Rank 0] step:2041/10000 train_time:187370ms step_avg:91.80ms +[2025-08-22 12:02:09] [Rank 0] step:2041/10000 train_time:187370ms step_avg:91.80ms +[2025-08-22 12:02:11] [Rank 0] step:2061/10000 train_time:189214ms step_avg:91.81ms +[2025-08-22 12:02:11] [Rank 0] step:2061/10000 train_time:189214ms step_avg:91.81ms +[2025-08-22 12:02:13] [Rank 0] step:2081/10000 train_time:191062ms step_avg:91.81ms +[2025-08-22 12:02:13] [Rank 0] step:2081/10000 train_time:191062ms step_avg:91.81ms +[2025-08-22 12:02:15] [Rank 0] step:2101/10000 train_time:192908ms step_avg:91.82ms +[2025-08-22 12:02:15] [Rank 0] step:2101/10000 train_time:192908ms step_avg:91.82ms +[2025-08-22 12:02:17] [Rank 0] step:2121/10000 train_time:194756ms step_avg:91.82ms +[2025-08-22 12:02:17] [Rank 0] step:2121/10000 train_time:194756ms step_avg:91.82ms +[2025-08-22 12:02:19] [Rank 0] 
step:2141/10000 train_time:196604ms step_avg:91.83ms +[2025-08-22 12:02:19] [Rank 0] step:2141/10000 train_time:196604ms step_avg:91.83ms +[2025-08-22 12:02:20] [Rank 0] step:2161/10000 train_time:198452ms step_avg:91.83ms +[2025-08-22 12:02:20] [Rank 0] step:2161/10000 train_time:198452ms step_avg:91.83ms +[2025-08-22 12:02:22] [Rank 0] step:2181/10000 train_time:200302ms step_avg:91.84ms +[2025-08-22 12:02:22] [Rank 0] step:2181/10000 train_time:200302ms step_avg:91.84ms +[2025-08-22 12:02:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:02:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:02:38] [Rank 0] PRINT: step:2200/10000 val_loss:4.0063 svd_entropy: attn_qk:H=0.7474,top10E=0.28,eRank=176.7,q75/q25=47.04 attn_vo:H=0.8341,top10E=0.05,eRank=404.1,q75/q25=inf mlp_w1:H=0.9728,top10E=0.04,eRank=641.2,q75/q25=2.75 mlp_w2:H=0.9689,top10E=0.05,eRank=624.8,q75/q25=2.85 vo_prod:H=0.6903,top10E=0.10,eRank=223.9,q75/q25=inf train_time:202153ms step_avg:91.89ms +[2025-08-22 12:02:38] [Rank 0] PRINT: step:2200/10000 val_loss:4.0063 svd_entropy: attn_qk:H=0.7474,top10E=0.28,eRank=176.7,q75/q25=47.04 attn_vo:H=0.8341,top10E=0.05,eRank=404.1,q75/q25=inf mlp_w1:H=0.9728,top10E=0.04,eRank=641.2,q75/q25=2.75 mlp_w2:H=0.9689,top10E=0.05,eRank=624.8,q75/q25=2.85 vo_prod:H=0.6903,top10E=0.10,eRank=223.9,q75/q25=inf train_time:202153ms step_avg:91.89ms +[2025-08-22 12:02:38] [Rank 0] step:2201/10000 train_time:202176ms step_avg:91.86ms +[2025-08-22 12:02:38] [Rank 0] step:2201/10000 train_time:202176ms step_avg:91.86ms +[2025-08-22 12:02:40] [Rank 0] step:2221/10000 train_time:204021ms step_avg:91.86ms +[2025-08-22 12:02:40] [Rank 0] step:2221/10000 train_time:204021ms step_avg:91.86ms +[2025-08-22 12:02:42] [Rank 0] step:2241/10000 train_time:205902ms step_avg:91.88ms +[2025-08-22 
12:02:42] [Rank 0] step:2241/10000 train_time:205902ms step_avg:91.88ms +[2025-08-22 12:02:44] [Rank 0] step:2261/10000 train_time:207790ms step_avg:91.90ms +[2025-08-22 12:02:44] [Rank 0] step:2261/10000 train_time:207790ms step_avg:91.90ms +[2025-08-22 12:02:45] [Rank 0] step:2281/10000 train_time:209681ms step_avg:91.93ms +[2025-08-22 12:02:45] [Rank 0] step:2281/10000 train_time:209681ms step_avg:91.93ms +[2025-08-22 12:02:47] [Rank 0] step:2301/10000 train_time:211571ms step_avg:91.95ms +[2025-08-22 12:02:47] [Rank 0] step:2301/10000 train_time:211571ms step_avg:91.95ms +[2025-08-22 12:02:49] [Rank 0] step:2321/10000 train_time:213461ms step_avg:91.97ms +[2025-08-22 12:02:49] [Rank 0] step:2321/10000 train_time:213461ms step_avg:91.97ms +[2025-08-22 12:02:51] [Rank 0] step:2341/10000 train_time:215353ms step_avg:91.99ms +[2025-08-22 12:02:51] [Rank 0] step:2341/10000 train_time:215353ms step_avg:91.99ms +[2025-08-22 12:02:53] [Rank 0] step:2361/10000 train_time:217244ms step_avg:92.01ms +[2025-08-22 12:02:53] [Rank 0] step:2361/10000 train_time:217244ms step_avg:92.01ms +[2025-08-22 12:02:55] [Rank 0] step:2381/10000 train_time:219139ms step_avg:92.04ms +[2025-08-22 12:02:55] [Rank 0] step:2381/10000 train_time:219139ms step_avg:92.04ms +[2025-08-22 12:02:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:02:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:03:10] [Rank 0] PRINT: step:2400/10000 val_loss:3.9442 svd_entropy: attn_qk:H=0.7487,top10E=0.28,eRank=177.7,q75/q25=46.77 attn_vo:H=0.8341,top10E=0.05,eRank=404.1,q75/q25=inf mlp_w1:H=0.9729,top10E=0.04,eRank=641.5,q75/q25=2.75 mlp_w2:H=0.9688,top10E=0.05,eRank=624.4,q75/q25=2.84 vo_prod:H=0.6902,top10E=0.10,eRank=224.0,q75/q25=inf train_time:221033ms step_avg:92.10ms +[2025-08-22 12:03:10] [Rank 0] PRINT: step:2400/10000 val_loss:3.9442 svd_entropy: attn_qk:H=0.7487,top10E=0.28,eRank=177.7,q75/q25=46.77 attn_vo:H=0.8341,top10E=0.05,eRank=404.1,q75/q25=inf mlp_w1:H=0.9729,top10E=0.04,eRank=641.5,q75/q25=2.75 mlp_w2:H=0.9688,top10E=0.05,eRank=624.4,q75/q25=2.84 vo_prod:H=0.6902,top10E=0.10,eRank=224.0,q75/q25=inf train_time:221033ms step_avg:92.10ms +[2025-08-22 12:03:11] [Rank 0] step:2401/10000 train_time:221057ms step_avg:92.07ms +[2025-08-22 12:03:11] [Rank 0] step:2401/10000 train_time:221057ms step_avg:92.07ms +[2025-08-22 12:03:12] [Rank 0] step:2421/10000 train_time:222936ms step_avg:92.08ms +[2025-08-22 12:03:12] [Rank 0] step:2421/10000 train_time:222936ms step_avg:92.08ms +[2025-08-22 12:03:14] [Rank 0] step:2441/10000 train_time:224823ms step_avg:92.10ms +[2025-08-22 12:03:14] [Rank 0] step:2441/10000 train_time:224823ms step_avg:92.10ms +[2025-08-22 12:03:16] [Rank 0] step:2461/10000 train_time:226711ms step_avg:92.12ms +[2025-08-22 12:03:16] [Rank 0] step:2461/10000 train_time:226711ms step_avg:92.12ms +[2025-08-22 12:03:18] [Rank 0] step:2481/10000 train_time:228603ms step_avg:92.14ms +[2025-08-22 12:03:18] [Rank 0] step:2481/10000 train_time:228603ms step_avg:92.14ms +[2025-08-22 12:03:20] [Rank 0] step:2501/10000 train_time:230492ms step_avg:92.16ms +[2025-08-22 12:03:20] [Rank 0] step:2501/10000 train_time:230492ms step_avg:92.16ms +[2025-08-22 12:03:22] [Rank 0] step:2521/10000 train_time:232383ms step_avg:92.18ms +[2025-08-22 12:03:22] [Rank 0] step:2521/10000 train_time:232383ms step_avg:92.18ms +[2025-08-22 12:03:24] [Rank 0] 
step:2541/10000 train_time:234275ms step_avg:92.20ms +[2025-08-22 12:03:24] [Rank 0] step:2541/10000 train_time:234275ms step_avg:92.20ms +[2025-08-22 12:03:26] [Rank 0] step:2561/10000 train_time:236169ms step_avg:92.22ms +[2025-08-22 12:03:26] [Rank 0] step:2561/10000 train_time:236169ms step_avg:92.22ms +[2025-08-22 12:03:28] [Rank 0] step:2581/10000 train_time:238061ms step_avg:92.24ms +[2025-08-22 12:03:28] [Rank 0] step:2581/10000 train_time:238061ms step_avg:92.24ms +[2025-08-22 12:03:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:03:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:03:43] [Rank 0] PRINT: step:2600/10000 val_loss:3.9210 svd_entropy: attn_qk:H=0.7498,top10E=0.28,eRank=178.6,q75/q25=47.16 attn_vo:H=0.8342,top10E=0.05,eRank=404.2,q75/q25=inf mlp_w1:H=0.9729,top10E=0.04,eRank=641.7,q75/q25=2.74 mlp_w2:H=0.9688,top10E=0.05,eRank=624.2,q75/q25=2.84 vo_prod:H=0.6904,top10E=0.10,eRank=224.2,q75/q25=inf train_time:239957ms step_avg:92.29ms +[2025-08-22 12:03:43] [Rank 0] PRINT: step:2600/10000 val_loss:3.9210 svd_entropy: attn_qk:H=0.7498,top10E=0.28,eRank=178.6,q75/q25=47.16 attn_vo:H=0.8342,top10E=0.05,eRank=404.2,q75/q25=inf mlp_w1:H=0.9729,top10E=0.04,eRank=641.7,q75/q25=2.74 mlp_w2:H=0.9688,top10E=0.05,eRank=624.2,q75/q25=2.84 vo_prod:H=0.6904,top10E=0.10,eRank=224.2,q75/q25=inf train_time:239957ms step_avg:92.29ms +[2025-08-22 12:03:43] [Rank 0] step:2601/10000 train_time:239982ms step_avg:92.27ms +[2025-08-22 12:03:43] [Rank 0] step:2601/10000 train_time:239982ms step_avg:92.27ms +[2025-08-22 12:03:45] [Rank 0] step:2621/10000 train_time:241858ms step_avg:92.28ms +[2025-08-22 12:03:45] [Rank 0] step:2621/10000 train_time:241858ms step_avg:92.28ms +[2025-08-22 12:03:47] [Rank 0] step:2641/10000 train_time:243748ms step_avg:92.29ms +[2025-08-22 
12:03:47] [Rank 0] step:2641/10000 train_time:243748ms step_avg:92.29ms +[2025-08-22 12:03:49] [Rank 0] step:2661/10000 train_time:245638ms step_avg:92.31ms +[2025-08-22 12:03:49] [Rank 0] step:2661/10000 train_time:245638ms step_avg:92.31ms +[2025-08-22 12:03:51] [Rank 0] step:2681/10000 train_time:247529ms step_avg:92.33ms +[2025-08-22 12:03:51] [Rank 0] step:2681/10000 train_time:247529ms step_avg:92.33ms +[2025-08-22 12:03:53] [Rank 0] step:2701/10000 train_time:249421ms step_avg:92.34ms +[2025-08-22 12:03:53] [Rank 0] step:2701/10000 train_time:249421ms step_avg:92.34ms +[2025-08-22 12:03:55] [Rank 0] step:2721/10000 train_time:251315ms step_avg:92.36ms +[2025-08-22 12:03:55] [Rank 0] step:2721/10000 train_time:251315ms step_avg:92.36ms +[2025-08-22 12:03:56] [Rank 0] step:2741/10000 train_time:253211ms step_avg:92.38ms +[2025-08-22 12:03:56] [Rank 0] step:2741/10000 train_time:253211ms step_avg:92.38ms +[2025-08-22 12:03:58] [Rank 0] step:2761/10000 train_time:255105ms step_avg:92.40ms +[2025-08-22 12:03:58] [Rank 0] step:2761/10000 train_time:255105ms step_avg:92.40ms +[2025-08-22 12:04:00] [Rank 0] step:2781/10000 train_time:257001ms step_avg:92.41ms +[2025-08-22 12:04:00] [Rank 0] step:2781/10000 train_time:257001ms step_avg:92.41ms +[2025-08-22 12:04:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:04:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:04:16] [Rank 0] PRINT: step:2800/10000 val_loss:3.9006 svd_entropy: attn_qk:H=0.7510,top10E=0.28,eRank=179.5,q75/q25=47.48 attn_vo:H=0.8342,top10E=0.05,eRank=404.3,q75/q25=inf mlp_w1:H=0.9730,top10E=0.04,eRank=641.8,q75/q25=2.74 mlp_w2:H=0.9687,top10E=0.05,eRank=624.1,q75/q25=2.84 vo_prod:H=0.6906,top10E=0.10,eRank=224.6,q75/q25=inf train_time:258983ms step_avg:92.49ms +[2025-08-22 12:04:16] [Rank 0] PRINT: step:2800/10000 val_loss:3.9006 svd_entropy: attn_qk:H=0.7510,top10E=0.28,eRank=179.5,q75/q25=47.48 attn_vo:H=0.8342,top10E=0.05,eRank=404.3,q75/q25=inf mlp_w1:H=0.9730,top10E=0.04,eRank=641.8,q75/q25=2.74 mlp_w2:H=0.9687,top10E=0.05,eRank=624.1,q75/q25=2.84 vo_prod:H=0.6906,top10E=0.10,eRank=224.6,q75/q25=inf train_time:258983ms step_avg:92.49ms +[2025-08-22 12:04:16] [Rank 0] step:2801/10000 train_time:259008ms step_avg:92.47ms +[2025-08-22 12:04:16] [Rank 0] step:2801/10000 train_time:259008ms step_avg:92.47ms +[2025-08-22 12:04:18] [Rank 0] step:2821/10000 train_time:260907ms step_avg:92.49ms +[2025-08-22 12:04:18] [Rank 0] step:2821/10000 train_time:260907ms step_avg:92.49ms +[2025-08-22 12:04:20] [Rank 0] step:2841/10000 train_time:262795ms step_avg:92.50ms +[2025-08-22 12:04:20] [Rank 0] step:2841/10000 train_time:262795ms step_avg:92.50ms +[2025-08-22 12:04:22] [Rank 0] step:2861/10000 train_time:264686ms step_avg:92.52ms +[2025-08-22 12:04:22] [Rank 0] step:2861/10000 train_time:264686ms step_avg:92.52ms +[2025-08-22 12:04:24] [Rank 0] step:2881/10000 train_time:266573ms step_avg:92.53ms +[2025-08-22 12:04:24] [Rank 0] step:2881/10000 train_time:266573ms step_avg:92.53ms +[2025-08-22 12:04:25] [Rank 0] step:2901/10000 train_time:268463ms step_avg:92.54ms +[2025-08-22 12:04:25] [Rank 0] step:2901/10000 train_time:268463ms step_avg:92.54ms +[2025-08-22 12:04:27] [Rank 0] step:2921/10000 train_time:270352ms step_avg:92.55ms +[2025-08-22 12:04:27] [Rank 0] step:2921/10000 train_time:270352ms step_avg:92.55ms +[2025-08-22 12:04:29] [Rank 0] 
step:2941/10000 train_time:272244ms step_avg:92.57ms +[2025-08-22 12:04:29] [Rank 0] step:2941/10000 train_time:272244ms step_avg:92.57ms +[2025-08-22 12:04:31] [Rank 0] step:2961/10000 train_time:274137ms step_avg:92.58ms +[2025-08-22 12:04:31] [Rank 0] step:2961/10000 train_time:274137ms step_avg:92.58ms +[2025-08-22 12:04:33] [Rank 0] step:2981/10000 train_time:276037ms step_avg:92.60ms +[2025-08-22 12:04:33] [Rank 0] step:2981/10000 train_time:276037ms step_avg:92.60ms +[2025-08-22 12:04:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:04:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:04:49] [Rank 0] PRINT: step:3000/10000 val_loss:3.8706 svd_entropy: attn_qk:H=0.7520,top10E=0.27,eRank=180.3,q75/q25=47.84 attn_vo:H=0.8342,top10E=0.05,eRank=404.4,q75/q25=inf mlp_w1:H=0.9730,top10E=0.04,eRank=642.0,q75/q25=2.73 mlp_w2:H=0.9687,top10E=0.05,eRank=624.0,q75/q25=2.84 vo_prod:H=0.6906,top10E=0.10,eRank=224.8,q75/q25=inf train_time:277941ms step_avg:92.65ms +[2025-08-22 12:04:49] [Rank 0] PRINT: step:3000/10000 val_loss:3.8706 svd_entropy: attn_qk:H=0.7520,top10E=0.27,eRank=180.3,q75/q25=47.84 attn_vo:H=0.8342,top10E=0.05,eRank=404.4,q75/q25=inf mlp_w1:H=0.9730,top10E=0.04,eRank=642.0,q75/q25=2.73 mlp_w2:H=0.9687,top10E=0.05,eRank=624.0,q75/q25=2.84 vo_prod:H=0.6906,top10E=0.10,eRank=224.8,q75/q25=inf train_time:277941ms step_avg:92.65ms +[2025-08-22 12:04:49] [Rank 0] step:3001/10000 train_time:277965ms step_avg:92.62ms +[2025-08-22 12:04:49] [Rank 0] step:3001/10000 train_time:277965ms step_avg:92.62ms +[2025-08-22 12:04:51] [Rank 0] step:3021/10000 train_time:279866ms step_avg:92.64ms +[2025-08-22 12:04:51] [Rank 0] step:3021/10000 train_time:279866ms step_avg:92.64ms +[2025-08-22 12:04:52] [Rank 0] step:3041/10000 train_time:281759ms step_avg:92.65ms +[2025-08-22 
12:04:52] [Rank 0] step:3041/10000 train_time:281759ms step_avg:92.65ms +[2025-08-22 12:04:54] [Rank 0] step:3061/10000 train_time:283654ms step_avg:92.67ms +[2025-08-22 12:04:54] [Rank 0] step:3061/10000 train_time:283654ms step_avg:92.67ms +[2025-08-22 12:04:56] [Rank 0] step:3081/10000 train_time:285549ms step_avg:92.68ms +[2025-08-22 12:04:56] [Rank 0] step:3081/10000 train_time:285549ms step_avg:92.68ms +[2025-08-22 12:04:58] [Rank 0] step:3101/10000 train_time:287446ms step_avg:92.69ms +[2025-08-22 12:04:58] [Rank 0] step:3101/10000 train_time:287446ms step_avg:92.69ms +[2025-08-22 12:05:00] [Rank 0] step:3121/10000 train_time:289345ms step_avg:92.71ms +[2025-08-22 12:05:00] [Rank 0] step:3121/10000 train_time:289345ms step_avg:92.71ms +[2025-08-22 12:05:02] [Rank 0] step:3141/10000 train_time:291245ms step_avg:92.72ms +[2025-08-22 12:05:02] [Rank 0] step:3141/10000 train_time:291245ms step_avg:92.72ms +[2025-08-22 12:05:04] [Rank 0] step:3161/10000 train_time:293145ms step_avg:92.74ms +[2025-08-22 12:05:04] [Rank 0] step:3161/10000 train_time:293145ms step_avg:92.74ms +[2025-08-22 12:05:06] [Rank 0] step:3181/10000 train_time:295109ms step_avg:92.77ms +[2025-08-22 12:05:06] [Rank 0] step:3181/10000 train_time:295109ms step_avg:92.77ms +[2025-08-22 12:05:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:05:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:05:22] [Rank 0] PRINT: step:3200/10000 val_loss:3.8423 svd_entropy: attn_qk:H=0.7531,top10E=0.27,eRank=181.2,q75/q25=48.23 attn_vo:H=0.8342,top10E=0.05,eRank=404.4,q75/q25=inf mlp_w1:H=0.9730,top10E=0.04,eRank=642.1,q75/q25=2.73 mlp_w2:H=0.9687,top10E=0.05,eRank=623.8,q75/q25=2.83 vo_prod:H=0.6906,top10E=0.10,eRank=224.8,q75/q25=inf train_time:297097ms step_avg:92.84ms +[2025-08-22 12:05:22] [Rank 0] PRINT: step:3200/10000 val_loss:3.8423 svd_entropy: attn_qk:H=0.7531,top10E=0.27,eRank=181.2,q75/q25=48.23 attn_vo:H=0.8342,top10E=0.05,eRank=404.4,q75/q25=inf mlp_w1:H=0.9730,top10E=0.04,eRank=642.1,q75/q25=2.73 mlp_w2:H=0.9687,top10E=0.05,eRank=623.8,q75/q25=2.83 vo_prod:H=0.6906,top10E=0.10,eRank=224.8,q75/q25=inf train_time:297097ms step_avg:92.84ms +[2025-08-22 12:05:22] [Rank 0] step:3201/10000 train_time:297122ms step_avg:92.82ms +[2025-08-22 12:05:22] [Rank 0] step:3201/10000 train_time:297122ms step_avg:92.82ms +[2025-08-22 12:05:24] [Rank 0] step:3221/10000 train_time:299009ms step_avg:92.83ms +[2025-08-22 12:05:24] [Rank 0] step:3221/10000 train_time:299009ms step_avg:92.83ms +[2025-08-22 12:05:26] [Rank 0] step:3241/10000 train_time:300904ms step_avg:92.84ms +[2025-08-22 12:05:26] [Rank 0] step:3241/10000 train_time:300904ms step_avg:92.84ms +[2025-08-22 12:05:28] [Rank 0] step:3261/10000 train_time:302799ms step_avg:92.85ms +[2025-08-22 12:05:28] [Rank 0] step:3261/10000 train_time:302799ms step_avg:92.85ms +[2025-08-22 12:05:29] [Rank 0] step:3281/10000 train_time:304697ms step_avg:92.87ms +[2025-08-22 12:05:29] [Rank 0] step:3281/10000 train_time:304697ms step_avg:92.87ms +[2025-08-22 12:05:31] [Rank 0] step:3301/10000 train_time:306592ms step_avg:92.88ms +[2025-08-22 12:05:31] [Rank 0] step:3301/10000 train_time:306592ms step_avg:92.88ms +[2025-08-22 12:05:33] [Rank 0] step:3321/10000 train_time:308489ms step_avg:92.89ms +[2025-08-22 12:05:33] [Rank 0] step:3321/10000 train_time:308489ms step_avg:92.89ms +[2025-08-22 12:05:35] [Rank 0] 
step:3341/10000 train_time:310388ms step_avg:92.90ms +[2025-08-22 12:05:35] [Rank 0] step:3341/10000 train_time:310388ms step_avg:92.90ms +[2025-08-22 12:05:37] [Rank 0] step:3361/10000 train_time:312289ms step_avg:92.92ms +[2025-08-22 12:05:37] [Rank 0] step:3361/10000 train_time:312289ms step_avg:92.92ms +[2025-08-22 12:05:39] [Rank 0] step:3381/10000 train_time:314189ms step_avg:92.93ms +[2025-08-22 12:05:39] [Rank 0] step:3381/10000 train_time:314189ms step_avg:92.93ms +[2025-08-22 12:05:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:05:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:05:55] [Rank 0] PRINT: step:3400/10000 val_loss:3.8238 svd_entropy: attn_qk:H=0.7543,top10E=0.27,eRank=182.2,q75/q25=48.60 attn_vo:H=0.8343,top10E=0.05,eRank=404.5,q75/q25=inf mlp_w1:H=0.9731,top10E=0.04,eRank=642.2,q75/q25=2.73 mlp_w2:H=0.9687,top10E=0.05,eRank=623.9,q75/q25=2.83 vo_prod:H=0.6907,top10E=0.10,eRank=225.0,q75/q25=inf train_time:316092ms step_avg:92.97ms +[2025-08-22 12:05:55] [Rank 0] PRINT: step:3400/10000 val_loss:3.8238 svd_entropy: attn_qk:H=0.7543,top10E=0.27,eRank=182.2,q75/q25=48.60 attn_vo:H=0.8343,top10E=0.05,eRank=404.5,q75/q25=inf mlp_w1:H=0.9731,top10E=0.04,eRank=642.2,q75/q25=2.73 mlp_w2:H=0.9687,top10E=0.05,eRank=623.9,q75/q25=2.83 vo_prod:H=0.6907,top10E=0.10,eRank=225.0,q75/q25=inf train_time:316092ms step_avg:92.97ms +[2025-08-22 12:05:55] [Rank 0] step:3401/10000 train_time:316116ms step_avg:92.95ms +[2025-08-22 12:05:55] [Rank 0] step:3401/10000 train_time:316116ms step_avg:92.95ms +[2025-08-22 12:05:57] [Rank 0] step:3421/10000 train_time:318010ms step_avg:92.96ms +[2025-08-22 12:05:57] [Rank 0] step:3421/10000 train_time:318010ms step_avg:92.96ms +[2025-08-22 12:05:59] [Rank 0] step:3441/10000 train_time:319904ms step_avg:92.97ms +[2025-08-22 
12:05:59] [Rank 0] step:3441/10000 train_time:319904ms step_avg:92.97ms +[2025-08-22 12:06:01] [Rank 0] step:3461/10000 train_time:321803ms step_avg:92.98ms +[2025-08-22 12:06:01] [Rank 0] step:3461/10000 train_time:321803ms step_avg:92.98ms +[2025-08-22 12:06:03] [Rank 0] step:3481/10000 train_time:323699ms step_avg:92.99ms +[2025-08-22 12:06:03] [Rank 0] step:3481/10000 train_time:323699ms step_avg:92.99ms +[2025-08-22 12:06:04] [Rank 0] step:3501/10000 train_time:325601ms step_avg:93.00ms +[2025-08-22 12:06:04] [Rank 0] step:3501/10000 train_time:325601ms step_avg:93.00ms +[2025-08-22 12:06:06] [Rank 0] step:3521/10000 train_time:327501ms step_avg:93.01ms +[2025-08-22 12:06:06] [Rank 0] step:3521/10000 train_time:327501ms step_avg:93.01ms +[2025-08-22 12:06:08] [Rank 0] step:3541/10000 train_time:329471ms step_avg:93.04ms +[2025-08-22 12:06:08] [Rank 0] step:3541/10000 train_time:329471ms step_avg:93.04ms +[2025-08-22 12:06:10] [Rank 0] step:3561/10000 train_time:331464ms step_avg:93.08ms +[2025-08-22 12:06:10] [Rank 0] step:3561/10000 train_time:331464ms step_avg:93.08ms +[2025-08-22 12:06:12] [Rank 0] step:3581/10000 train_time:333364ms step_avg:93.09ms +[2025-08-22 12:06:12] [Rank 0] step:3581/10000 train_time:333364ms step_avg:93.09ms +[2025-08-22 12:06:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:06:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:06:28] [Rank 0] PRINT: step:3600/10000 val_loss:3.8172 svd_entropy: attn_qk:H=0.7552,top10E=0.27,eRank=182.9,q75/q25=48.34 attn_vo:H=0.8343,top10E=0.05,eRank=404.7,q75/q25=inf mlp_w1:H=0.9731,top10E=0.04,eRank=642.4,q75/q25=2.72 mlp_w2:H=0.9686,top10E=0.05,eRank=623.8,q75/q25=2.83 vo_prod:H=0.6908,top10E=0.10,eRank=225.2,q75/q25=inf train_time:335269ms step_avg:93.13ms +[2025-08-22 12:06:28] [Rank 0] PRINT: step:3600/10000 val_loss:3.8172 svd_entropy: attn_qk:H=0.7552,top10E=0.27,eRank=182.9,q75/q25=48.34 attn_vo:H=0.8343,top10E=0.05,eRank=404.7,q75/q25=inf mlp_w1:H=0.9731,top10E=0.04,eRank=642.4,q75/q25=2.72 mlp_w2:H=0.9686,top10E=0.05,eRank=623.8,q75/q25=2.83 vo_prod:H=0.6908,top10E=0.10,eRank=225.2,q75/q25=inf train_time:335269ms step_avg:93.13ms +[2025-08-22 12:06:28] [Rank 0] step:3601/10000 train_time:335293ms step_avg:93.11ms +[2025-08-22 12:06:28] [Rank 0] step:3601/10000 train_time:335293ms step_avg:93.11ms +[2025-08-22 12:06:30] [Rank 0] step:3621/10000 train_time:337201ms step_avg:93.12ms +[2025-08-22 12:06:30] [Rank 0] step:3621/10000 train_time:337201ms step_avg:93.12ms +[2025-08-22 12:06:32] [Rank 0] step:3641/10000 train_time:339096ms step_avg:93.13ms +[2025-08-22 12:06:32] [Rank 0] step:3641/10000 train_time:339096ms step_avg:93.13ms +[2025-08-22 12:06:34] [Rank 0] step:3661/10000 train_time:340993ms step_avg:93.14ms +[2025-08-22 12:06:34] [Rank 0] step:3661/10000 train_time:340993ms step_avg:93.14ms +[2025-08-22 12:06:36] [Rank 0] step:3681/10000 train_time:342890ms step_avg:93.15ms +[2025-08-22 12:06:36] [Rank 0] step:3681/10000 train_time:342890ms step_avg:93.15ms +[2025-08-22 12:06:38] [Rank 0] step:3701/10000 train_time:344786ms step_avg:93.16ms +[2025-08-22 12:06:38] [Rank 0] step:3701/10000 train_time:344786ms step_avg:93.16ms +[2025-08-22 12:06:40] [Rank 0] step:3721/10000 train_time:346713ms step_avg:93.18ms +[2025-08-22 12:06:40] [Rank 0] step:3721/10000 train_time:346713ms step_avg:93.18ms +[2025-08-22 12:06:42] [Rank 0] 
step:3741/10000 train_time:348648ms step_avg:93.20ms +[2025-08-22 12:06:42] [Rank 0] step:3741/10000 train_time:348648ms step_avg:93.20ms +[2025-08-22 12:06:43] [Rank 0] step:3761/10000 train_time:350585ms step_avg:93.22ms +[2025-08-22 12:06:43] [Rank 0] step:3761/10000 train_time:350585ms step_avg:93.22ms +[2025-08-22 12:06:45] [Rank 0] step:3781/10000 train_time:352523ms step_avg:93.24ms +[2025-08-22 12:06:45] [Rank 0] step:3781/10000 train_time:352523ms step_avg:93.24ms +[2025-08-22 12:06:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:06:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:07:01] [Rank 0] PRINT: step:3800/10000 val_loss:3.7896 svd_entropy: attn_qk:H=0.7562,top10E=0.27,eRank=183.8,q75/q25=48.83 attn_vo:H=0.8343,top10E=0.05,eRank=404.7,q75/q25=inf mlp_w1:H=0.9731,top10E=0.04,eRank=642.5,q75/q25=2.72 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.82 vo_prod:H=0.6909,top10E=0.10,eRank=225.5,q75/q25=inf train_time:354464ms step_avg:93.28ms +[2025-08-22 12:07:01] [Rank 0] PRINT: step:3800/10000 val_loss:3.7896 svd_entropy: attn_qk:H=0.7562,top10E=0.27,eRank=183.8,q75/q25=48.83 attn_vo:H=0.8343,top10E=0.05,eRank=404.7,q75/q25=inf mlp_w1:H=0.9731,top10E=0.04,eRank=642.5,q75/q25=2.72 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.82 vo_prod:H=0.6909,top10E=0.10,eRank=225.5,q75/q25=inf train_time:354464ms step_avg:93.28ms +[2025-08-22 12:07:01] [Rank 0] step:3801/10000 train_time:354488ms step_avg:93.26ms +[2025-08-22 12:07:01] [Rank 0] step:3801/10000 train_time:354488ms step_avg:93.26ms +[2025-08-22 12:07:03] [Rank 0] step:3821/10000 train_time:356421ms step_avg:93.28ms +[2025-08-22 12:07:03] [Rank 0] step:3821/10000 train_time:356421ms step_avg:93.28ms +[2025-08-22 12:07:05] [Rank 0] step:3841/10000 train_time:358358ms step_avg:93.30ms +[2025-08-22 
12:07:05] [Rank 0] step:3841/10000 train_time:358358ms step_avg:93.30ms +[2025-08-22 12:07:07] [Rank 0] step:3861/10000 train_time:360292ms step_avg:93.32ms +[2025-08-22 12:07:07] [Rank 0] step:3861/10000 train_time:360292ms step_avg:93.32ms +[2025-08-22 12:07:09] [Rank 0] step:3881/10000 train_time:362225ms step_avg:93.33ms +[2025-08-22 12:07:09] [Rank 0] step:3881/10000 train_time:362225ms step_avg:93.33ms +[2025-08-22 12:07:11] [Rank 0] step:3901/10000 train_time:364253ms step_avg:93.37ms +[2025-08-22 12:07:11] [Rank 0] step:3901/10000 train_time:364253ms step_avg:93.37ms +[2025-08-22 12:07:13] [Rank 0] step:3921/10000 train_time:366239ms step_avg:93.40ms +[2025-08-22 12:07:13] [Rank 0] step:3921/10000 train_time:366239ms step_avg:93.40ms +[2025-08-22 12:07:15] [Rank 0] step:3941/10000 train_time:368171ms step_avg:93.42ms +[2025-08-22 12:07:15] [Rank 0] step:3941/10000 train_time:368171ms step_avg:93.42ms +[2025-08-22 12:07:17] [Rank 0] step:3961/10000 train_time:370104ms step_avg:93.44ms +[2025-08-22 12:07:17] [Rank 0] step:3961/10000 train_time:370104ms step_avg:93.44ms +[2025-08-22 12:07:19] [Rank 0] step:3981/10000 train_time:372041ms step_avg:93.45ms +[2025-08-22 12:07:19] [Rank 0] step:3981/10000 train_time:372041ms step_avg:93.45ms +[2025-08-22 12:07:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:07:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:07:35] [Rank 0] PRINT: step:4000/10000 val_loss:3.7701 svd_entropy: attn_qk:H=0.7571,top10E=0.27,eRank=184.5,q75/q25=48.82 attn_vo:H=0.8344,top10E=0.06,eRank=404.9,q75/q25=inf mlp_w1:H=0.9732,top10E=0.04,eRank=642.6,q75/q25=2.72 mlp_w2:H=0.9686,top10E=0.05,eRank=623.6,q75/q25=2.82 vo_prod:H=0.6909,top10E=0.11,eRank=225.6,q75/q25=inf train_time:373979ms step_avg:93.49ms +[2025-08-22 12:07:35] [Rank 0] PRINT: step:4000/10000 val_loss:3.7701 svd_entropy: attn_qk:H=0.7571,top10E=0.27,eRank=184.5,q75/q25=48.82 attn_vo:H=0.8344,top10E=0.06,eRank=404.9,q75/q25=inf mlp_w1:H=0.9732,top10E=0.04,eRank=642.6,q75/q25=2.72 mlp_w2:H=0.9686,top10E=0.05,eRank=623.6,q75/q25=2.82 vo_prod:H=0.6909,top10E=0.11,eRank=225.6,q75/q25=inf train_time:373979ms step_avg:93.49ms +[2025-08-22 12:07:35] [Rank 0] step:4001/10000 train_time:374003ms step_avg:93.48ms +[2025-08-22 12:07:35] [Rank 0] step:4001/10000 train_time:374003ms step_avg:93.48ms +[2025-08-22 12:07:37] [Rank 0] step:4021/10000 train_time:375922ms step_avg:93.49ms +[2025-08-22 12:07:37] [Rank 0] step:4021/10000 train_time:375922ms step_avg:93.49ms +[2025-08-22 12:07:39] [Rank 0] step:4041/10000 train_time:377853ms step_avg:93.50ms +[2025-08-22 12:07:39] [Rank 0] step:4041/10000 train_time:377853ms step_avg:93.50ms +[2025-08-22 12:07:41] [Rank 0] step:4061/10000 train_time:379784ms step_avg:93.52ms +[2025-08-22 12:07:41] [Rank 0] step:4061/10000 train_time:379784ms step_avg:93.52ms +[2025-08-22 12:07:43] [Rank 0] step:4081/10000 train_time:382396ms step_avg:93.70ms +[2025-08-22 12:07:43] [Rank 0] step:4081/10000 train_time:382396ms step_avg:93.70ms +[2025-08-22 12:07:45] [Rank 0] step:4101/10000 train_time:384329ms step_avg:93.72ms +[2025-08-22 12:07:45] [Rank 0] step:4101/10000 train_time:384329ms step_avg:93.72ms +[2025-08-22 12:07:47] [Rank 0] step:4121/10000 train_time:386264ms step_avg:93.73ms +[2025-08-22 12:07:47] [Rank 0] step:4121/10000 train_time:386264ms step_avg:93.73ms +[2025-08-22 12:07:49] [Rank 0] 
step:4141/10000 train_time:388202ms step_avg:93.75ms +[2025-08-22 12:07:49] [Rank 0] step:4141/10000 train_time:388202ms step_avg:93.75ms +[2025-08-22 12:07:51] [Rank 0] step:4161/10000 train_time:390137ms step_avg:93.76ms +[2025-08-22 12:07:51] [Rank 0] step:4161/10000 train_time:390137ms step_avg:93.76ms +[2025-08-22 12:07:53] [Rank 0] step:4181/10000 train_time:392076ms step_avg:93.78ms +[2025-08-22 12:07:53] [Rank 0] step:4181/10000 train_time:392076ms step_avg:93.78ms +[2025-08-22 12:07:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:07:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:08:08] [Rank 0] PRINT: step:4200/10000 val_loss:3.7594 svd_entropy: attn_qk:H=0.7580,top10E=0.26,eRank=185.2,q75/q25=49.01 attn_vo:H=0.8344,top10E=0.06,eRank=405.0,q75/q25=inf mlp_w1:H=0.9732,top10E=0.04,eRank=642.7,q75/q25=2.72 mlp_w2:H=0.9686,top10E=0.05,eRank=623.5,q75/q25=2.82 vo_prod:H=0.6910,top10E=0.11,eRank=225.8,q75/q25=inf train_time:394016ms step_avg:93.81ms +[2025-08-22 12:08:08] [Rank 0] PRINT: step:4200/10000 val_loss:3.7594 svd_entropy: attn_qk:H=0.7580,top10E=0.26,eRank=185.2,q75/q25=49.01 attn_vo:H=0.8344,top10E=0.06,eRank=405.0,q75/q25=inf mlp_w1:H=0.9732,top10E=0.04,eRank=642.7,q75/q25=2.72 mlp_w2:H=0.9686,top10E=0.05,eRank=623.5,q75/q25=2.82 vo_prod:H=0.6910,top10E=0.11,eRank=225.8,q75/q25=inf train_time:394016ms step_avg:93.81ms +[2025-08-22 12:08:09] [Rank 0] step:4201/10000 train_time:394039ms step_avg:93.80ms +[2025-08-22 12:08:09] [Rank 0] step:4201/10000 train_time:394039ms step_avg:93.80ms +[2025-08-22 12:08:10] [Rank 0] step:4221/10000 train_time:395971ms step_avg:93.81ms +[2025-08-22 12:08:10] [Rank 0] step:4221/10000 train_time:395971ms step_avg:93.81ms +[2025-08-22 12:08:12] [Rank 0] step:4241/10000 train_time:397906ms step_avg:93.82ms +[2025-08-22 
12:08:12] [Rank 0] step:4241/10000 train_time:397906ms step_avg:93.82ms +[2025-08-22 12:08:14] [Rank 0] step:4261/10000 train_time:399914ms step_avg:93.85ms +[2025-08-22 12:08:14] [Rank 0] step:4261/10000 train_time:399914ms step_avg:93.85ms +[2025-08-22 12:08:16] [Rank 0] step:4281/10000 train_time:401940ms step_avg:93.89ms +[2025-08-22 12:08:16] [Rank 0] step:4281/10000 train_time:401940ms step_avg:93.89ms +[2025-08-22 12:08:18] [Rank 0] step:4301/10000 train_time:403877ms step_avg:93.90ms +[2025-08-22 12:08:18] [Rank 0] step:4301/10000 train_time:403877ms step_avg:93.90ms +[2025-08-22 12:08:20] [Rank 0] step:4321/10000 train_time:405817ms step_avg:93.92ms +[2025-08-22 12:08:20] [Rank 0] step:4321/10000 train_time:405817ms step_avg:93.92ms +[2025-08-22 12:08:22] [Rank 0] step:4341/10000 train_time:407755ms step_avg:93.93ms +[2025-08-22 12:08:22] [Rank 0] step:4341/10000 train_time:407755ms step_avg:93.93ms +[2025-08-22 12:08:24] [Rank 0] step:4361/10000 train_time:409696ms step_avg:93.95ms +[2025-08-22 12:08:24] [Rank 0] step:4361/10000 train_time:409696ms step_avg:93.95ms +[2025-08-22 12:08:26] [Rank 0] step:4381/10000 train_time:411634ms step_avg:93.96ms +[2025-08-22 12:08:26] [Rank 0] step:4381/10000 train_time:411634ms step_avg:93.96ms +[2025-08-22 12:08:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:08:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:08:42] [Rank 0] PRINT: step:4400/10000 val_loss:3.7450 svd_entropy: attn_qk:H=0.7588,top10E=0.26,eRank=185.9,q75/q25=49.48 attn_vo:H=0.8344,top10E=0.06,eRank=405.0,q75/q25=inf mlp_w1:H=0.9732,top10E=0.04,eRank=642.8,q75/q25=2.71 mlp_w2:H=0.9686,top10E=0.05,eRank=623.5,q75/q25=2.82 vo_prod:H=0.6910,top10E=0.11,eRank=225.8,q75/q25=inf train_time:413576ms step_avg:93.99ms +[2025-08-22 12:08:42] [Rank 0] PRINT: step:4400/10000 val_loss:3.7450 svd_entropy: attn_qk:H=0.7588,top10E=0.26,eRank=185.9,q75/q25=49.48 attn_vo:H=0.8344,top10E=0.06,eRank=405.0,q75/q25=inf mlp_w1:H=0.9732,top10E=0.04,eRank=642.8,q75/q25=2.71 mlp_w2:H=0.9686,top10E=0.05,eRank=623.5,q75/q25=2.82 vo_prod:H=0.6910,top10E=0.11,eRank=225.8,q75/q25=inf train_time:413576ms step_avg:93.99ms +[2025-08-22 12:08:42] [Rank 0] step:4401/10000 train_time:413598ms step_avg:93.98ms +[2025-08-22 12:08:42] [Rank 0] step:4401/10000 train_time:413598ms step_avg:93.98ms +[2025-08-22 12:08:44] [Rank 0] step:4421/10000 train_time:415528ms step_avg:93.99ms +[2025-08-22 12:08:44] [Rank 0] step:4421/10000 train_time:415528ms step_avg:93.99ms +[2025-08-22 12:08:46] [Rank 0] step:4441/10000 train_time:417462ms step_avg:94.00ms +[2025-08-22 12:08:46] [Rank 0] step:4441/10000 train_time:417462ms step_avg:94.00ms +[2025-08-22 12:08:48] [Rank 0] step:4461/10000 train_time:419401ms step_avg:94.01ms +[2025-08-22 12:08:48] [Rank 0] step:4461/10000 train_time:419401ms step_avg:94.01ms +[2025-08-22 12:08:50] [Rank 0] step:4481/10000 train_time:421342ms step_avg:94.03ms +[2025-08-22 12:08:50] [Rank 0] step:4481/10000 train_time:421342ms step_avg:94.03ms +[2025-08-22 12:08:52] [Rank 0] step:4501/10000 train_time:423280ms step_avg:94.04ms +[2025-08-22 12:08:52] [Rank 0] step:4501/10000 train_time:423280ms step_avg:94.04ms +[2025-08-22 12:08:54] [Rank 0] step:4521/10000 train_time:425224ms step_avg:94.06ms +[2025-08-22 12:08:54] [Rank 0] step:4521/10000 train_time:425224ms step_avg:94.06ms +[2025-08-22 12:08:55] [Rank 0] 
step:4541/10000 train_time:427167ms step_avg:94.07ms +[2025-08-22 12:08:55] [Rank 0] step:4541/10000 train_time:427167ms step_avg:94.07ms +[2025-08-22 12:08:57] [Rank 0] step:4561/10000 train_time:429108ms step_avg:94.08ms +[2025-08-22 12:08:57] [Rank 0] step:4561/10000 train_time:429108ms step_avg:94.08ms +[2025-08-22 12:08:59] [Rank 0] step:4581/10000 train_time:431053ms step_avg:94.10ms +[2025-08-22 12:08:59] [Rank 0] step:4581/10000 train_time:431053ms step_avg:94.10ms +[2025-08-22 12:09:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:09:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:09:15] [Rank 0] PRINT: step:4600/10000 val_loss:3.7280 svd_entropy: attn_qk:H=0.7596,top10E=0.26,eRank=186.6,q75/q25=49.55 attn_vo:H=0.8344,top10E=0.06,eRank=405.1,q75/q25=inf mlp_w1:H=0.9732,top10E=0.04,eRank=642.9,q75/q25=2.71 mlp_w2:H=0.9686,top10E=0.05,eRank=623.6,q75/q25=2.81 vo_prod:H=0.6911,top10E=0.11,eRank=226.0,q75/q25=inf train_time:433000ms step_avg:94.13ms +[2025-08-22 12:09:15] [Rank 0] PRINT: step:4600/10000 val_loss:3.7280 svd_entropy: attn_qk:H=0.7596,top10E=0.26,eRank=186.6,q75/q25=49.55 attn_vo:H=0.8344,top10E=0.06,eRank=405.1,q75/q25=inf mlp_w1:H=0.9732,top10E=0.04,eRank=642.9,q75/q25=2.71 mlp_w2:H=0.9686,top10E=0.05,eRank=623.6,q75/q25=2.81 vo_prod:H=0.6911,top10E=0.11,eRank=226.0,q75/q25=inf train_time:433000ms step_avg:94.13ms +[2025-08-22 12:09:15] [Rank 0] step:4601/10000 train_time:433023ms step_avg:94.12ms +[2025-08-22 12:09:15] [Rank 0] step:4601/10000 train_time:433023ms step_avg:94.12ms +[2025-08-22 12:09:17] [Rank 0] step:4621/10000 train_time:435014ms step_avg:94.14ms +[2025-08-22 12:09:17] [Rank 0] step:4621/10000 train_time:435014ms step_avg:94.14ms +[2025-08-22 12:09:19] [Rank 0] step:4641/10000 train_time:436968ms step_avg:94.15ms +[2025-08-22 
12:09:19] [Rank 0] step:4641/10000 train_time:436968ms step_avg:94.15ms +[2025-08-22 12:09:21] [Rank 0] step:4661/10000 train_time:438985ms step_avg:94.18ms +[2025-08-22 12:09:21] [Rank 0] step:4661/10000 train_time:438985ms step_avg:94.18ms +[2025-08-22 12:09:23] [Rank 0] step:4681/10000 train_time:440926ms step_avg:94.19ms +[2025-08-22 12:09:23] [Rank 0] step:4681/10000 train_time:440926ms step_avg:94.19ms +[2025-08-22 12:09:25] [Rank 0] step:4701/10000 train_time:442867ms step_avg:94.21ms +[2025-08-22 12:09:25] [Rank 0] step:4701/10000 train_time:442867ms step_avg:94.21ms +[2025-08-22 12:09:27] [Rank 0] step:4721/10000 train_time:444808ms step_avg:94.22ms +[2025-08-22 12:09:27] [Rank 0] step:4721/10000 train_time:444808ms step_avg:94.22ms +[2025-08-22 12:09:29] [Rank 0] step:4741/10000 train_time:446746ms step_avg:94.23ms +[2025-08-22 12:09:29] [Rank 0] step:4741/10000 train_time:446746ms step_avg:94.23ms +[2025-08-22 12:09:31] [Rank 0] step:4761/10000 train_time:448687ms step_avg:94.24ms +[2025-08-22 12:09:31] [Rank 0] step:4761/10000 train_time:448687ms step_avg:94.24ms +[2025-08-22 12:09:33] [Rank 0] step:4781/10000 train_time:450629ms step_avg:94.25ms +[2025-08-22 12:09:33] [Rank 0] step:4781/10000 train_time:450629ms step_avg:94.25ms +[2025-08-22 12:09:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:09:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:09:48] [Rank 0] PRINT: step:4800/10000 val_loss:3.7194 svd_entropy: attn_qk:H=0.7605,top10E=0.26,eRank=187.3,q75/q25=49.54 attn_vo:H=0.8345,top10E=0.06,eRank=405.2,q75/q25=inf mlp_w1:H=0.9732,top10E=0.04,eRank=643.0,q75/q25=2.71 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.81 vo_prod:H=0.6913,top10E=0.11,eRank=226.3,q75/q25=inf train_time:452576ms step_avg:94.29ms +[2025-08-22 12:09:48] [Rank 0] PRINT: step:4800/10000 val_loss:3.7194 svd_entropy: attn_qk:H=0.7605,top10E=0.26,eRank=187.3,q75/q25=49.54 attn_vo:H=0.8345,top10E=0.06,eRank=405.2,q75/q25=inf mlp_w1:H=0.9732,top10E=0.04,eRank=643.0,q75/q25=2.71 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.81 vo_prod:H=0.6913,top10E=0.11,eRank=226.3,q75/q25=inf train_time:452576ms step_avg:94.29ms +[2025-08-22 12:09:48] [Rank 0] step:4801/10000 train_time:452599ms step_avg:94.27ms +[2025-08-22 12:09:48] [Rank 0] step:4801/10000 train_time:452599ms step_avg:94.27ms +[2025-08-22 12:09:50] [Rank 0] step:4821/10000 train_time:454544ms step_avg:94.28ms +[2025-08-22 12:09:50] [Rank 0] step:4821/10000 train_time:454544ms step_avg:94.28ms +[2025-08-22 12:09:52] [Rank 0] step:4841/10000 train_time:456480ms step_avg:94.29ms +[2025-08-22 12:09:52] [Rank 0] step:4841/10000 train_time:456480ms step_avg:94.29ms +[2025-08-22 12:09:54] [Rank 0] step:4861/10000 train_time:458422ms step_avg:94.31ms +[2025-08-22 12:09:54] [Rank 0] step:4861/10000 train_time:458422ms step_avg:94.31ms +[2025-08-22 12:09:56] [Rank 0] step:4881/10000 train_time:460362ms step_avg:94.32ms +[2025-08-22 12:09:56] [Rank 0] step:4881/10000 train_time:460362ms step_avg:94.32ms +[2025-08-22 12:09:58] [Rank 0] step:4901/10000 train_time:462301ms step_avg:94.33ms +[2025-08-22 12:09:58] [Rank 0] step:4901/10000 train_time:462301ms step_avg:94.33ms +[2025-08-22 12:10:00] [Rank 0] step:4921/10000 train_time:464242ms step_avg:94.34ms +[2025-08-22 12:10:00] [Rank 0] step:4921/10000 train_time:464242ms step_avg:94.34ms +[2025-08-22 12:10:02] [Rank 0] 
step:4941/10000 train_time:466185ms step_avg:94.35ms +[2025-08-22 12:10:02] [Rank 0] step:4941/10000 train_time:466185ms step_avg:94.35ms +[2025-08-22 12:10:04] [Rank 0] step:4961/10000 train_time:468126ms step_avg:94.36ms +[2025-08-22 12:10:04] [Rank 0] step:4961/10000 train_time:468126ms step_avg:94.36ms +[2025-08-22 12:10:06] [Rank 0] step:4981/10000 train_time:470073ms step_avg:94.37ms +[2025-08-22 12:10:06] [Rank 0] step:4981/10000 train_time:470073ms step_avg:94.37ms +[2025-08-22 12:10:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:10:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:10:21] [Rank 0] PRINT: step:5000/10000 val_loss:3.7080 svd_entropy: attn_qk:H=0.7613,top10E=0.26,eRank=188.0,q75/q25=49.60 attn_vo:H=0.8345,top10E=0.06,eRank=405.2,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.1,q75/q25=2.71 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.80 vo_prod:H=0.6913,top10E=0.11,eRank=226.4,q75/q25=inf train_time:472020ms step_avg:94.40ms +[2025-08-22 12:10:21] [Rank 0] PRINT: step:5000/10000 val_loss:3.7080 svd_entropy: attn_qk:H=0.7613,top10E=0.26,eRank=188.0,q75/q25=49.60 attn_vo:H=0.8345,top10E=0.06,eRank=405.2,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.1,q75/q25=2.71 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.80 vo_prod:H=0.6913,top10E=0.11,eRank=226.4,q75/q25=inf train_time:472020ms step_avg:94.40ms +[2025-08-22 12:10:22] [Rank 0] step:5001/10000 train_time:472044ms step_avg:94.39ms +[2025-08-22 12:10:22] [Rank 0] step:5001/10000 train_time:472044ms step_avg:94.39ms +[2025-08-22 12:10:24] [Rank 0] step:5021/10000 train_time:474039ms step_avg:94.41ms +[2025-08-22 12:10:24] [Rank 0] step:5021/10000 train_time:474039ms step_avg:94.41ms +[2025-08-22 12:10:26] [Rank 0] step:5041/10000 train_time:475977ms step_avg:94.42ms +[2025-08-22 
12:10:26] [Rank 0] step:5041/10000 train_time:475977ms step_avg:94.42ms +[2025-08-22 12:10:28] [Rank 0] step:5061/10000 train_time:477919ms step_avg:94.43ms +[2025-08-22 12:10:28] [Rank 0] step:5061/10000 train_time:477919ms step_avg:94.43ms +[2025-08-22 12:10:29] [Rank 0] step:5081/10000 train_time:479866ms step_avg:94.44ms +[2025-08-22 12:10:29] [Rank 0] step:5081/10000 train_time:479866ms step_avg:94.44ms +[2025-08-22 12:10:31] [Rank 0] step:5101/10000 train_time:481810ms step_avg:94.45ms +[2025-08-22 12:10:31] [Rank 0] step:5101/10000 train_time:481810ms step_avg:94.45ms +[2025-08-22 12:10:33] [Rank 0] step:5121/10000 train_time:483758ms step_avg:94.47ms +[2025-08-22 12:10:33] [Rank 0] step:5121/10000 train_time:483758ms step_avg:94.47ms +[2025-08-22 12:10:35] [Rank 0] step:5141/10000 train_time:485708ms step_avg:94.48ms +[2025-08-22 12:10:35] [Rank 0] step:5141/10000 train_time:485708ms step_avg:94.48ms +[2025-08-22 12:10:37] [Rank 0] step:5161/10000 train_time:487654ms step_avg:94.49ms +[2025-08-22 12:10:37] [Rank 0] step:5161/10000 train_time:487654ms step_avg:94.49ms +[2025-08-22 12:10:39] [Rank 0] step:5181/10000 train_time:489603ms step_avg:94.50ms +[2025-08-22 12:10:39] [Rank 0] step:5181/10000 train_time:489603ms step_avg:94.50ms +[2025-08-22 12:10:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:10:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:10:55] [Rank 0] PRINT: step:5200/10000 val_loss:3.6962 svd_entropy: attn_qk:H=0.7620,top10E=0.26,eRank=188.7,q75/q25=50.09 attn_vo:H=0.8345,top10E=0.06,eRank=405.3,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.1,q75/q25=2.70 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.81 vo_prod:H=0.6913,top10E=0.11,eRank=226.4,q75/q25=inf train_time:491580ms step_avg:94.53ms +[2025-08-22 12:10:55] [Rank 0] PRINT: step:5200/10000 val_loss:3.6962 svd_entropy: attn_qk:H=0.7620,top10E=0.26,eRank=188.7,q75/q25=50.09 attn_vo:H=0.8345,top10E=0.06,eRank=405.3,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.1,q75/q25=2.70 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.81 vo_prod:H=0.6913,top10E=0.11,eRank=226.4,q75/q25=inf train_time:491580ms step_avg:94.53ms +[2025-08-22 12:10:55] [Rank 0] step:5201/10000 train_time:491602ms step_avg:94.52ms +[2025-08-22 12:10:55] [Rank 0] step:5201/10000 train_time:491602ms step_avg:94.52ms +[2025-08-22 12:10:57] [Rank 0] step:5221/10000 train_time:493556ms step_avg:94.53ms +[2025-08-22 12:10:57] [Rank 0] step:5221/10000 train_time:493556ms step_avg:94.53ms +[2025-08-22 12:10:59] [Rank 0] step:5241/10000 train_time:495529ms step_avg:94.55ms +[2025-08-22 12:10:59] [Rank 0] step:5241/10000 train_time:495529ms step_avg:94.55ms +[2025-08-22 12:11:01] [Rank 0] step:5261/10000 train_time:497499ms step_avg:94.56ms +[2025-08-22 12:11:01] [Rank 0] step:5261/10000 train_time:497499ms step_avg:94.56ms +[2025-08-22 12:11:03] [Rank 0] step:5281/10000 train_time:499470ms step_avg:94.58ms +[2025-08-22 12:11:03] [Rank 0] step:5281/10000 train_time:499470ms step_avg:94.58ms +[2025-08-22 12:11:05] [Rank 0] step:5301/10000 train_time:501450ms step_avg:94.60ms +[2025-08-22 12:11:05] [Rank 0] step:5301/10000 train_time:501450ms step_avg:94.60ms +[2025-08-22 12:11:07] [Rank 0] step:5321/10000 train_time:503424ms step_avg:94.61ms +[2025-08-22 12:11:07] [Rank 0] step:5321/10000 train_time:503424ms step_avg:94.61ms +[2025-08-22 12:11:09] [Rank 0] 
step:5341/10000 train_time:505398ms step_avg:94.63ms +[2025-08-22 12:11:09] [Rank 0] step:5341/10000 train_time:505398ms step_avg:94.63ms +[2025-08-22 12:11:11] [Rank 0] step:5361/10000 train_time:507375ms step_avg:94.64ms +[2025-08-22 12:11:11] [Rank 0] step:5361/10000 train_time:507375ms step_avg:94.64ms +[2025-08-22 12:11:13] [Rank 0] step:5381/10000 train_time:509348ms step_avg:94.66ms +[2025-08-22 12:11:13] [Rank 0] step:5381/10000 train_time:509348ms step_avg:94.66ms +[2025-08-22 12:11:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:11:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:11:28] [Rank 0] PRINT: step:5400/10000 val_loss:3.6868 svd_entropy: attn_qk:H=0.7628,top10E=0.26,eRank=189.4,q75/q25=50.13 attn_vo:H=0.8345,top10E=0.06,eRank=405.3,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.2,q75/q25=2.70 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.80 vo_prod:H=0.6914,top10E=0.11,eRank=226.7,q75/q25=inf train_time:511323ms step_avg:94.69ms +[2025-08-22 12:11:28] [Rank 0] PRINT: step:5400/10000 val_loss:3.6868 svd_entropy: attn_qk:H=0.7628,top10E=0.26,eRank=189.4,q75/q25=50.13 attn_vo:H=0.8345,top10E=0.06,eRank=405.3,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.2,q75/q25=2.70 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.80 vo_prod:H=0.6914,top10E=0.11,eRank=226.7,q75/q25=inf train_time:511323ms step_avg:94.69ms +[2025-08-22 12:11:29] [Rank 0] step:5401/10000 train_time:511346ms step_avg:94.68ms +[2025-08-22 12:11:29] [Rank 0] step:5401/10000 train_time:511346ms step_avg:94.68ms +[2025-08-22 12:11:30] [Rank 0] step:5421/10000 train_time:513324ms step_avg:94.69ms +[2025-08-22 12:11:30] [Rank 0] step:5421/10000 train_time:513324ms step_avg:94.69ms +[2025-08-22 12:11:32] [Rank 0] step:5441/10000 train_time:515289ms step_avg:94.70ms +[2025-08-22 
12:11:32] [Rank 0] step:5441/10000 train_time:515289ms step_avg:94.70ms +[2025-08-22 12:11:34] [Rank 0] step:5461/10000 train_time:517262ms step_avg:94.72ms +[2025-08-22 12:11:34] [Rank 0] step:5461/10000 train_time:517262ms step_avg:94.72ms +[2025-08-22 12:11:36] [Rank 0] step:5481/10000 train_time:519232ms step_avg:94.73ms +[2025-08-22 12:11:36] [Rank 0] step:5481/10000 train_time:519232ms step_avg:94.73ms +[2025-08-22 12:11:38] [Rank 0] step:5501/10000 train_time:521209ms step_avg:94.75ms +[2025-08-22 12:11:38] [Rank 0] step:5501/10000 train_time:521209ms step_avg:94.75ms +[2025-08-22 12:11:40] [Rank 0] step:5521/10000 train_time:523188ms step_avg:94.76ms +[2025-08-22 12:11:40] [Rank 0] step:5521/10000 train_time:523188ms step_avg:94.76ms +[2025-08-22 12:11:42] [Rank 0] step:5541/10000 train_time:525163ms step_avg:94.78ms +[2025-08-22 12:11:42] [Rank 0] step:5541/10000 train_time:525163ms step_avg:94.78ms +[2025-08-22 12:11:44] [Rank 0] step:5561/10000 train_time:527137ms step_avg:94.79ms +[2025-08-22 12:11:44] [Rank 0] step:5561/10000 train_time:527137ms step_avg:94.79ms +[2025-08-22 12:11:46] [Rank 0] step:5581/10000 train_time:529111ms step_avg:94.81ms +[2025-08-22 12:11:46] [Rank 0] step:5581/10000 train_time:529111ms step_avg:94.81ms +[2025-08-22 12:11:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:11:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:12:02] [Rank 0] PRINT: step:5600/10000 val_loss:3.6789 svd_entropy: attn_qk:H=0.7636,top10E=0.26,eRank=190.0,q75/q25=50.37 attn_vo:H=0.8345,top10E=0.06,eRank=405.4,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.3,q75/q25=2.70 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.80 vo_prod:H=0.6915,top10E=0.11,eRank=226.9,q75/q25=inf train_time:531092ms step_avg:94.84ms +[2025-08-22 12:12:02] [Rank 0] PRINT: step:5600/10000 val_loss:3.6789 svd_entropy: attn_qk:H=0.7636,top10E=0.26,eRank=190.0,q75/q25=50.37 attn_vo:H=0.8345,top10E=0.06,eRank=405.4,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.3,q75/q25=2.70 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.80 vo_prod:H=0.6915,top10E=0.11,eRank=226.9,q75/q25=inf train_time:531092ms step_avg:94.84ms +[2025-08-22 12:12:02] [Rank 0] step:5601/10000 train_time:531115ms step_avg:94.83ms +[2025-08-22 12:12:02] [Rank 0] step:5601/10000 train_time:531115ms step_avg:94.83ms +[2025-08-22 12:12:04] [Rank 0] step:5621/10000 train_time:533084ms step_avg:94.84ms +[2025-08-22 12:12:04] [Rank 0] step:5621/10000 train_time:533084ms step_avg:94.84ms +[2025-08-22 12:12:06] [Rank 0] step:5641/10000 train_time:535051ms step_avg:94.85ms +[2025-08-22 12:12:06] [Rank 0] step:5641/10000 train_time:535051ms step_avg:94.85ms +[2025-08-22 12:12:08] [Rank 0] step:5661/10000 train_time:537019ms step_avg:94.86ms +[2025-08-22 12:12:08] [Rank 0] step:5661/10000 train_time:537019ms step_avg:94.86ms +[2025-08-22 12:12:10] [Rank 0] step:5681/10000 train_time:538991ms step_avg:94.88ms +[2025-08-22 12:12:10] [Rank 0] step:5681/10000 train_time:538991ms step_avg:94.88ms +[2025-08-22 12:12:12] [Rank 0] step:5701/10000 train_time:540963ms step_avg:94.89ms +[2025-08-22 12:12:12] [Rank 0] step:5701/10000 train_time:540963ms step_avg:94.89ms +[2025-08-22 12:12:14] [Rank 0] step:5721/10000 train_time:542939ms step_avg:94.90ms +[2025-08-22 12:12:14] [Rank 0] step:5721/10000 train_time:542939ms step_avg:94.90ms +[2025-08-22 12:12:16] [Rank 0] 
step:5741/10000 train_time:544910ms step_avg:94.92ms +[2025-08-22 12:12:16] [Rank 0] step:5741/10000 train_time:544910ms step_avg:94.92ms +[2025-08-22 12:12:18] [Rank 0] step:5761/10000 train_time:546885ms step_avg:94.93ms +[2025-08-22 12:12:18] [Rank 0] step:5761/10000 train_time:546885ms step_avg:94.93ms +[2025-08-22 12:12:20] [Rank 0] step:5781/10000 train_time:548860ms step_avg:94.94ms +[2025-08-22 12:12:20] [Rank 0] step:5781/10000 train_time:548860ms step_avg:94.94ms +[2025-08-22 12:12:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:12:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:12:36] [Rank 0] PRINT: step:5800/10000 val_loss:3.6764 svd_entropy: attn_qk:H=0.7643,top10E=0.26,eRank=190.6,q75/q25=50.34 attn_vo:H=0.8346,top10E=0.06,eRank=405.5,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.3,q75/q25=2.70 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.81 vo_prod:H=0.6916,top10E=0.11,eRank=227.1,q75/q25=inf train_time:550840ms step_avg:94.97ms +[2025-08-22 12:12:36] [Rank 0] PRINT: step:5800/10000 val_loss:3.6764 svd_entropy: attn_qk:H=0.7643,top10E=0.26,eRank=190.6,q75/q25=50.34 attn_vo:H=0.8346,top10E=0.06,eRank=405.5,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.3,q75/q25=2.70 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.81 vo_prod:H=0.6916,top10E=0.11,eRank=227.1,q75/q25=inf train_time:550840ms step_avg:94.97ms +[2025-08-22 12:12:36] [Rank 0] step:5801/10000 train_time:550863ms step_avg:94.96ms +[2025-08-22 12:12:36] [Rank 0] step:5801/10000 train_time:550863ms step_avg:94.96ms +[2025-08-22 12:12:38] [Rank 0] step:5821/10000 train_time:552819ms step_avg:94.97ms +[2025-08-22 12:12:38] [Rank 0] step:5821/10000 train_time:552819ms step_avg:94.97ms +[2025-08-22 12:12:40] [Rank 0] step:5841/10000 train_time:554791ms step_avg:94.98ms +[2025-08-22 
12:12:40] [Rank 0] step:5841/10000 train_time:554791ms step_avg:94.98ms +[2025-08-22 12:12:42] [Rank 0] step:5861/10000 train_time:556769ms step_avg:95.00ms +[2025-08-22 12:12:42] [Rank 0] step:5861/10000 train_time:556769ms step_avg:95.00ms +[2025-08-22 12:12:44] [Rank 0] step:5881/10000 train_time:558745ms step_avg:95.01ms +[2025-08-22 12:12:44] [Rank 0] step:5881/10000 train_time:558745ms step_avg:95.01ms +[2025-08-22 12:12:46] [Rank 0] step:5901/10000 train_time:560722ms step_avg:95.02ms +[2025-08-22 12:12:46] [Rank 0] step:5901/10000 train_time:560722ms step_avg:95.02ms +[2025-08-22 12:12:47] [Rank 0] step:5921/10000 train_time:562698ms step_avg:95.03ms +[2025-08-22 12:12:47] [Rank 0] step:5921/10000 train_time:562698ms step_avg:95.03ms +[2025-08-22 12:12:49] [Rank 0] step:5941/10000 train_time:564681ms step_avg:95.05ms +[2025-08-22 12:12:49] [Rank 0] step:5941/10000 train_time:564681ms step_avg:95.05ms +[2025-08-22 12:12:51] [Rank 0] step:5961/10000 train_time:566663ms step_avg:95.06ms +[2025-08-22 12:12:51] [Rank 0] step:5961/10000 train_time:566663ms step_avg:95.06ms +[2025-08-22 12:12:53] [Rank 0] step:5981/10000 train_time:568643ms step_avg:95.07ms +[2025-08-22 12:12:53] [Rank 0] step:5981/10000 train_time:568643ms step_avg:95.07ms +[2025-08-22 12:12:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:12:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:13:09] [Rank 0] PRINT: step:6000/10000 val_loss:3.6588 svd_entropy: attn_qk:H=0.7649,top10E=0.25,eRank=191.2,q75/q25=50.43 attn_vo:H=0.8346,top10E=0.06,eRank=405.6,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.4,q75/q25=2.70 mlp_w2:H=0.9686,top10E=0.05,eRank=623.8,q75/q25=2.80 vo_prod:H=0.6917,top10E=0.11,eRank=227.2,q75/q25=inf train_time:570624ms step_avg:95.10ms +[2025-08-22 12:13:09] [Rank 0] PRINT: step:6000/10000 val_loss:3.6588 svd_entropy: attn_qk:H=0.7649,top10E=0.25,eRank=191.2,q75/q25=50.43 attn_vo:H=0.8346,top10E=0.06,eRank=405.6,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.4,q75/q25=2.70 mlp_w2:H=0.9686,top10E=0.05,eRank=623.8,q75/q25=2.80 vo_prod:H=0.6917,top10E=0.11,eRank=227.2,q75/q25=inf train_time:570624ms step_avg:95.10ms +[2025-08-22 12:13:09] [Rank 0] step:6001/10000 train_time:570648ms step_avg:95.09ms +[2025-08-22 12:13:09] [Rank 0] step:6001/10000 train_time:570648ms step_avg:95.09ms +[2025-08-22 12:13:11] [Rank 0] step:6021/10000 train_time:572631ms step_avg:95.11ms +[2025-08-22 12:13:11] [Rank 0] step:6021/10000 train_time:572631ms step_avg:95.11ms +[2025-08-22 12:13:13] [Rank 0] step:6041/10000 train_time:574606ms step_avg:95.12ms +[2025-08-22 12:13:13] [Rank 0] step:6041/10000 train_time:574606ms step_avg:95.12ms +[2025-08-22 12:13:15] [Rank 0] step:6061/10000 train_time:576585ms step_avg:95.13ms +[2025-08-22 12:13:15] [Rank 0] step:6061/10000 train_time:576585ms step_avg:95.13ms +[2025-08-22 12:13:17] [Rank 0] step:6081/10000 train_time:578560ms step_avg:95.14ms +[2025-08-22 12:13:17] [Rank 0] step:6081/10000 train_time:578560ms step_avg:95.14ms +[2025-08-22 12:13:19] [Rank 0] step:6101/10000 train_time:580544ms step_avg:95.16ms +[2025-08-22 12:13:19] [Rank 0] step:6101/10000 train_time:580544ms step_avg:95.16ms +[2025-08-22 12:13:21] [Rank 0] step:6121/10000 train_time:582794ms step_avg:95.21ms +[2025-08-22 12:13:21] [Rank 0] step:6121/10000 train_time:582794ms step_avg:95.21ms +[2025-08-22 12:13:23] [Rank 0] 
step:6141/10000 train_time:584784ms step_avg:95.23ms +[2025-08-22 12:13:23] [Rank 0] step:6141/10000 train_time:584784ms step_avg:95.23ms +[2025-08-22 12:13:25] [Rank 0] step:6161/10000 train_time:586763ms step_avg:95.24ms +[2025-08-22 12:13:25] [Rank 0] step:6161/10000 train_time:586763ms step_avg:95.24ms +[2025-08-22 12:13:27] [Rank 0] step:6181/10000 train_time:588740ms step_avg:95.25ms +[2025-08-22 12:13:27] [Rank 0] step:6181/10000 train_time:588740ms step_avg:95.25ms +[2025-08-22 12:13:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:13:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:13:43] [Rank 0] PRINT: step:6200/10000 val_loss:3.6442 svd_entropy: attn_qk:H=0.7656,top10E=0.25,eRank=191.8,q75/q25=50.75 attn_vo:H=0.8346,top10E=0.06,eRank=405.7,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.4,q75/q25=2.70 mlp_w2:H=0.9686,top10E=0.05,eRank=623.8,q75/q25=2.80 vo_prod:H=0.6917,top10E=0.11,eRank=227.3,q75/q25=inf train_time:590723ms step_avg:95.28ms +[2025-08-22 12:13:43] [Rank 0] PRINT: step:6200/10000 val_loss:3.6442 svd_entropy: attn_qk:H=0.7656,top10E=0.25,eRank=191.8,q75/q25=50.75 attn_vo:H=0.8346,top10E=0.06,eRank=405.7,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.4,q75/q25=2.70 mlp_w2:H=0.9686,top10E=0.05,eRank=623.8,q75/q25=2.80 vo_prod:H=0.6917,top10E=0.11,eRank=227.3,q75/q25=inf train_time:590723ms step_avg:95.28ms +[2025-08-22 12:13:43] [Rank 0] step:6201/10000 train_time:590747ms step_avg:95.27ms +[2025-08-22 12:13:43] [Rank 0] step:6201/10000 train_time:590747ms step_avg:95.27ms +[2025-08-22 12:13:45] [Rank 0] step:6221/10000 train_time:592734ms step_avg:95.28ms +[2025-08-22 12:13:45] [Rank 0] step:6221/10000 train_time:592734ms step_avg:95.28ms +[2025-08-22 12:13:47] [Rank 0] step:6241/10000 train_time:594705ms step_avg:95.29ms +[2025-08-22 
12:13:47] [Rank 0] step:6241/10000 train_time:594705ms step_avg:95.29ms +[2025-08-22 12:13:49] [Rank 0] step:6261/10000 train_time:596683ms step_avg:95.30ms +[2025-08-22 12:13:49] [Rank 0] step:6261/10000 train_time:596683ms step_avg:95.30ms +[2025-08-22 12:13:51] [Rank 0] step:6281/10000 train_time:598664ms step_avg:95.31ms +[2025-08-22 12:13:51] [Rank 0] step:6281/10000 train_time:598664ms step_avg:95.31ms +[2025-08-22 12:13:53] [Rank 0] step:6301/10000 train_time:600642ms step_avg:95.32ms +[2025-08-22 12:13:53] [Rank 0] step:6301/10000 train_time:600642ms step_avg:95.32ms +[2025-08-22 12:13:55] [Rank 0] step:6321/10000 train_time:602622ms step_avg:95.34ms +[2025-08-22 12:13:55] [Rank 0] step:6321/10000 train_time:602622ms step_avg:95.34ms +[2025-08-22 12:13:57] [Rank 0] step:6341/10000 train_time:604601ms step_avg:95.35ms +[2025-08-22 12:13:57] [Rank 0] step:6341/10000 train_time:604601ms step_avg:95.35ms +[2025-08-22 12:13:59] [Rank 0] step:6361/10000 train_time:606588ms step_avg:95.36ms +[2025-08-22 12:13:59] [Rank 0] step:6361/10000 train_time:606588ms step_avg:95.36ms +[2025-08-22 12:14:01] [Rank 0] step:6381/10000 train_time:608569ms step_avg:95.37ms +[2025-08-22 12:14:01] [Rank 0] step:6381/10000 train_time:608569ms step_avg:95.37ms +[2025-08-22 12:14:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:14:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:14:16] [Rank 0] PRINT: step:6400/10000 val_loss:3.6331 svd_entropy: attn_qk:H=0.7661,top10E=0.25,eRank=192.2,q75/q25=50.84 attn_vo:H=0.8346,top10E=0.06,eRank=405.7,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.4,q75/q25=2.69 mlp_w2:H=0.9686,top10E=0.05,eRank=623.8,q75/q25=2.80 vo_prod:H=0.6917,top10E=0.11,eRank=227.4,q75/q25=inf train_time:610550ms step_avg:95.40ms +[2025-08-22 12:14:16] [Rank 0] PRINT: step:6400/10000 val_loss:3.6331 svd_entropy: attn_qk:H=0.7661,top10E=0.25,eRank=192.2,q75/q25=50.84 attn_vo:H=0.8346,top10E=0.06,eRank=405.7,q75/q25=inf mlp_w1:H=0.9733,top10E=0.04,eRank=643.4,q75/q25=2.69 mlp_w2:H=0.9686,top10E=0.05,eRank=623.8,q75/q25=2.80 vo_prod:H=0.6917,top10E=0.11,eRank=227.4,q75/q25=inf train_time:610550ms step_avg:95.40ms +[2025-08-22 12:14:17] [Rank 0] step:6401/10000 train_time:610572ms step_avg:95.39ms +[2025-08-22 12:14:17] [Rank 0] step:6401/10000 train_time:610572ms step_avg:95.39ms +[2025-08-22 12:14:19] [Rank 0] step:6421/10000 train_time:612548ms step_avg:95.40ms +[2025-08-22 12:14:19] [Rank 0] step:6421/10000 train_time:612548ms step_avg:95.40ms +[2025-08-22 12:14:20] [Rank 0] step:6441/10000 train_time:614522ms step_avg:95.41ms +[2025-08-22 12:14:20] [Rank 0] step:6441/10000 train_time:614522ms step_avg:95.41ms +[2025-08-22 12:14:22] [Rank 0] step:6461/10000 train_time:616504ms step_avg:95.42ms +[2025-08-22 12:14:22] [Rank 0] step:6461/10000 train_time:616504ms step_avg:95.42ms +[2025-08-22 12:14:24] [Rank 0] step:6481/10000 train_time:618491ms step_avg:95.43ms +[2025-08-22 12:14:24] [Rank 0] step:6481/10000 train_time:618491ms step_avg:95.43ms +[2025-08-22 12:14:26] [Rank 0] step:6501/10000 train_time:620468ms step_avg:95.44ms +[2025-08-22 12:14:26] [Rank 0] step:6501/10000 train_time:620468ms step_avg:95.44ms +[2025-08-22 12:14:28] [Rank 0] step:6521/10000 train_time:622444ms step_avg:95.45ms +[2025-08-22 12:14:28] [Rank 0] step:6521/10000 train_time:622444ms step_avg:95.45ms +[2025-08-22 12:14:30] [Rank 0] 
step:6541/10000 train_time:624427ms step_avg:95.46ms +[2025-08-22 12:14:30] [Rank 0] step:6541/10000 train_time:624427ms step_avg:95.46ms +[2025-08-22 12:14:32] [Rank 0] step:6561/10000 train_time:626408ms step_avg:95.47ms +[2025-08-22 12:14:32] [Rank 0] step:6561/10000 train_time:626408ms step_avg:95.47ms +[2025-08-22 12:14:34] [Rank 0] step:6581/10000 train_time:628461ms step_avg:95.50ms +[2025-08-22 12:14:34] [Rank 0] step:6581/10000 train_time:628461ms step_avg:95.50ms +[2025-08-22 12:14:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:14:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:14:50] [Rank 0] PRINT: step:6600/10000 val_loss:3.6185 svd_entropy: attn_qk:H=0.7667,top10E=0.25,eRank=192.7,q75/q25=50.90 attn_vo:H=0.8347,top10E=0.06,eRank=405.8,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.5,q75/q25=2.69 mlp_w2:H=0.9686,top10E=0.05,eRank=623.9,q75/q25=2.80 vo_prod:H=0.6918,top10E=0.11,eRank=227.5,q75/q25=inf train_time:630513ms step_avg:95.53ms +[2025-08-22 12:14:50] [Rank 0] PRINT: step:6600/10000 val_loss:3.6185 svd_entropy: attn_qk:H=0.7667,top10E=0.25,eRank=192.7,q75/q25=50.90 attn_vo:H=0.8347,top10E=0.06,eRank=405.8,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.5,q75/q25=2.69 mlp_w2:H=0.9686,top10E=0.05,eRank=623.9,q75/q25=2.80 vo_prod:H=0.6918,top10E=0.11,eRank=227.5,q75/q25=inf train_time:630513ms step_avg:95.53ms +[2025-08-22 12:14:50] [Rank 0] step:6601/10000 train_time:630536ms step_avg:95.52ms +[2025-08-22 12:14:50] [Rank 0] step:6601/10000 train_time:630536ms step_avg:95.52ms +[2025-08-22 12:14:52] [Rank 0] step:6621/10000 train_time:632504ms step_avg:95.53ms +[2025-08-22 12:14:52] [Rank 0] step:6621/10000 train_time:632504ms step_avg:95.53ms +[2025-08-22 12:14:54] [Rank 0] step:6641/10000 train_time:634493ms step_avg:95.54ms +[2025-08-22 
12:14:54] [Rank 0] step:6641/10000 train_time:634493ms step_avg:95.54ms +[2025-08-22 12:14:56] [Rank 0] step:6661/10000 train_time:636474ms step_avg:95.55ms +[2025-08-22 12:14:56] [Rank 0] step:6661/10000 train_time:636474ms step_avg:95.55ms +[2025-08-22 12:14:58] [Rank 0] step:6681/10000 train_time:638473ms step_avg:95.57ms +[2025-08-22 12:14:58] [Rank 0] step:6681/10000 train_time:638473ms step_avg:95.57ms +[2025-08-22 12:15:00] [Rank 0] step:6701/10000 train_time:640494ms step_avg:95.58ms +[2025-08-22 12:15:00] [Rank 0] step:6701/10000 train_time:640494ms step_avg:95.58ms +[2025-08-22 12:15:02] [Rank 0] step:6721/10000 train_time:642506ms step_avg:95.60ms +[2025-08-22 12:15:02] [Rank 0] step:6721/10000 train_time:642506ms step_avg:95.60ms +[2025-08-22 12:15:04] [Rank 0] step:6741/10000 train_time:644516ms step_avg:95.61ms +[2025-08-22 12:15:04] [Rank 0] step:6741/10000 train_time:644516ms step_avg:95.61ms +[2025-08-22 12:15:06] [Rank 0] step:6761/10000 train_time:646522ms step_avg:95.63ms +[2025-08-22 12:15:06] [Rank 0] step:6761/10000 train_time:646522ms step_avg:95.63ms +[2025-08-22 12:15:08] [Rank 0] step:6781/10000 train_time:648537ms step_avg:95.64ms +[2025-08-22 12:15:08] [Rank 0] step:6781/10000 train_time:648537ms step_avg:95.64ms +[2025-08-22 12:15:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:15:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:15:24] [Rank 0] PRINT: step:6800/10000 val_loss:3.6034 svd_entropy: attn_qk:H=0.7672,top10E=0.25,eRank=193.2,q75/q25=51.15 attn_vo:H=0.8347,top10E=0.06,eRank=405.9,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.5,q75/q25=2.69 mlp_w2:H=0.9687,top10E=0.05,eRank=623.9,q75/q25=2.80 vo_prod:H=0.6919,top10E=0.11,eRank=227.8,q75/q25=inf train_time:650560ms step_avg:95.67ms +[2025-08-22 12:15:24] [Rank 0] PRINT: step:6800/10000 val_loss:3.6034 svd_entropy: attn_qk:H=0.7672,top10E=0.25,eRank=193.2,q75/q25=51.15 attn_vo:H=0.8347,top10E=0.06,eRank=405.9,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.5,q75/q25=2.69 mlp_w2:H=0.9687,top10E=0.05,eRank=623.9,q75/q25=2.80 vo_prod:H=0.6919,top10E=0.11,eRank=227.8,q75/q25=inf train_time:650560ms step_avg:95.67ms +[2025-08-22 12:15:24] [Rank 0] step:6801/10000 train_time:650583ms step_avg:95.66ms +[2025-08-22 12:15:24] [Rank 0] step:6801/10000 train_time:650583ms step_avg:95.66ms +[2025-08-22 12:15:26] [Rank 0] step:6821/10000 train_time:652591ms step_avg:95.67ms +[2025-08-22 12:15:26] [Rank 0] step:6821/10000 train_time:652591ms step_avg:95.67ms +[2025-08-22 12:15:28] [Rank 0] step:6841/10000 train_time:654594ms step_avg:95.69ms +[2025-08-22 12:15:28] [Rank 0] step:6841/10000 train_time:654594ms step_avg:95.69ms +[2025-08-22 12:15:30] [Rank 0] step:6861/10000 train_time:656593ms step_avg:95.70ms +[2025-08-22 12:15:30] [Rank 0] step:6861/10000 train_time:656593ms step_avg:95.70ms +[2025-08-22 12:15:32] [Rank 0] step:6881/10000 train_time:658601ms step_avg:95.71ms +[2025-08-22 12:15:32] [Rank 0] step:6881/10000 train_time:658601ms step_avg:95.71ms +[2025-08-22 12:15:34] [Rank 0] step:6901/10000 train_time:660603ms step_avg:95.73ms +[2025-08-22 12:15:34] [Rank 0] step:6901/10000 train_time:660603ms step_avg:95.73ms +[2025-08-22 12:15:36] [Rank 0] step:6921/10000 train_time:662772ms step_avg:95.76ms +[2025-08-22 12:15:36] [Rank 0] step:6921/10000 train_time:662772ms step_avg:95.76ms +[2025-08-22 12:15:38] [Rank 0] 
step:6941/10000 train_time:664682ms step_avg:95.76ms +[2025-08-22 12:15:38] [Rank 0] step:6941/10000 train_time:664682ms step_avg:95.76ms +[2025-08-22 12:15:40] [Rank 0] step:6961/10000 train_time:666786ms step_avg:95.79ms +[2025-08-22 12:15:40] [Rank 0] step:6961/10000 train_time:666786ms step_avg:95.79ms +[2025-08-22 12:15:42] [Rank 0] step:6981/10000 train_time:668800ms step_avg:95.80ms +[2025-08-22 12:15:42] [Rank 0] step:6981/10000 train_time:668800ms step_avg:95.80ms +[2025-08-22 12:15:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:15:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:15:58] [Rank 0] PRINT: step:7000/10000 val_loss:3.5882 svd_entropy: attn_qk:H=0.7676,top10E=0.25,eRank=193.6,q75/q25=51.26 attn_vo:H=0.8347,top10E=0.06,eRank=405.9,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.5,q75/q25=2.69 mlp_w2:H=0.9687,top10E=0.05,eRank=624.0,q75/q25=2.80 vo_prod:H=0.6920,top10E=0.11,eRank=228.0,q75/q25=inf train_time:670816ms step_avg:95.83ms +[2025-08-22 12:15:58] [Rank 0] PRINT: step:7000/10000 val_loss:3.5882 svd_entropy: attn_qk:H=0.7676,top10E=0.25,eRank=193.6,q75/q25=51.26 attn_vo:H=0.8347,top10E=0.06,eRank=405.9,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.5,q75/q25=2.69 mlp_w2:H=0.9687,top10E=0.05,eRank=624.0,q75/q25=2.80 vo_prod:H=0.6920,top10E=0.11,eRank=228.0,q75/q25=inf train_time:670816ms step_avg:95.83ms +[2025-08-22 12:15:58] [Rank 0] step:7001/10000 train_time:670839ms step_avg:95.82ms +[2025-08-22 12:15:58] [Rank 0] step:7001/10000 train_time:670839ms step_avg:95.82ms +[2025-08-22 12:16:00] [Rank 0] step:7021/10000 train_time:672855ms step_avg:95.83ms +[2025-08-22 12:16:00] [Rank 0] step:7021/10000 train_time:672855ms step_avg:95.83ms +[2025-08-22 12:16:02] [Rank 0] step:7041/10000 train_time:674858ms step_avg:95.85ms +[2025-08-22 
12:16:02] [Rank 0] step:7041/10000 train_time:674858ms step_avg:95.85ms +[2025-08-22 12:16:04] [Rank 0] step:7061/10000 train_time:676864ms step_avg:95.86ms +[2025-08-22 12:16:04] [Rank 0] step:7061/10000 train_time:676864ms step_avg:95.86ms +[2025-08-22 12:16:06] [Rank 0] step:7081/10000 train_time:678868ms step_avg:95.87ms +[2025-08-22 12:16:06] [Rank 0] step:7081/10000 train_time:678868ms step_avg:95.87ms +[2025-08-22 12:16:08] [Rank 0] step:7101/10000 train_time:680882ms step_avg:95.89ms +[2025-08-22 12:16:08] [Rank 0] step:7101/10000 train_time:680882ms step_avg:95.89ms +[2025-08-22 12:16:10] [Rank 0] step:7121/10000 train_time:682885ms step_avg:95.90ms +[2025-08-22 12:16:10] [Rank 0] step:7121/10000 train_time:682885ms step_avg:95.90ms +[2025-08-22 12:16:12] [Rank 0] step:7141/10000 train_time:684890ms step_avg:95.91ms +[2025-08-22 12:16:12] [Rank 0] step:7141/10000 train_time:684890ms step_avg:95.91ms +[2025-08-22 12:16:14] [Rank 0] step:7161/10000 train_time:686899ms step_avg:95.92ms +[2025-08-22 12:16:14] [Rank 0] step:7161/10000 train_time:686899ms step_avg:95.92ms +[2025-08-22 12:16:16] [Rank 0] step:7181/10000 train_time:688908ms step_avg:95.93ms +[2025-08-22 12:16:16] [Rank 0] step:7181/10000 train_time:688908ms step_avg:95.93ms +[2025-08-22 12:16:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:16:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:16:32] [Rank 0] PRINT: step:7200/10000 val_loss:3.5768 svd_entropy: attn_qk:H=0.7681,top10E=0.25,eRank=194.0,q75/q25=51.16 attn_vo:H=0.8347,top10E=0.06,eRank=406.0,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.6,q75/q25=2.69 mlp_w2:H=0.9687,top10E=0.05,eRank=624.1,q75/q25=2.79 vo_prod:H=0.6922,top10E=0.11,eRank=228.2,q75/q25=inf train_time:690923ms step_avg:95.96ms +[2025-08-22 12:16:32] [Rank 0] PRINT: step:7200/10000 val_loss:3.5768 svd_entropy: attn_qk:H=0.7681,top10E=0.25,eRank=194.0,q75/q25=51.16 attn_vo:H=0.8347,top10E=0.06,eRank=406.0,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.6,q75/q25=2.69 mlp_w2:H=0.9687,top10E=0.05,eRank=624.1,q75/q25=2.79 vo_prod:H=0.6922,top10E=0.11,eRank=228.2,q75/q25=inf train_time:690923ms step_avg:95.96ms +[2025-08-22 12:16:32] [Rank 0] step:7201/10000 train_time:690947ms step_avg:95.95ms +[2025-08-22 12:16:32] [Rank 0] step:7201/10000 train_time:690947ms step_avg:95.95ms +[2025-08-22 12:16:34] [Rank 0] step:7221/10000 train_time:692946ms step_avg:95.96ms +[2025-08-22 12:16:34] [Rank 0] step:7221/10000 train_time:692946ms step_avg:95.96ms +[2025-08-22 12:16:36] [Rank 0] step:7241/10000 train_time:694946ms step_avg:95.97ms +[2025-08-22 12:16:36] [Rank 0] step:7241/10000 train_time:694946ms step_avg:95.97ms +[2025-08-22 12:16:38] [Rank 0] step:7261/10000 train_time:696945ms step_avg:95.98ms +[2025-08-22 12:16:38] [Rank 0] step:7261/10000 train_time:696945ms step_avg:95.98ms +[2025-08-22 12:16:40] [Rank 0] step:7281/10000 train_time:699038ms step_avg:96.01ms +[2025-08-22 12:16:40] [Rank 0] step:7281/10000 train_time:699038ms step_avg:96.01ms +[2025-08-22 12:16:42] [Rank 0] step:7301/10000 train_time:701112ms step_avg:96.03ms +[2025-08-22 12:16:42] [Rank 0] step:7301/10000 train_time:701112ms step_avg:96.03ms +[2025-08-22 12:16:44] [Rank 0] step:7321/10000 train_time:703130ms step_avg:96.04ms +[2025-08-22 12:16:44] [Rank 0] step:7321/10000 train_time:703130ms step_avg:96.04ms +[2025-08-22 12:16:46] [Rank 0] 
step:7341/10000 train_time:705137ms step_avg:96.05ms +[2025-08-22 12:16:46] [Rank 0] step:7341/10000 train_time:705137ms step_avg:96.05ms +[2025-08-22 12:16:48] [Rank 0] step:7361/10000 train_time:707154ms step_avg:96.07ms +[2025-08-22 12:16:48] [Rank 0] step:7361/10000 train_time:707154ms step_avg:96.07ms +[2025-08-22 12:16:50] [Rank 0] step:7381/10000 train_time:709171ms step_avg:96.08ms +[2025-08-22 12:16:50] [Rank 0] step:7381/10000 train_time:709171ms step_avg:96.08ms +[2025-08-22 12:16:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:16:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:17:06] [Rank 0] PRINT: step:7400/10000 val_loss:3.5604 svd_entropy: attn_qk:H=0.7684,top10E=0.25,eRank=194.3,q75/q25=51.21 attn_vo:H=0.8348,top10E=0.06,eRank=406.1,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.6,q75/q25=2.69 mlp_w2:H=0.9687,top10E=0.05,eRank=624.1,q75/q25=2.79 vo_prod:H=0.6923,top10E=0.11,eRank=228.5,q75/q25=inf train_time:711172ms step_avg:96.10ms +[2025-08-22 12:17:06] [Rank 0] PRINT: step:7400/10000 val_loss:3.5604 svd_entropy: attn_qk:H=0.7684,top10E=0.25,eRank=194.3,q75/q25=51.21 attn_vo:H=0.8348,top10E=0.06,eRank=406.1,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.6,q75/q25=2.69 mlp_w2:H=0.9687,top10E=0.05,eRank=624.1,q75/q25=2.79 vo_prod:H=0.6923,top10E=0.11,eRank=228.5,q75/q25=inf train_time:711172ms step_avg:96.10ms +[2025-08-22 12:17:06] [Rank 0] step:7401/10000 train_time:711196ms step_avg:96.09ms +[2025-08-22 12:17:06] [Rank 0] step:7401/10000 train_time:711196ms step_avg:96.09ms +[2025-08-22 12:17:08] [Rank 0] step:7421/10000 train_time:713213ms step_avg:96.11ms +[2025-08-22 12:17:08] [Rank 0] step:7421/10000 train_time:713213ms step_avg:96.11ms +[2025-08-22 12:17:10] [Rank 0] step:7441/10000 train_time:715220ms step_avg:96.12ms +[2025-08-22 
12:17:10] [Rank 0] step:7441/10000 train_time:715220ms step_avg:96.12ms +[2025-08-22 12:17:12] [Rank 0] step:7461/10000 train_time:717231ms step_avg:96.13ms +[2025-08-22 12:17:12] [Rank 0] step:7461/10000 train_time:717231ms step_avg:96.13ms +[2025-08-22 12:17:14] [Rank 0] step:7481/10000 train_time:719252ms step_avg:96.14ms +[2025-08-22 12:17:14] [Rank 0] step:7481/10000 train_time:719252ms step_avg:96.14ms +[2025-08-22 12:17:16] [Rank 0] step:7501/10000 train_time:721270ms step_avg:96.16ms +[2025-08-22 12:17:16] [Rank 0] step:7501/10000 train_time:721270ms step_avg:96.16ms +[2025-08-22 12:17:18] [Rank 0] step:7521/10000 train_time:723287ms step_avg:96.17ms +[2025-08-22 12:17:18] [Rank 0] step:7521/10000 train_time:723287ms step_avg:96.17ms +[2025-08-22 12:17:20] [Rank 0] step:7541/10000 train_time:725313ms step_avg:96.18ms +[2025-08-22 12:17:20] [Rank 0] step:7541/10000 train_time:725313ms step_avg:96.18ms +[2025-08-22 12:17:22] [Rank 0] step:7561/10000 train_time:727322ms step_avg:96.19ms +[2025-08-22 12:17:22] [Rank 0] step:7561/10000 train_time:727322ms step_avg:96.19ms +[2025-08-22 12:17:24] [Rank 0] step:7581/10000 train_time:729349ms step_avg:96.21ms +[2025-08-22 12:17:24] [Rank 0] step:7581/10000 train_time:729349ms step_avg:96.21ms +[2025-08-22 12:17:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:17:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:17:40] [Rank 0] PRINT: step:7600/10000 val_loss:3.5516 svd_entropy: attn_qk:H=0.7688,top10E=0.25,eRank=194.7,q75/q25=51.15 attn_vo:H=0.8348,top10E=0.06,eRank=406.2,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.7,q75/q25=2.69 mlp_w2:H=0.9687,top10E=0.05,eRank=624.2,q75/q25=2.79 vo_prod:H=0.6924,top10E=0.11,eRank=228.7,q75/q25=inf train_time:731375ms step_avg:96.23ms +[2025-08-22 12:17:40] [Rank 0] PRINT: step:7600/10000 val_loss:3.5516 svd_entropy: attn_qk:H=0.7688,top10E=0.25,eRank=194.7,q75/q25=51.15 attn_vo:H=0.8348,top10E=0.06,eRank=406.2,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.7,q75/q25=2.69 mlp_w2:H=0.9687,top10E=0.05,eRank=624.2,q75/q25=2.79 vo_prod:H=0.6924,top10E=0.11,eRank=228.7,q75/q25=inf train_time:731375ms step_avg:96.23ms +[2025-08-22 12:17:41] [Rank 0] step:7601/10000 train_time:731399ms step_avg:96.22ms +[2025-08-22 12:17:41] [Rank 0] step:7601/10000 train_time:731399ms step_avg:96.22ms +[2025-08-22 12:17:43] [Rank 0] step:7621/10000 train_time:733521ms step_avg:96.25ms +[2025-08-22 12:17:43] [Rank 0] step:7621/10000 train_time:733521ms step_avg:96.25ms +[2025-08-22 12:17:45] [Rank 0] step:7641/10000 train_time:735584ms step_avg:96.27ms +[2025-08-22 12:17:45] [Rank 0] step:7641/10000 train_time:735584ms step_avg:96.27ms +[2025-08-22 12:17:47] [Rank 0] step:7661/10000 train_time:737592ms step_avg:96.28ms +[2025-08-22 12:17:47] [Rank 0] step:7661/10000 train_time:737592ms step_avg:96.28ms +[2025-08-22 12:17:49] [Rank 0] step:7681/10000 train_time:739599ms step_avg:96.29ms +[2025-08-22 12:17:49] [Rank 0] step:7681/10000 train_time:739599ms step_avg:96.29ms +[2025-08-22 12:17:51] [Rank 0] step:7701/10000 train_time:741607ms step_avg:96.30ms +[2025-08-22 12:17:51] [Rank 0] step:7701/10000 train_time:741607ms step_avg:96.30ms +[2025-08-22 12:17:53] [Rank 0] step:7721/10000 train_time:743629ms step_avg:96.31ms +[2025-08-22 12:17:53] [Rank 0] step:7721/10000 train_time:743629ms step_avg:96.31ms +[2025-08-22 12:17:55] [Rank 0] 
step:7741/10000 train_time:745639ms step_avg:96.32ms +[2025-08-22 12:17:55] [Rank 0] step:7741/10000 train_time:745639ms step_avg:96.32ms +[2025-08-22 12:17:57] [Rank 0] step:7761/10000 train_time:747659ms step_avg:96.34ms +[2025-08-22 12:17:57] [Rank 0] step:7761/10000 train_time:747659ms step_avg:96.34ms +[2025-08-22 12:17:59] [Rank 0] step:7781/10000 train_time:749675ms step_avg:96.35ms +[2025-08-22 12:17:59] [Rank 0] step:7781/10000 train_time:749675ms step_avg:96.35ms +[2025-08-22 12:18:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:18:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:18:15] [Rank 0] PRINT: step:7800/10000 val_loss:3.5388 svd_entropy: attn_qk:H=0.7692,top10E=0.25,eRank=195.0,q75/q25=51.21 attn_vo:H=0.8348,top10E=0.06,eRank=406.2,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.8,q75/q25=2.69 mlp_w2:H=0.9688,top10E=0.05,eRank=624.3,q75/q25=2.79 vo_prod:H=0.6925,top10E=0.11,eRank=228.9,q75/q25=inf train_time:751701ms step_avg:96.37ms +[2025-08-22 12:18:15] [Rank 0] PRINT: step:7800/10000 val_loss:3.5388 svd_entropy: attn_qk:H=0.7692,top10E=0.25,eRank=195.0,q75/q25=51.21 attn_vo:H=0.8348,top10E=0.06,eRank=406.2,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.8,q75/q25=2.69 mlp_w2:H=0.9688,top10E=0.05,eRank=624.3,q75/q25=2.79 vo_prod:H=0.6925,top10E=0.11,eRank=228.9,q75/q25=inf train_time:751701ms step_avg:96.37ms +[2025-08-22 12:18:15] [Rank 0] step:7801/10000 train_time:751724ms step_avg:96.36ms +[2025-08-22 12:18:15] [Rank 0] step:7801/10000 train_time:751724ms step_avg:96.36ms +[2025-08-22 12:18:17] [Rank 0] step:7821/10000 train_time:753714ms step_avg:96.37ms +[2025-08-22 12:18:17] [Rank 0] step:7821/10000 train_time:753714ms step_avg:96.37ms +[2025-08-22 12:18:19] [Rank 0] step:7841/10000 train_time:755715ms step_avg:96.38ms +[2025-08-22 
12:18:19] [Rank 0] step:7841/10000 train_time:755715ms step_avg:96.38ms +[2025-08-22 12:18:21] [Rank 0] step:7861/10000 train_time:757727ms step_avg:96.39ms +[2025-08-22 12:18:21] [Rank 0] step:7861/10000 train_time:757727ms step_avg:96.39ms +[2025-08-22 12:18:23] [Rank 0] step:7881/10000 train_time:759741ms step_avg:96.40ms +[2025-08-22 12:18:23] [Rank 0] step:7881/10000 train_time:759741ms step_avg:96.40ms +[2025-08-22 12:18:25] [Rank 0] step:7901/10000 train_time:761747ms step_avg:96.41ms +[2025-08-22 12:18:25] [Rank 0] step:7901/10000 train_time:761747ms step_avg:96.41ms +[2025-08-22 12:18:27] [Rank 0] step:7921/10000 train_time:763764ms step_avg:96.42ms +[2025-08-22 12:18:27] [Rank 0] step:7921/10000 train_time:763764ms step_avg:96.42ms +[2025-08-22 12:18:29] [Rank 0] step:7941/10000 train_time:765782ms step_avg:96.43ms +[2025-08-22 12:18:29] [Rank 0] step:7941/10000 train_time:765782ms step_avg:96.43ms +[2025-08-22 12:18:31] [Rank 0] step:7961/10000 train_time:767797ms step_avg:96.44ms +[2025-08-22 12:18:31] [Rank 0] step:7961/10000 train_time:767797ms step_avg:96.44ms +[2025-08-22 12:18:33] [Rank 0] step:7981/10000 train_time:769802ms step_avg:96.45ms +[2025-08-22 12:18:33] [Rank 0] step:7981/10000 train_time:769802ms step_avg:96.45ms +[2025-08-22 12:18:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:18:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:18:49] [Rank 0] PRINT: step:8000/10000 val_loss:3.5230 svd_entropy: attn_qk:H=0.7695,top10E=0.25,eRank=195.3,q75/q25=50.98 attn_vo:H=0.8348,top10E=0.06,eRank=406.3,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.8,q75/q25=2.69 mlp_w2:H=0.9688,top10E=0.05,eRank=624.5,q75/q25=2.79 vo_prod:H=0.6926,top10E=0.11,eRank=229.1,q75/q25=inf train_time:771821ms step_avg:96.48ms +[2025-08-22 12:18:49] [Rank 0] PRINT: step:8000/10000 val_loss:3.5230 svd_entropy: attn_qk:H=0.7695,top10E=0.25,eRank=195.3,q75/q25=50.98 attn_vo:H=0.8348,top10E=0.06,eRank=406.3,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.8,q75/q25=2.69 mlp_w2:H=0.9688,top10E=0.05,eRank=624.5,q75/q25=2.79 vo_prod:H=0.6926,top10E=0.11,eRank=229.1,q75/q25=inf train_time:771821ms step_avg:96.48ms +[2025-08-22 12:18:49] [Rank 0] step:8001/10000 train_time:771844ms step_avg:96.47ms +[2025-08-22 12:18:49] [Rank 0] step:8001/10000 train_time:771844ms step_avg:96.47ms +[2025-08-22 12:18:51] [Rank 0] step:8021/10000 train_time:773854ms step_avg:96.48ms +[2025-08-22 12:18:51] [Rank 0] step:8021/10000 train_time:773854ms step_avg:96.48ms +[2025-08-22 12:18:53] [Rank 0] step:8041/10000 train_time:775872ms step_avg:96.49ms +[2025-08-22 12:18:53] [Rank 0] step:8041/10000 train_time:775872ms step_avg:96.49ms +[2025-08-22 12:18:55] [Rank 0] step:8061/10000 train_time:777886ms step_avg:96.50ms +[2025-08-22 12:18:55] [Rank 0] step:8061/10000 train_time:777886ms step_avg:96.50ms +[2025-08-22 12:18:57] [Rank 0] step:8081/10000 train_time:779886ms step_avg:96.51ms +[2025-08-22 12:18:57] [Rank 0] step:8081/10000 train_time:779886ms step_avg:96.51ms +[2025-08-22 12:18:59] [Rank 0] step:8101/10000 train_time:781902ms step_avg:96.52ms +[2025-08-22 12:18:59] [Rank 0] step:8101/10000 train_time:781902ms step_avg:96.52ms +[2025-08-22 12:19:01] [Rank 0] step:8121/10000 train_time:783916ms step_avg:96.53ms +[2025-08-22 12:19:01] [Rank 0] step:8121/10000 train_time:783916ms step_avg:96.53ms +[2025-08-22 12:19:04] [Rank 0] 
step:8141/10000 train_time:786578ms step_avg:96.62ms +[2025-08-22 12:19:04] [Rank 0] step:8141/10000 train_time:786578ms step_avg:96.62ms +[2025-08-22 12:19:06] [Rank 0] step:8161/10000 train_time:788608ms step_avg:96.63ms +[2025-08-22 12:19:06] [Rank 0] step:8161/10000 train_time:788608ms step_avg:96.63ms +[2025-08-22 12:19:08] [Rank 0] step:8181/10000 train_time:790653ms step_avg:96.65ms +[2025-08-22 12:19:08] [Rank 0] step:8181/10000 train_time:790653ms step_avg:96.65ms +[2025-08-22 12:19:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:19:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:19:24] [Rank 0] PRINT: step:8200/10000 val_loss:3.5119 svd_entropy: attn_qk:H=0.7698,top10E=0.25,eRank=195.5,q75/q25=51.19 attn_vo:H=0.8349,top10E=0.06,eRank=406.4,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.9,q75/q25=2.69 mlp_w2:H=0.9688,top10E=0.05,eRank=624.6,q75/q25=2.79 vo_prod:H=0.6927,top10E=0.11,eRank=229.4,q75/q25=inf train_time:792719ms step_avg:96.67ms +[2025-08-22 12:19:24] [Rank 0] PRINT: step:8200/10000 val_loss:3.5119 svd_entropy: attn_qk:H=0.7698,top10E=0.25,eRank=195.5,q75/q25=51.19 attn_vo:H=0.8349,top10E=0.06,eRank=406.4,q75/q25=inf mlp_w1:H=0.9734,top10E=0.04,eRank=643.9,q75/q25=2.69 mlp_w2:H=0.9688,top10E=0.05,eRank=624.6,q75/q25=2.79 vo_prod:H=0.6927,top10E=0.11,eRank=229.4,q75/q25=inf train_time:792719ms step_avg:96.67ms +[2025-08-22 12:19:24] [Rank 0] step:8201/10000 train_time:792743ms step_avg:96.66ms +[2025-08-22 12:19:24] [Rank 0] step:8201/10000 train_time:792743ms step_avg:96.66ms +[2025-08-22 12:19:26] [Rank 0] step:8221/10000 train_time:794782ms step_avg:96.68ms +[2025-08-22 12:19:26] [Rank 0] step:8221/10000 train_time:794782ms step_avg:96.68ms +[2025-08-22 12:19:28] [Rank 0] step:8241/10000 train_time:796828ms step_avg:96.69ms +[2025-08-22 
12:19:28] [Rank 0] step:8241/10000 train_time:796828ms step_avg:96.69ms +[2025-08-22 12:19:30] [Rank 0] step:8261/10000 train_time:798879ms step_avg:96.70ms +[2025-08-22 12:19:30] [Rank 0] step:8261/10000 train_time:798879ms step_avg:96.70ms +[2025-08-22 12:19:32] [Rank 0] step:8281/10000 train_time:800918ms step_avg:96.72ms +[2025-08-22 12:19:32] [Rank 0] step:8281/10000 train_time:800918ms step_avg:96.72ms +[2025-08-22 12:19:34] [Rank 0] step:8301/10000 train_time:802963ms step_avg:96.73ms +[2025-08-22 12:19:34] [Rank 0] step:8301/10000 train_time:802963ms step_avg:96.73ms +[2025-08-22 12:19:36] [Rank 0] step:8321/10000 train_time:805000ms step_avg:96.74ms +[2025-08-22 12:19:36] [Rank 0] step:8321/10000 train_time:805000ms step_avg:96.74ms +[2025-08-22 12:19:38] [Rank 0] step:8341/10000 train_time:807055ms step_avg:96.76ms +[2025-08-22 12:19:38] [Rank 0] step:8341/10000 train_time:807055ms step_avg:96.76ms +[2025-08-22 12:19:40] [Rank 0] step:8361/10000 train_time:809097ms step_avg:96.77ms +[2025-08-22 12:19:40] [Rank 0] step:8361/10000 train_time:809097ms step_avg:96.77ms +[2025-08-22 12:19:43] [Rank 0] step:8381/10000 train_time:811139ms step_avg:96.78ms +[2025-08-22 12:19:43] [Rank 0] step:8381/10000 train_time:811139ms step_avg:96.78ms +[2025-08-22 12:19:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:19:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:19:58] [Rank 0] PRINT: step:8400/10000 val_loss:3.4987 svd_entropy: attn_qk:H=0.7701,top10E=0.25,eRank=195.8,q75/q25=51.62 attn_vo:H=0.8349,top10E=0.06,eRank=406.5,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=643.9,q75/q25=2.68 mlp_w2:H=0.9688,top10E=0.05,eRank=624.6,q75/q25=2.79 vo_prod:H=0.6929,top10E=0.11,eRank=229.6,q75/q25=inf train_time:813185ms step_avg:96.81ms +[2025-08-22 12:19:58] [Rank 0] PRINT: step:8400/10000 val_loss:3.4987 svd_entropy: attn_qk:H=0.7701,top10E=0.25,eRank=195.8,q75/q25=51.62 attn_vo:H=0.8349,top10E=0.06,eRank=406.5,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=643.9,q75/q25=2.68 mlp_w2:H=0.9688,top10E=0.05,eRank=624.6,q75/q25=2.79 vo_prod:H=0.6929,top10E=0.11,eRank=229.6,q75/q25=inf train_time:813185ms step_avg:96.81ms +[2025-08-22 12:19:59] [Rank 0] step:8401/10000 train_time:813209ms step_avg:96.80ms +[2025-08-22 12:19:59] [Rank 0] step:8401/10000 train_time:813209ms step_avg:96.80ms +[2025-08-22 12:20:01] [Rank 0] step:8421/10000 train_time:815242ms step_avg:96.81ms +[2025-08-22 12:20:01] [Rank 0] step:8421/10000 train_time:815242ms step_avg:96.81ms +[2025-08-22 12:20:03] [Rank 0] step:8441/10000 train_time:817279ms step_avg:96.82ms +[2025-08-22 12:20:03] [Rank 0] step:8441/10000 train_time:817279ms step_avg:96.82ms +[2025-08-22 12:20:05] [Rank 0] step:8461/10000 train_time:819310ms step_avg:96.83ms +[2025-08-22 12:20:05] [Rank 0] step:8461/10000 train_time:819310ms step_avg:96.83ms +[2025-08-22 12:20:07] [Rank 0] step:8481/10000 train_time:821355ms step_avg:96.85ms +[2025-08-22 12:20:07] [Rank 0] step:8481/10000 train_time:821355ms step_avg:96.85ms +[2025-08-22 12:20:09] [Rank 0] step:8501/10000 train_time:823417ms step_avg:96.86ms +[2025-08-22 12:20:09] [Rank 0] step:8501/10000 train_time:823417ms step_avg:96.86ms +[2025-08-22 12:20:11] [Rank 0] step:8521/10000 train_time:825459ms step_avg:96.87ms +[2025-08-22 12:20:11] [Rank 0] step:8521/10000 train_time:825459ms step_avg:96.87ms +[2025-08-22 12:20:13] [Rank 0] 
step:8541/10000 train_time:827513ms step_avg:96.89ms +[2025-08-22 12:20:13] [Rank 0] step:8541/10000 train_time:827513ms step_avg:96.89ms +[2025-08-22 12:20:15] [Rank 0] step:8561/10000 train_time:829564ms step_avg:96.90ms +[2025-08-22 12:20:15] [Rank 0] step:8561/10000 train_time:829564ms step_avg:96.90ms +[2025-08-22 12:20:17] [Rank 0] step:8581/10000 train_time:831608ms step_avg:96.91ms +[2025-08-22 12:20:17] [Rank 0] step:8581/10000 train_time:831608ms step_avg:96.91ms +[2025-08-22 12:20:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:20:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:20:33] [Rank 0] PRINT: step:8600/10000 val_loss:3.4904 svd_entropy: attn_qk:H=0.7704,top10E=0.25,eRank=196.0,q75/q25=51.20 attn_vo:H=0.8349,top10E=0.06,eRank=406.6,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=643.9,q75/q25=2.68 mlp_w2:H=0.9689,top10E=0.05,eRank=624.7,q75/q25=2.79 vo_prod:H=0.6929,top10E=0.11,eRank=229.7,q75/q25=inf train_time:833643ms step_avg:96.94ms +[2025-08-22 12:20:33] [Rank 0] PRINT: step:8600/10000 val_loss:3.4904 svd_entropy: attn_qk:H=0.7704,top10E=0.25,eRank=196.0,q75/q25=51.20 attn_vo:H=0.8349,top10E=0.06,eRank=406.6,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=643.9,q75/q25=2.68 mlp_w2:H=0.9689,top10E=0.05,eRank=624.7,q75/q25=2.79 vo_prod:H=0.6929,top10E=0.11,eRank=229.7,q75/q25=inf train_time:833643ms step_avg:96.94ms +[2025-08-22 12:20:33] [Rank 0] step:8601/10000 train_time:833666ms step_avg:96.93ms +[2025-08-22 12:20:33] [Rank 0] step:8601/10000 train_time:833666ms step_avg:96.93ms +[2025-08-22 12:20:35] [Rank 0] step:8621/10000 train_time:835704ms step_avg:96.94ms +[2025-08-22 12:20:35] [Rank 0] step:8621/10000 train_time:835704ms step_avg:96.94ms +[2025-08-22 12:20:37] [Rank 0] step:8641/10000 train_time:837739ms step_avg:96.95ms +[2025-08-22 
12:20:37] [Rank 0] step:8641/10000 train_time:837739ms step_avg:96.95ms +[2025-08-22 12:20:39] [Rank 0] step:8661/10000 train_time:839778ms step_avg:96.96ms +[2025-08-22 12:20:39] [Rank 0] step:8661/10000 train_time:839778ms step_avg:96.96ms +[2025-08-22 12:20:41] [Rank 0] step:8681/10000 train_time:841821ms step_avg:96.97ms +[2025-08-22 12:20:41] [Rank 0] step:8681/10000 train_time:841821ms step_avg:96.97ms +[2025-08-22 12:20:43] [Rank 0] step:8701/10000 train_time:843854ms step_avg:96.98ms +[2025-08-22 12:20:43] [Rank 0] step:8701/10000 train_time:843854ms step_avg:96.98ms +[2025-08-22 12:20:45] [Rank 0] step:8721/10000 train_time:845899ms step_avg:97.00ms +[2025-08-22 12:20:45] [Rank 0] step:8721/10000 train_time:845899ms step_avg:97.00ms +[2025-08-22 12:20:47] [Rank 0] step:8741/10000 train_time:847930ms step_avg:97.01ms +[2025-08-22 12:20:47] [Rank 0] step:8741/10000 train_time:847930ms step_avg:97.01ms +[2025-08-22 12:20:49] [Rank 0] step:8761/10000 train_time:849973ms step_avg:97.02ms +[2025-08-22 12:20:49] [Rank 0] step:8761/10000 train_time:849973ms step_avg:97.02ms +[2025-08-22 12:20:51] [Rank 0] step:8781/10000 train_time:852019ms step_avg:97.03ms +[2025-08-22 12:20:51] [Rank 0] step:8781/10000 train_time:852019ms step_avg:97.03ms +[2025-08-22 12:20:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:20:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:21:07] [Rank 0] PRINT: step:8800/10000 val_loss:3.4786 svd_entropy: attn_qk:H=0.7705,top10E=0.25,eRank=196.2,q75/q25=51.30 attn_vo:H=0.8349,top10E=0.06,eRank=406.7,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=644.0,q75/q25=2.68 mlp_w2:H=0.9689,top10E=0.05,eRank=624.8,q75/q25=2.79 vo_prod:H=0.6930,top10E=0.11,eRank=229.9,q75/q25=inf train_time:854141ms step_avg:97.06ms +[2025-08-22 12:21:07] [Rank 0] PRINT: step:8800/10000 val_loss:3.4786 svd_entropy: attn_qk:H=0.7705,top10E=0.25,eRank=196.2,q75/q25=51.30 attn_vo:H=0.8349,top10E=0.06,eRank=406.7,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=644.0,q75/q25=2.68 mlp_w2:H=0.9689,top10E=0.05,eRank=624.8,q75/q25=2.79 vo_prod:H=0.6930,top10E=0.11,eRank=229.9,q75/q25=inf train_time:854141ms step_avg:97.06ms +[2025-08-22 12:21:07] [Rank 0] step:8801/10000 train_time:854163ms step_avg:97.05ms +[2025-08-22 12:21:07] [Rank 0] step:8801/10000 train_time:854163ms step_avg:97.05ms +[2025-08-22 12:21:09] [Rank 0] step:8821/10000 train_time:856188ms step_avg:97.06ms +[2025-08-22 12:21:09] [Rank 0] step:8821/10000 train_time:856188ms step_avg:97.06ms +[2025-08-22 12:21:11] [Rank 0] step:8841/10000 train_time:858246ms step_avg:97.08ms +[2025-08-22 12:21:11] [Rank 0] step:8841/10000 train_time:858246ms step_avg:97.08ms +[2025-08-22 12:21:13] [Rank 0] step:8861/10000 train_time:860277ms step_avg:97.09ms +[2025-08-22 12:21:13] [Rank 0] step:8861/10000 train_time:860277ms step_avg:97.09ms +[2025-08-22 12:21:15] [Rank 0] step:8881/10000 train_time:862318ms step_avg:97.10ms +[2025-08-22 12:21:15] [Rank 0] step:8881/10000 train_time:862318ms step_avg:97.10ms +[2025-08-22 12:21:17] [Rank 0] step:8901/10000 train_time:864361ms step_avg:97.11ms +[2025-08-22 12:21:17] [Rank 0] step:8901/10000 train_time:864361ms step_avg:97.11ms +[2025-08-22 12:21:19] [Rank 0] step:8921/10000 train_time:866415ms step_avg:97.12ms +[2025-08-22 12:21:19] [Rank 0] step:8921/10000 train_time:866415ms step_avg:97.12ms +[2025-08-22 12:21:22] [Rank 0] 
step:8941/10000 train_time:868462ms step_avg:97.13ms +[2025-08-22 12:21:22] [Rank 0] step:8941/10000 train_time:868462ms step_avg:97.13ms +[2025-08-22 12:21:24] [Rank 0] step:8961/10000 train_time:870505ms step_avg:97.14ms +[2025-08-22 12:21:24] [Rank 0] step:8961/10000 train_time:870505ms step_avg:97.14ms +[2025-08-22 12:21:26] [Rank 0] step:8981/10000 train_time:872548ms step_avg:97.15ms +[2025-08-22 12:21:26] [Rank 0] step:8981/10000 train_time:872548ms step_avg:97.15ms +[2025-08-22 12:21:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:21:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:21:42] [Rank 0] PRINT: step:9000/10000 val_loss:3.4672 svd_entropy: attn_qk:H=0.7707,top10E=0.25,eRank=196.4,q75/q25=51.24 attn_vo:H=0.8350,top10E=0.06,eRank=406.7,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=644.0,q75/q25=2.68 mlp_w2:H=0.9689,top10E=0.05,eRank=624.9,q75/q25=2.78 vo_prod:H=0.6931,top10E=0.11,eRank=230.0,q75/q25=inf train_time:874593ms step_avg:97.18ms +[2025-08-22 12:21:42] [Rank 0] PRINT: step:9000/10000 val_loss:3.4672 svd_entropy: attn_qk:H=0.7707,top10E=0.25,eRank=196.4,q75/q25=51.24 attn_vo:H=0.8350,top10E=0.06,eRank=406.7,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=644.0,q75/q25=2.68 mlp_w2:H=0.9689,top10E=0.05,eRank=624.9,q75/q25=2.78 vo_prod:H=0.6931,top10E=0.11,eRank=230.0,q75/q25=inf train_time:874593ms step_avg:97.18ms +[2025-08-22 12:21:42] [Rank 0] step:9001/10000 train_time:874616ms step_avg:97.17ms +[2025-08-22 12:21:42] [Rank 0] step:9001/10000 train_time:874616ms step_avg:97.17ms +[2025-08-22 12:21:44] [Rank 0] step:9021/10000 train_time:876657ms step_avg:97.18ms +[2025-08-22 12:21:44] [Rank 0] step:9021/10000 train_time:876657ms step_avg:97.18ms +[2025-08-22 12:21:46] [Rank 0] step:9041/10000 train_time:878703ms step_avg:97.19ms +[2025-08-22 
12:21:46] [Rank 0] step:9041/10000 train_time:878703ms step_avg:97.19ms +[2025-08-22 12:21:48] [Rank 0] step:9061/10000 train_time:880754ms step_avg:97.20ms +[2025-08-22 12:21:48] [Rank 0] step:9061/10000 train_time:880754ms step_avg:97.20ms +[2025-08-22 12:21:50] [Rank 0] step:9081/10000 train_time:882805ms step_avg:97.21ms +[2025-08-22 12:21:50] [Rank 0] step:9081/10000 train_time:882805ms step_avg:97.21ms +[2025-08-22 12:21:52] [Rank 0] step:9101/10000 train_time:884867ms step_avg:97.23ms +[2025-08-22 12:21:52] [Rank 0] step:9101/10000 train_time:884867ms step_avg:97.23ms +[2025-08-22 12:21:54] [Rank 0] step:9121/10000 train_time:886917ms step_avg:97.24ms +[2025-08-22 12:21:54] [Rank 0] step:9121/10000 train_time:886917ms step_avg:97.24ms +[2025-08-22 12:21:56] [Rank 0] step:9141/10000 train_time:888954ms step_avg:97.25ms +[2025-08-22 12:21:56] [Rank 0] step:9141/10000 train_time:888954ms step_avg:97.25ms +[2025-08-22 12:21:58] [Rank 0] step:9161/10000 train_time:891049ms step_avg:97.27ms +[2025-08-22 12:21:58] [Rank 0] step:9161/10000 train_time:891049ms step_avg:97.27ms +[2025-08-22 12:22:00] [Rank 0] step:9181/10000 train_time:893189ms step_avg:97.29ms +[2025-08-22 12:22:00] [Rank 0] step:9181/10000 train_time:893189ms step_avg:97.29ms +[2025-08-22 12:22:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:22:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:22:16] [Rank 0] PRINT: step:9200/10000 val_loss:3.4584 svd_entropy: attn_qk:H=0.7709,top10E=0.25,eRank=196.5,q75/q25=51.26 attn_vo:H=0.8350,top10E=0.06,eRank=406.8,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=644.0,q75/q25=2.68 mlp_w2:H=0.9689,top10E=0.05,eRank=625.0,q75/q25=2.78 vo_prod:H=0.6931,top10E=0.11,eRank=230.1,q75/q25=inf train_time:895233ms step_avg:97.31ms +[2025-08-22 12:22:16] [Rank 0] PRINT: step:9200/10000 val_loss:3.4584 svd_entropy: attn_qk:H=0.7709,top10E=0.25,eRank=196.5,q75/q25=51.26 attn_vo:H=0.8350,top10E=0.06,eRank=406.8,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=644.0,q75/q25=2.68 mlp_w2:H=0.9689,top10E=0.05,eRank=625.0,q75/q25=2.78 vo_prod:H=0.6931,top10E=0.11,eRank=230.1,q75/q25=inf train_time:895233ms step_avg:97.31ms +[2025-08-22 12:22:16] [Rank 0] step:9201/10000 train_time:895257ms step_avg:97.30ms +[2025-08-22 12:22:16] [Rank 0] step:9201/10000 train_time:895257ms step_avg:97.30ms +[2025-08-22 12:22:18] [Rank 0] step:9221/10000 train_time:897317ms step_avg:97.31ms +[2025-08-22 12:22:18] [Rank 0] step:9221/10000 train_time:897317ms step_avg:97.31ms +[2025-08-22 12:22:20] [Rank 0] step:9241/10000 train_time:899367ms step_avg:97.32ms +[2025-08-22 12:22:20] [Rank 0] step:9241/10000 train_time:899367ms step_avg:97.32ms +[2025-08-22 12:22:23] [Rank 0] step:9261/10000 train_time:901417ms step_avg:97.33ms +[2025-08-22 12:22:23] [Rank 0] step:9261/10000 train_time:901417ms step_avg:97.33ms +[2025-08-22 12:22:25] [Rank 0] step:9281/10000 train_time:903448ms step_avg:97.34ms +[2025-08-22 12:22:25] [Rank 0] step:9281/10000 train_time:903448ms step_avg:97.34ms +[2025-08-22 12:22:27] [Rank 0] step:9301/10000 train_time:905485ms step_avg:97.35ms +[2025-08-22 12:22:27] [Rank 0] step:9301/10000 train_time:905485ms step_avg:97.35ms +[2025-08-22 12:22:29] [Rank 0] step:9321/10000 train_time:907531ms step_avg:97.36ms +[2025-08-22 12:22:29] [Rank 0] step:9321/10000 train_time:907531ms step_avg:97.36ms +[2025-08-22 12:22:31] [Rank 0] 
step:9341/10000 train_time:909575ms step_avg:97.37ms +[2025-08-22 12:22:31] [Rank 0] step:9341/10000 train_time:909575ms step_avg:97.37ms +[2025-08-22 12:22:33] [Rank 0] step:9361/10000 train_time:911623ms step_avg:97.39ms +[2025-08-22 12:22:33] [Rank 0] step:9361/10000 train_time:911623ms step_avg:97.39ms +[2025-08-22 12:22:35] [Rank 0] step:9381/10000 train_time:913679ms step_avg:97.40ms +[2025-08-22 12:22:35] [Rank 0] step:9381/10000 train_time:913679ms step_avg:97.40ms +[2025-08-22 12:22:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:22:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:22:51] [Rank 0] PRINT: step:9400/10000 val_loss:3.4496 svd_entropy: attn_qk:H=0.7710,top10E=0.25,eRank=196.6,q75/q25=51.09 attn_vo:H=0.8350,top10E=0.06,eRank=406.8,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=644.1,q75/q25=2.68 mlp_w2:H=0.9689,top10E=0.05,eRank=625.1,q75/q25=2.78 vo_prod:H=0.6932,top10E=0.11,eRank=230.2,q75/q25=inf train_time:915732ms step_avg:97.42ms +[2025-08-22 12:22:51] [Rank 0] PRINT: step:9400/10000 val_loss:3.4496 svd_entropy: attn_qk:H=0.7710,top10E=0.25,eRank=196.6,q75/q25=51.09 attn_vo:H=0.8350,top10E=0.06,eRank=406.8,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=644.1,q75/q25=2.68 mlp_w2:H=0.9689,top10E=0.05,eRank=625.1,q75/q25=2.78 vo_prod:H=0.6932,top10E=0.11,eRank=230.2,q75/q25=inf train_time:915732ms step_avg:97.42ms +[2025-08-22 12:22:51] [Rank 0] step:9401/10000 train_time:915756ms step_avg:97.41ms +[2025-08-22 12:22:51] [Rank 0] step:9401/10000 train_time:915756ms step_avg:97.41ms +[2025-08-22 12:22:53] [Rank 0] step:9421/10000 train_time:917795ms step_avg:97.42ms +[2025-08-22 12:22:53] [Rank 0] step:9421/10000 train_time:917795ms step_avg:97.42ms +[2025-08-22 12:22:55] [Rank 0] step:9441/10000 train_time:919834ms step_avg:97.43ms +[2025-08-22 
12:22:55] [Rank 0] step:9441/10000 train_time:919834ms step_avg:97.43ms +[2025-08-22 12:22:57] [Rank 0] step:9461/10000 train_time:921880ms step_avg:97.44ms +[2025-08-22 12:22:57] [Rank 0] step:9461/10000 train_time:921880ms step_avg:97.44ms +[2025-08-22 12:22:59] [Rank 0] step:9481/10000 train_time:923931ms step_avg:97.45ms +[2025-08-22 12:22:59] [Rank 0] step:9481/10000 train_time:923931ms step_avg:97.45ms +[2025-08-22 12:23:01] [Rank 0] step:9501/10000 train_time:926029ms step_avg:97.47ms +[2025-08-22 12:23:01] [Rank 0] step:9501/10000 train_time:926029ms step_avg:97.47ms +[2025-08-22 12:23:03] [Rank 0] step:9521/10000 train_time:928136ms step_avg:97.48ms +[2025-08-22 12:23:03] [Rank 0] step:9521/10000 train_time:928136ms step_avg:97.48ms +[2025-08-22 12:23:05] [Rank 0] step:9541/10000 train_time:930180ms step_avg:97.49ms +[2025-08-22 12:23:05] [Rank 0] step:9541/10000 train_time:930180ms step_avg:97.49ms +[2025-08-22 12:23:07] [Rank 0] step:9561/10000 train_time:932216ms step_avg:97.50ms +[2025-08-22 12:23:07] [Rank 0] step:9561/10000 train_time:932216ms step_avg:97.50ms +[2025-08-22 12:23:09] [Rank 0] step:9581/10000 train_time:934258ms step_avg:97.51ms +[2025-08-22 12:23:09] [Rank 0] step:9581/10000 train_time:934258ms step_avg:97.51ms +[2025-08-22 12:23:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:23:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:23:25] [Rank 0] PRINT: step:9600/10000 val_loss:3.4409 svd_entropy: attn_qk:H=0.7711,top10E=0.25,eRank=196.7,q75/q25=51.10 attn_vo:H=0.8350,top10E=0.06,eRank=406.8,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=644.1,q75/q25=2.68 mlp_w2:H=0.9690,top10E=0.05,eRank=625.2,q75/q25=2.78 vo_prod:H=0.6933,top10E=0.11,eRank=230.4,q75/q25=inf train_time:936318ms step_avg:97.53ms +[2025-08-22 12:23:25] [Rank 0] PRINT: step:9600/10000 val_loss:3.4409 svd_entropy: attn_qk:H=0.7711,top10E=0.25,eRank=196.7,q75/q25=51.10 attn_vo:H=0.8350,top10E=0.06,eRank=406.8,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=644.1,q75/q25=2.68 mlp_w2:H=0.9690,top10E=0.05,eRank=625.2,q75/q25=2.78 vo_prod:H=0.6933,top10E=0.11,eRank=230.4,q75/q25=inf train_time:936318ms step_avg:97.53ms +[2025-08-22 12:23:26] [Rank 0] step:9601/10000 train_time:936340ms step_avg:97.53ms +[2025-08-22 12:23:26] [Rank 0] step:9601/10000 train_time:936340ms step_avg:97.53ms +[2025-08-22 12:23:28] [Rank 0] step:9621/10000 train_time:938396ms step_avg:97.54ms +[2025-08-22 12:23:28] [Rank 0] step:9621/10000 train_time:938396ms step_avg:97.54ms +[2025-08-22 12:23:30] [Rank 0] step:9641/10000 train_time:940441ms step_avg:97.55ms +[2025-08-22 12:23:30] [Rank 0] step:9641/10000 train_time:940441ms step_avg:97.55ms +[2025-08-22 12:23:32] [Rank 0] step:9661/10000 train_time:942513ms step_avg:97.56ms +[2025-08-22 12:23:32] [Rank 0] step:9661/10000 train_time:942513ms step_avg:97.56ms +[2025-08-22 12:23:34] [Rank 0] step:9681/10000 train_time:944579ms step_avg:97.57ms +[2025-08-22 12:23:34] [Rank 0] step:9681/10000 train_time:944579ms step_avg:97.57ms +[2025-08-22 12:23:36] [Rank 0] step:9701/10000 train_time:946662ms step_avg:97.58ms +[2025-08-22 12:23:36] [Rank 0] step:9701/10000 train_time:946662ms step_avg:97.58ms +[2025-08-22 12:23:38] [Rank 0] step:9721/10000 train_time:948729ms step_avg:97.60ms +[2025-08-22 12:23:38] [Rank 0] step:9721/10000 train_time:948729ms step_avg:97.60ms +[2025-08-22 12:23:40] [Rank 0] 
step:9741/10000 train_time:950816ms step_avg:97.61ms +[2025-08-22 12:23:40] [Rank 0] step:9741/10000 train_time:950816ms step_avg:97.61ms +[2025-08-22 12:23:42] [Rank 0] step:9761/10000 train_time:952890ms step_avg:97.62ms +[2025-08-22 12:23:42] [Rank 0] step:9761/10000 train_time:952890ms step_avg:97.62ms +[2025-08-22 12:23:44] [Rank 0] step:9781/10000 train_time:954973ms step_avg:97.64ms +[2025-08-22 12:23:44] [Rank 0] step:9781/10000 train_time:954973ms step_avg:97.64ms +[2025-08-22 12:23:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:23:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:24:00] [Rank 0] PRINT: step:9800/10000 val_loss:3.4330 svd_entropy: attn_qk:H=0.7712,top10E=0.25,eRank=196.7,q75/q25=51.10 attn_vo:H=0.8350,top10E=0.06,eRank=406.9,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=644.1,q75/q25=2.68 mlp_w2:H=0.9690,top10E=0.05,eRank=625.2,q75/q25=2.78 vo_prod:H=0.6933,top10E=0.11,eRank=230.5,q75/q25=inf train_time:957068ms step_avg:97.66ms +[2025-08-22 12:24:00] [Rank 0] PRINT: step:9800/10000 val_loss:3.4330 svd_entropy: attn_qk:H=0.7712,top10E=0.25,eRank=196.7,q75/q25=51.10 attn_vo:H=0.8350,top10E=0.06,eRank=406.9,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=644.1,q75/q25=2.68 mlp_w2:H=0.9690,top10E=0.05,eRank=625.2,q75/q25=2.78 vo_prod:H=0.6933,top10E=0.11,eRank=230.5,q75/q25=inf train_time:957068ms step_avg:97.66ms +[2025-08-22 12:24:00] [Rank 0] step:9801/10000 train_time:957092ms step_avg:97.65ms +[2025-08-22 12:24:00] [Rank 0] step:9801/10000 train_time:957092ms step_avg:97.65ms +[2025-08-22 12:24:02] [Rank 0] step:9821/10000 train_time:959160ms step_avg:97.66ms +[2025-08-22 12:24:02] [Rank 0] step:9821/10000 train_time:959160ms step_avg:97.66ms +[2025-08-22 12:24:04] [Rank 0] step:9841/10000 train_time:961316ms step_avg:97.68ms +[2025-08-22 
12:24:04] [Rank 0] step:9841/10000 train_time:961316ms step_avg:97.68ms +[2025-08-22 12:24:06] [Rank 0] step:9861/10000 train_time:963425ms step_avg:97.70ms +[2025-08-22 12:24:06] [Rank 0] step:9861/10000 train_time:963425ms step_avg:97.70ms +[2025-08-22 12:24:09] [Rank 0] step:9881/10000 train_time:965486ms step_avg:97.71ms +[2025-08-22 12:24:09] [Rank 0] step:9881/10000 train_time:965486ms step_avg:97.71ms +[2025-08-22 12:24:11] [Rank 0] step:9901/10000 train_time:967569ms step_avg:97.72ms +[2025-08-22 12:24:11] [Rank 0] step:9901/10000 train_time:967569ms step_avg:97.72ms +[2025-08-22 12:24:13] [Rank 0] step:9921/10000 train_time:969635ms step_avg:97.74ms +[2025-08-22 12:24:13] [Rank 0] step:9921/10000 train_time:969635ms step_avg:97.74ms +[2025-08-22 12:24:15] [Rank 0] step:9941/10000 train_time:971718ms step_avg:97.75ms +[2025-08-22 12:24:15] [Rank 0] step:9941/10000 train_time:971718ms step_avg:97.75ms +[2025-08-22 12:24:17] [Rank 0] step:9961/10000 train_time:973784ms step_avg:97.76ms +[2025-08-22 12:24:17] [Rank 0] step:9961/10000 train_time:973784ms step_avg:97.76ms +[2025-08-22 12:24:19] [Rank 0] step:9981/10000 train_time:975860ms step_avg:97.77ms +[2025-08-22 12:24:19] [Rank 0] step:9981/10000 train_time:975860ms step_avg:97.77ms +[2025-08-22 12:24:21] [Rank 0] step:10000/10000 train_time:977838ms step_avg:97.78ms +[2025-08-22 12:24:21] [Rank 0] step:10000/10000 train_time:977838ms step_avg:97.78ms +[2025-08-22 12:24:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:24:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:24:35] [Rank 0] PRINT: step:10000/10000 val_loss:3.4260 svd_entropy: attn_qk:H=0.7712,top10E=0.25,eRank=196.8,q75/q25=51.13 attn_vo:H=0.8350,top10E=0.06,eRank=406.9,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=644.1,q75/q25=2.68 mlp_w2:H=0.9690,top10E=0.05,eRank=625.2,q75/q25=2.78 vo_prod:H=0.6934,top10E=0.11,eRank=230.5,q75/q25=inf train_time:977950ms step_avg:97.79ms +[2025-08-22 12:24:35] [Rank 0] PRINT: step:10000/10000 val_loss:3.4260 svd_entropy: attn_qk:H=0.7712,top10E=0.25,eRank=196.8,q75/q25=51.13 attn_vo:H=0.8350,top10E=0.06,eRank=406.9,q75/q25=inf mlp_w1:H=0.9735,top10E=0.04,eRank=644.1,q75/q25=2.68 mlp_w2:H=0.9690,top10E=0.05,eRank=625.2,q75/q25=2.78 vo_prod:H=0.6934,top10E=0.11,eRank=230.5,q75/q25=inf train_time:977950ms step_avg:97.79ms +[2025-08-22 12:24:35] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 12:24:35 2025 --- +[2025-08-22 12:24:35] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 12:24:35 2025 --- +[2025-08-22 12:24:35] [Rank 0] PRINT: Peak memory allocated: 11123 MiB reserved: 16916 MiB +[2025-08-22 12:24:35] [Rank 0] PRINT: Peak memory allocated: 11123 MiB reserved: 16916 MiB diff --git a/logs_svd_gated/mode_7_param_gated_seed_42/config.json b/logs_svd_gated/mode_7_param_gated_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..9db6ede8204459e8e102edf51830c9b03fc5874c --- /dev/null +++ b/logs_svd_gated/mode_7_param_gated_seed_42/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 7, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + "cooldown_frac": 
0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "fd7ea98a-a2b1-42e8-ba31-6452c9eb90d6", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_7_param_gated_seed_42/training_log_fd7ea98a-a2b1-42e8-ba31-6452c9eb90d6.txt b/logs_svd_gated/mode_7_param_gated_seed_42/training_log_fd7ea98a-a2b1-42e8-ba31-6452c9eb90d6.txt new file mode 100644 index 0000000000000000000000000000000000000000..a24b03422d7265bacffdd79e766459cf88c80e9b --- /dev/null +++ b/logs_svd_gated/mode_7_param_gated_seed_42/training_log_fd7ea98a-a2b1-42e8-ba31-6452c9eb90d6.txt @@ -0,0 +1,2926 @@ +[2025-08-22 17:09:41] [Rank 0] PRINT: --- Script Start: Fri Aug 22 17:09:41 2025 --- +[2025-08-22 17:09:41] [Rank 0] PRINT: --- Script Start: Fri Aug 22 17:09:41 2025 --- +[2025-08-22 17:09:41] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=7, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 17:09:41] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=7, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 17:09:41] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 17:09:41] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 17:09:41] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 17:09:41] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 17:09:41] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_7_param_gated_seed_42 +[2025-08-22 17:09:41] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_7_param_gated_seed_42 +[2025-08-22 17:09:41] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import 
argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k 
= min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 17:09:41] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 17:09:41] [Rank 0] PRINT: Constructing model... +[2025-08-22 17:09:41] [Rank 0] PRINT: Constructing model... +[2025-08-22 17:09:43] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 17:09:43] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 17:09:43] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 17:09:43] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 17:09:43] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 17:09:43] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 17:09:43] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 7 +[2025-08-22 17:09:43] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 7 +[2025-08-22 17:09:43] [Rank 0] PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: 0.05). +[2025-08-22 17:09:43] [Rank 0] PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: 0.05). +[2025-08-22 17:09:43] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 17:09:43] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 17:09:43] [Rank 0] PRINT: Muon optimizer is active with 58 parameters. +[2025-08-22 17:09:43] [Rank 0] PRINT: Muon optimizer is active with 58 parameters. +[2025-08-22 17:09:44] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 17:09:44] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 17:09:44] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 17:09:44] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 17:09:44] [Rank 0] PRINT: Starting warmup... +[2025-08-22 17:09:44] [Rank 0] PRINT: Starting warmup... +[2025-08-22 17:10:28] [Rank 0] PRINT: Warmup complete. +[2025-08-22 17:10:28] [Rank 0] PRINT: Warmup complete. +[2025-08-22 17:10:28] [Rank 0] PRINT: Starting training... +[2025-08-22 17:10:28] [Rank 0] PRINT: Starting training... 
+[2025-08-22 17:10:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:10:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:10:46] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 17:10:46] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 17:10:48] [Rank 0] step:21/10000 train_time:2030ms step_avg:96.67ms +[2025-08-22 17:10:48] [Rank 0] step:21/10000 train_time:2030ms step_avg:96.67ms +[2025-08-22 17:10:50] [Rank 0] step:41/10000 train_time:3848ms step_avg:93.84ms +[2025-08-22 17:10:50] [Rank 0] step:41/10000 train_time:3848ms step_avg:93.84ms +[2025-08-22 17:10:51] [Rank 0] step:61/10000 train_time:5666ms step_avg:92.88ms +[2025-08-22 17:10:51] [Rank 0] step:61/10000 train_time:5666ms step_avg:92.88ms +[2025-08-22 17:10:53] [Rank 0] step:81/10000 train_time:7486ms step_avg:92.42ms +[2025-08-22 17:10:53] [Rank 0] step:81/10000 train_time:7486ms step_avg:92.42ms +[2025-08-22 17:10:55] [Rank 0] step:101/10000 train_time:9306ms step_avg:92.14ms +[2025-08-22 17:10:55] [Rank 0] step:101/10000 train_time:9306ms step_avg:92.14ms +[2025-08-22 17:10:57] [Rank 0] step:121/10000 train_time:11127ms step_avg:91.96ms +[2025-08-22 17:10:57] [Rank 0] step:121/10000 
train_time:11127ms step_avg:91.96ms +[2025-08-22 17:10:59] [Rank 0] step:141/10000 train_time:12946ms step_avg:91.82ms +[2025-08-22 17:10:59] [Rank 0] step:141/10000 train_time:12946ms step_avg:91.82ms +[2025-08-22 17:11:01] [Rank 0] step:161/10000 train_time:14768ms step_avg:91.73ms +[2025-08-22 17:11:01] [Rank 0] step:161/10000 train_time:14768ms step_avg:91.73ms +[2025-08-22 17:11:02] [Rank 0] step:181/10000 train_time:16589ms step_avg:91.65ms +[2025-08-22 17:11:02] [Rank 0] step:181/10000 train_time:16589ms step_avg:91.65ms +[2025-08-22 17:11:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:11:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:11:18] [Rank 0] PRINT: step:200/10000 val_loss:5.3351 svd_entropy: attn_qk:H=0.6992,top10E=0.37,eRank=146.9,q75/q25=36.75 attn_vo:H=0.8341,top10E=0.06,eRank=404.1,q75/q25=inf mlp_w1:H=0.9714,top10E=0.04,eRank=635.3,q75/q25=2.94 mlp_w2:H=0.9699,top10E=0.04,eRank=628.8,q75/q25=3.06 vo_prod:H=0.6858,top10E=0.11,eRank=215.8,q75/q25=inf train_time:18414ms step_avg:92.07ms +[2025-08-22 17:11:18] [Rank 0] PRINT: step:200/10000 val_loss:5.3351 svd_entropy: attn_qk:H=0.6992,top10E=0.37,eRank=146.9,q75/q25=36.75 attn_vo:H=0.8341,top10E=0.06,eRank=404.1,q75/q25=inf mlp_w1:H=0.9714,top10E=0.04,eRank=635.3,q75/q25=2.94 mlp_w2:H=0.9699,top10E=0.04,eRank=628.8,q75/q25=3.06 vo_prod:H=0.6858,top10E=0.11,eRank=215.8,q75/q25=inf train_time:18414ms step_avg:92.07ms +[2025-08-22 17:11:18] [Rank 0] step:201/10000 train_time:18438ms step_avg:91.73ms +[2025-08-22 17:11:18] [Rank 0] step:201/10000 train_time:18438ms step_avg:91.73ms +[2025-08-22 17:11:20] [Rank 0] step:221/10000 train_time:20260ms step_avg:91.68ms +[2025-08-22 17:11:20] [Rank 0] step:221/10000 train_time:20260ms step_avg:91.68ms +[2025-08-22 17:11:22] [Rank 0] step:241/10000 
train_time:22081ms step_avg:91.62ms +[2025-08-22 17:11:22] [Rank 0] step:241/10000 train_time:22081ms step_avg:91.62ms +[2025-08-22 17:11:23] [Rank 0] step:261/10000 train_time:23901ms step_avg:91.58ms +[2025-08-22 17:11:23] [Rank 0] step:261/10000 train_time:23901ms step_avg:91.58ms +[2025-08-22 17:11:25] [Rank 0] step:281/10000 train_time:25724ms step_avg:91.55ms +[2025-08-22 17:11:25] [Rank 0] step:281/10000 train_time:25724ms step_avg:91.55ms +[2025-08-22 17:11:27] [Rank 0] step:301/10000 train_time:27548ms step_avg:91.52ms +[2025-08-22 17:11:27] [Rank 0] step:301/10000 train_time:27548ms step_avg:91.52ms +[2025-08-22 17:11:29] [Rank 0] step:321/10000 train_time:29372ms step_avg:91.50ms +[2025-08-22 17:11:29] [Rank 0] step:321/10000 train_time:29372ms step_avg:91.50ms +[2025-08-22 17:11:31] [Rank 0] step:341/10000 train_time:31197ms step_avg:91.49ms +[2025-08-22 17:11:31] [Rank 0] step:341/10000 train_time:31197ms step_avg:91.49ms +[2025-08-22 17:11:33] [Rank 0] step:361/10000 train_time:33022ms step_avg:91.47ms +[2025-08-22 17:11:33] [Rank 0] step:361/10000 train_time:33022ms step_avg:91.47ms +[2025-08-22 17:11:34] [Rank 0] step:381/10000 train_time:34847ms step_avg:91.46ms +[2025-08-22 17:11:34] [Rank 0] step:381/10000 train_time:34847ms step_avg:91.46ms +[2025-08-22 17:11:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:11:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:11:50] [Rank 0] PRINT: step:400/10000 val_loss:4.9870 svd_entropy: attn_qk:H=0.7187,top10E=0.33,eRank=157.2,q75/q25=44.14 attn_vo:H=0.8380,top10E=0.05,eRank=415.4,q75/q25=inf mlp_w1:H=0.9707,top10E=0.04,eRank=632.3,q75/q25=2.96 mlp_w2:H=0.9695,top10E=0.04,eRank=627.2,q75/q25=3.03 vo_prod:H=0.6949,top10E=0.10,eRank=231.6,q75/q25=inf train_time:36675ms step_avg:91.69ms +[2025-08-22 17:11:50] [Rank 0] PRINT: step:400/10000 val_loss:4.9870 svd_entropy: attn_qk:H=0.7187,top10E=0.33,eRank=157.2,q75/q25=44.14 attn_vo:H=0.8380,top10E=0.05,eRank=415.4,q75/q25=inf mlp_w1:H=0.9707,top10E=0.04,eRank=632.3,q75/q25=2.96 mlp_w2:H=0.9695,top10E=0.04,eRank=627.2,q75/q25=3.03 vo_prod:H=0.6949,top10E=0.10,eRank=231.6,q75/q25=inf train_time:36675ms step_avg:91.69ms +[2025-08-22 17:11:50] [Rank 0] step:401/10000 train_time:36700ms step_avg:91.52ms +[2025-08-22 17:11:50] [Rank 0] step:401/10000 train_time:36700ms step_avg:91.52ms +[2025-08-22 17:11:52] [Rank 0] step:421/10000 train_time:38659ms step_avg:91.83ms +[2025-08-22 17:11:52] [Rank 0] step:421/10000 train_time:38659ms step_avg:91.83ms +[2025-08-22 17:11:54] [Rank 0] step:441/10000 train_time:40481ms step_avg:91.79ms +[2025-08-22 17:11:54] [Rank 0] step:441/10000 train_time:40481ms step_avg:91.79ms +[2025-08-22 17:11:55] [Rank 0] step:461/10000 train_time:42302ms step_avg:91.76ms +[2025-08-22 17:11:55] [Rank 0] step:461/10000 train_time:42302ms step_avg:91.76ms +[2025-08-22 17:11:57] [Rank 0] step:481/10000 train_time:44124ms step_avg:91.73ms +[2025-08-22 17:11:57] [Rank 0] step:481/10000 train_time:44124ms step_avg:91.73ms +[2025-08-22 17:11:59] [Rank 0] step:501/10000 train_time:45946ms step_avg:91.71ms +[2025-08-22 17:11:59] [Rank 0] step:501/10000 train_time:45946ms step_avg:91.71ms +[2025-08-22 17:12:01] [Rank 0] step:521/10000 train_time:47770ms step_avg:91.69ms +[2025-08-22 17:12:01] [Rank 0] step:521/10000 train_time:47770ms step_avg:91.69ms +[2025-08-22 17:12:03] [Rank 0] step:541/10000 train_time:49597ms 
step_avg:91.68ms +[2025-08-22 17:12:03] [Rank 0] step:541/10000 train_time:49597ms step_avg:91.68ms +[2025-08-22 17:12:05] [Rank 0] step:561/10000 train_time:51424ms step_avg:91.66ms +[2025-08-22 17:12:05] [Rank 0] step:561/10000 train_time:51424ms step_avg:91.66ms +[2025-08-22 17:12:06] [Rank 0] step:581/10000 train_time:53252ms step_avg:91.66ms +[2025-08-22 17:12:06] [Rank 0] step:581/10000 train_time:53252ms step_avg:91.66ms +[2025-08-22 17:12:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:12:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:12:22] [Rank 0] PRINT: step:600/10000 val_loss:4.7733 svd_entropy: attn_qk:H=0.7250,top10E=0.32,eRank=161.0,q75/q25=47.06 attn_vo:H=0.8364,top10E=0.05,eRank=410.5,q75/q25=inf mlp_w1:H=0.9703,top10E=0.04,eRank=630.9,q75/q25=2.97 mlp_w2:H=0.9685,top10E=0.04,eRank=623.1,q75/q25=3.05 vo_prod:H=0.6925,top10E=0.10,eRank=227.3,q75/q25=inf train_time:55083ms step_avg:91.80ms +[2025-08-22 17:12:22] [Rank 0] PRINT: step:600/10000 val_loss:4.7733 svd_entropy: attn_qk:H=0.7250,top10E=0.32,eRank=161.0,q75/q25=47.06 attn_vo:H=0.8364,top10E=0.05,eRank=410.5,q75/q25=inf mlp_w1:H=0.9703,top10E=0.04,eRank=630.9,q75/q25=2.97 mlp_w2:H=0.9685,top10E=0.04,eRank=623.1,q75/q25=3.05 vo_prod:H=0.6925,top10E=0.10,eRank=227.3,q75/q25=inf train_time:55083ms step_avg:91.80ms +[2025-08-22 17:12:22] [Rank 0] step:601/10000 train_time:55106ms step_avg:91.69ms +[2025-08-22 17:12:22] [Rank 0] step:601/10000 train_time:55106ms step_avg:91.69ms +[2025-08-22 17:12:24] [Rank 0] step:621/10000 train_time:56933ms step_avg:91.68ms +[2025-08-22 17:12:24] [Rank 0] step:621/10000 train_time:56933ms step_avg:91.68ms +[2025-08-22 17:12:26] [Rank 0] step:641/10000 train_time:58753ms step_avg:91.66ms +[2025-08-22 17:12:26] [Rank 0] step:641/10000 train_time:58753ms 
step_avg:91.66ms +[2025-08-22 17:12:27] [Rank 0] step:661/10000 train_time:60574ms step_avg:91.64ms +[2025-08-22 17:12:27] [Rank 0] step:661/10000 train_time:60574ms step_avg:91.64ms +[2025-08-22 17:12:29] [Rank 0] step:681/10000 train_time:62397ms step_avg:91.63ms +[2025-08-22 17:12:29] [Rank 0] step:681/10000 train_time:62397ms step_avg:91.63ms +[2025-08-22 17:12:31] [Rank 0] step:701/10000 train_time:64222ms step_avg:91.61ms +[2025-08-22 17:12:31] [Rank 0] step:701/10000 train_time:64222ms step_avg:91.61ms +[2025-08-22 17:12:33] [Rank 0] step:721/10000 train_time:66045ms step_avg:91.60ms +[2025-08-22 17:12:33] [Rank 0] step:721/10000 train_time:66045ms step_avg:91.60ms +[2025-08-22 17:12:35] [Rank 0] step:741/10000 train_time:67868ms step_avg:91.59ms +[2025-08-22 17:12:35] [Rank 0] step:741/10000 train_time:67868ms step_avg:91.59ms +[2025-08-22 17:12:36] [Rank 0] step:761/10000 train_time:69704ms step_avg:91.60ms +[2025-08-22 17:12:36] [Rank 0] step:761/10000 train_time:69704ms step_avg:91.60ms +[2025-08-22 17:12:38] [Rank 0] step:781/10000 train_time:71541ms step_avg:91.60ms +[2025-08-22 17:12:38] [Rank 0] step:781/10000 train_time:71541ms step_avg:91.60ms +[2025-08-22 17:12:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:12:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:12:54] [Rank 0] PRINT: step:800/10000 val_loss:4.4871 svd_entropy: attn_qk:H=0.7288,top10E=0.32,eRank=163.4,q75/q25=47.91 attn_vo:H=0.8353,top10E=0.05,eRank=407.5,q75/q25=inf mlp_w1:H=0.9704,top10E=0.04,eRank=631.2,q75/q25=2.95 mlp_w2:H=0.9680,top10E=0.04,eRank=620.9,q75/q25=3.04 vo_prod:H=0.6910,top10E=0.10,eRank=224.7,q75/q25=inf train_time:73380ms step_avg:91.73ms +[2025-08-22 17:12:54] [Rank 0] PRINT: step:800/10000 val_loss:4.4871 svd_entropy: attn_qk:H=0.7288,top10E=0.32,eRank=163.4,q75/q25=47.91 attn_vo:H=0.8353,top10E=0.05,eRank=407.5,q75/q25=inf mlp_w1:H=0.9704,top10E=0.04,eRank=631.2,q75/q25=2.95 mlp_w2:H=0.9680,top10E=0.04,eRank=620.9,q75/q25=3.04 vo_prod:H=0.6910,top10E=0.10,eRank=224.7,q75/q25=inf train_time:73380ms step_avg:91.73ms +[2025-08-22 17:12:54] [Rank 0] step:801/10000 train_time:73404ms step_avg:91.64ms +[2025-08-22 17:12:54] [Rank 0] step:801/10000 train_time:73404ms step_avg:91.64ms +[2025-08-22 17:12:56] [Rank 0] step:821/10000 train_time:75299ms step_avg:91.72ms +[2025-08-22 17:12:56] [Rank 0] step:821/10000 train_time:75299ms step_avg:91.72ms +[2025-08-22 17:12:58] [Rank 0] step:841/10000 train_time:77129ms step_avg:91.71ms +[2025-08-22 17:12:58] [Rank 0] step:841/10000 train_time:77129ms step_avg:91.71ms +[2025-08-22 17:12:59] [Rank 0] step:861/10000 train_time:78963ms step_avg:91.71ms +[2025-08-22 17:12:59] [Rank 0] step:861/10000 train_time:78963ms step_avg:91.71ms +[2025-08-22 17:13:01] [Rank 0] step:881/10000 train_time:80796ms step_avg:91.71ms +[2025-08-22 17:13:01] [Rank 0] step:881/10000 train_time:80796ms step_avg:91.71ms +[2025-08-22 17:13:03] [Rank 0] step:901/10000 train_time:82633ms step_avg:91.71ms +[2025-08-22 17:13:03] [Rank 0] step:901/10000 train_time:82633ms step_avg:91.71ms +[2025-08-22 17:13:05] [Rank 0] step:921/10000 train_time:84467ms step_avg:91.71ms +[2025-08-22 17:13:05] [Rank 0] step:921/10000 train_time:84467ms step_avg:91.71ms +[2025-08-22 17:13:07] [Rank 0] step:941/10000 train_time:86303ms 
step_avg:91.71ms +[2025-08-22 17:13:07] [Rank 0] step:941/10000 train_time:86303ms step_avg:91.71ms +[2025-08-22 17:13:09] [Rank 0] step:961/10000 train_time:88141ms step_avg:91.72ms +[2025-08-22 17:13:09] [Rank 0] step:961/10000 train_time:88141ms step_avg:91.72ms +[2025-08-22 17:13:10] [Rank 0] step:981/10000 train_time:89979ms step_avg:91.72ms +[2025-08-22 17:13:10] [Rank 0] step:981/10000 train_time:89979ms step_avg:91.72ms +[2025-08-22 17:13:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:13:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:13:26] [Rank 0] PRINT: step:1000/10000 val_loss:4.3627 svd_entropy: attn_qk:H=0.7318,top10E=0.31,eRank=165.3,q75/q25=47.66 attn_vo:H=0.8348,top10E=0.05,eRank=405.9,q75/q25=inf mlp_w1:H=0.9706,top10E=0.04,eRank=632.0,q75/q25=2.93 mlp_w2:H=0.9677,top10E=0.05,eRank=619.6,q75/q25=3.03 vo_prod:H=0.6904,top10E=0.10,eRank=223.6,q75/q25=inf train_time:91820ms step_avg:91.82ms +[2025-08-22 17:13:26] [Rank 0] PRINT: step:1000/10000 val_loss:4.3627 svd_entropy: attn_qk:H=0.7318,top10E=0.31,eRank=165.3,q75/q25=47.66 attn_vo:H=0.8348,top10E=0.05,eRank=405.9,q75/q25=inf mlp_w1:H=0.9706,top10E=0.04,eRank=632.0,q75/q25=2.93 mlp_w2:H=0.9677,top10E=0.05,eRank=619.6,q75/q25=3.03 vo_prod:H=0.6904,top10E=0.10,eRank=223.6,q75/q25=inf train_time:91820ms step_avg:91.82ms +[2025-08-22 17:13:26] [Rank 0] step:1001/10000 train_time:91844ms step_avg:91.75ms +[2025-08-22 17:13:26] [Rank 0] step:1001/10000 train_time:91844ms step_avg:91.75ms +[2025-08-22 17:13:28] [Rank 0] step:1021/10000 train_time:93697ms step_avg:91.77ms +[2025-08-22 17:13:28] [Rank 0] step:1021/10000 train_time:93697ms step_avg:91.77ms +[2025-08-22 17:13:30] [Rank 0] step:1041/10000 train_time:95534ms step_avg:91.77ms +[2025-08-22 17:13:30] [Rank 0] step:1041/10000 train_time:95534ms 
step_avg:91.77ms +[2025-08-22 17:13:31] [Rank 0] step:1061/10000 train_time:97372ms step_avg:91.77ms +[2025-08-22 17:13:31] [Rank 0] step:1061/10000 train_time:97372ms step_avg:91.77ms +[2025-08-22 17:13:33] [Rank 0] step:1081/10000 train_time:99211ms step_avg:91.78ms +[2025-08-22 17:13:33] [Rank 0] step:1081/10000 train_time:99211ms step_avg:91.78ms +[2025-08-22 17:13:35] [Rank 0] step:1101/10000 train_time:101051ms step_avg:91.78ms +[2025-08-22 17:13:35] [Rank 0] step:1101/10000 train_time:101051ms step_avg:91.78ms +[2025-08-22 17:13:37] [Rank 0] step:1121/10000 train_time:102891ms step_avg:91.79ms +[2025-08-22 17:13:37] [Rank 0] step:1121/10000 train_time:102891ms step_avg:91.79ms +[2025-08-22 17:13:39] [Rank 0] step:1141/10000 train_time:104732ms step_avg:91.79ms +[2025-08-22 17:13:39] [Rank 0] step:1141/10000 train_time:104732ms step_avg:91.79ms +[2025-08-22 17:13:41] [Rank 0] step:1161/10000 train_time:106575ms step_avg:91.80ms +[2025-08-22 17:13:41] [Rank 0] step:1161/10000 train_time:106575ms step_avg:91.80ms +[2025-08-22 17:13:42] [Rank 0] step:1181/10000 train_time:108417ms step_avg:91.80ms +[2025-08-22 17:13:42] [Rank 0] step:1181/10000 train_time:108417ms step_avg:91.80ms +[2025-08-22 17:13:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:13:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:13:58] [Rank 0] PRINT: step:1200/10000 val_loss:4.2676 svd_entropy: attn_qk:H=0.7341,top10E=0.31,eRank=166.8,q75/q25=48.49 attn_vo:H=0.8345,top10E=0.05,eRank=404.9,q75/q25=inf mlp_w1:H=0.9707,top10E=0.04,eRank=632.4,q75/q25=2.92 mlp_w2:H=0.9674,top10E=0.05,eRank=618.6,q75/q25=3.03 vo_prod:H=0.6900,top10E=0.10,eRank=223.0,q75/q25=inf train_time:110262ms step_avg:91.88ms +[2025-08-22 17:13:58] [Rank 0] PRINT: step:1200/10000 val_loss:4.2676 svd_entropy: attn_qk:H=0.7341,top10E=0.31,eRank=166.8,q75/q25=48.49 attn_vo:H=0.8345,top10E=0.05,eRank=404.9,q75/q25=inf mlp_w1:H=0.9707,top10E=0.04,eRank=632.4,q75/q25=2.92 mlp_w2:H=0.9674,top10E=0.05,eRank=618.6,q75/q25=3.03 vo_prod:H=0.6900,top10E=0.10,eRank=223.0,q75/q25=inf train_time:110262ms step_avg:91.88ms +[2025-08-22 17:13:58] [Rank 0] step:1201/10000 train_time:110286ms step_avg:91.83ms +[2025-08-22 17:13:58] [Rank 0] step:1201/10000 train_time:110286ms step_avg:91.83ms +[2025-08-22 17:14:00] [Rank 0] step:1221/10000 train_time:112108ms step_avg:91.82ms +[2025-08-22 17:14:00] [Rank 0] step:1221/10000 train_time:112108ms step_avg:91.82ms +[2025-08-22 17:14:02] [Rank 0] step:1241/10000 train_time:113941ms step_avg:91.81ms +[2025-08-22 17:14:02] [Rank 0] step:1241/10000 train_time:113941ms step_avg:91.81ms +[2025-08-22 17:14:03] [Rank 0] step:1261/10000 train_time:115776ms step_avg:91.81ms +[2025-08-22 17:14:03] [Rank 0] step:1261/10000 train_time:115776ms step_avg:91.81ms +[2025-08-22 17:14:05] [Rank 0] step:1281/10000 train_time:117612ms step_avg:91.81ms +[2025-08-22 17:14:05] [Rank 0] step:1281/10000 train_time:117612ms step_avg:91.81ms +[2025-08-22 17:14:07] [Rank 0] step:1301/10000 train_time:119448ms step_avg:91.81ms +[2025-08-22 17:14:07] [Rank 0] step:1301/10000 train_time:119448ms step_avg:91.81ms +[2025-08-22 17:14:09] [Rank 0] step:1321/10000 train_time:121286ms step_avg:91.81ms +[2025-08-22 17:14:09] [Rank 0] step:1321/10000 train_time:121286ms step_avg:91.81ms +[2025-08-22 17:14:11] [Rank 0] 
step:1341/10000 train_time:123124ms step_avg:91.81ms +[2025-08-22 17:14:11] [Rank 0] step:1341/10000 train_time:123124ms step_avg:91.81ms +[2025-08-22 17:14:13] [Rank 0] step:1361/10000 train_time:124961ms step_avg:91.82ms +[2025-08-22 17:14:13] [Rank 0] step:1361/10000 train_time:124961ms step_avg:91.82ms +[2025-08-22 17:14:14] [Rank 0] step:1381/10000 train_time:126798ms step_avg:91.82ms +[2025-08-22 17:14:14] [Rank 0] step:1381/10000 train_time:126798ms step_avg:91.82ms +[2025-08-22 17:14:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:14:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:14:30] [Rank 0] PRINT: step:1400/10000 val_loss:4.2185 svd_entropy: attn_qk:H=0.7360,top10E=0.30,eRank=168.2,q75/q25=49.52 attn_vo:H=0.8343,top10E=0.05,eRank=404.3,q75/q25=inf mlp_w1:H=0.9708,top10E=0.04,eRank=632.8,q75/q25=2.91 mlp_w2:H=0.9672,top10E=0.05,eRank=617.9,q75/q25=3.02 vo_prod:H=0.6898,top10E=0.10,eRank=222.7,q75/q25=inf train_time:128639ms step_avg:91.88ms +[2025-08-22 17:14:30] [Rank 0] PRINT: step:1400/10000 val_loss:4.2185 svd_entropy: attn_qk:H=0.7360,top10E=0.30,eRank=168.2,q75/q25=49.52 attn_vo:H=0.8343,top10E=0.05,eRank=404.3,q75/q25=inf mlp_w1:H=0.9708,top10E=0.04,eRank=632.8,q75/q25=2.91 mlp_w2:H=0.9672,top10E=0.05,eRank=617.9,q75/q25=3.02 vo_prod:H=0.6898,top10E=0.10,eRank=222.7,q75/q25=inf train_time:128639ms step_avg:91.88ms +[2025-08-22 17:14:30] [Rank 0] step:1401/10000 train_time:128662ms step_avg:91.84ms +[2025-08-22 17:14:30] [Rank 0] step:1401/10000 train_time:128662ms step_avg:91.84ms +[2025-08-22 17:14:32] [Rank 0] step:1421/10000 train_time:130496ms step_avg:91.83ms +[2025-08-22 17:14:32] [Rank 0] step:1421/10000 train_time:130496ms step_avg:91.83ms +[2025-08-22 17:14:34] [Rank 0] step:1441/10000 train_time:132328ms step_avg:91.83ms +[2025-08-22 
17:14:34] [Rank 0] step:1441/10000 train_time:132328ms step_avg:91.83ms +[2025-08-22 17:14:35] [Rank 0] step:1461/10000 train_time:134160ms step_avg:91.83ms +[2025-08-22 17:14:35] [Rank 0] step:1461/10000 train_time:134160ms step_avg:91.83ms +[2025-08-22 17:14:37] [Rank 0] step:1481/10000 train_time:135993ms step_avg:91.83ms +[2025-08-22 17:14:37] [Rank 0] step:1481/10000 train_time:135993ms step_avg:91.83ms +[2025-08-22 17:14:39] [Rank 0] step:1501/10000 train_time:137838ms step_avg:91.83ms +[2025-08-22 17:14:39] [Rank 0] step:1501/10000 train_time:137838ms step_avg:91.83ms +[2025-08-22 17:14:41] [Rank 0] step:1521/10000 train_time:139684ms step_avg:91.84ms +[2025-08-22 17:14:41] [Rank 0] step:1521/10000 train_time:139684ms step_avg:91.84ms +[2025-08-22 17:14:43] [Rank 0] step:1541/10000 train_time:141532ms step_avg:91.84ms +[2025-08-22 17:14:43] [Rank 0] step:1541/10000 train_time:141532ms step_avg:91.84ms +[2025-08-22 17:14:45] [Rank 0] step:1561/10000 train_time:143379ms step_avg:91.85ms +[2025-08-22 17:14:45] [Rank 0] step:1561/10000 train_time:143379ms step_avg:91.85ms +[2025-08-22 17:14:46] [Rank 0] step:1581/10000 train_time:145226ms step_avg:91.86ms +[2025-08-22 17:14:46] [Rank 0] step:1581/10000 train_time:145226ms step_avg:91.86ms +[2025-08-22 17:14:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:14:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:15:02] [Rank 0] PRINT: step:1600/10000 val_loss:4.1272 svd_entropy: attn_qk:H=0.7377,top10E=0.30,eRank=169.4,q75/q25=50.01 attn_vo:H=0.8341,top10E=0.05,eRank=403.8,q75/q25=inf mlp_w1:H=0.9709,top10E=0.04,eRank=633.1,q75/q25=2.90 mlp_w2:H=0.9671,top10E=0.05,eRank=617.3,q75/q25=3.01 vo_prod:H=0.6896,top10E=0.10,eRank=222.5,q75/q25=inf train_time:147076ms step_avg:91.92ms +[2025-08-22 17:15:02] [Rank 0] PRINT: step:1600/10000 val_loss:4.1272 svd_entropy: attn_qk:H=0.7377,top10E=0.30,eRank=169.4,q75/q25=50.01 attn_vo:H=0.8341,top10E=0.05,eRank=403.8,q75/q25=inf mlp_w1:H=0.9709,top10E=0.04,eRank=633.1,q75/q25=2.90 mlp_w2:H=0.9671,top10E=0.05,eRank=617.3,q75/q25=3.01 vo_prod:H=0.6896,top10E=0.10,eRank=222.5,q75/q25=inf train_time:147076ms step_avg:91.92ms +[2025-08-22 17:15:02] [Rank 0] step:1601/10000 train_time:147099ms step_avg:91.88ms +[2025-08-22 17:15:02] [Rank 0] step:1601/10000 train_time:147099ms step_avg:91.88ms +[2025-08-22 17:15:04] [Rank 0] step:1621/10000 train_time:148951ms step_avg:91.89ms +[2025-08-22 17:15:04] [Rank 0] step:1621/10000 train_time:148951ms step_avg:91.89ms +[2025-08-22 17:15:06] [Rank 0] step:1641/10000 train_time:150794ms step_avg:91.89ms +[2025-08-22 17:15:06] [Rank 0] step:1641/10000 train_time:150794ms step_avg:91.89ms +[2025-08-22 17:15:08] [Rank 0] step:1661/10000 train_time:152637ms step_avg:91.89ms +[2025-08-22 17:15:08] [Rank 0] step:1661/10000 train_time:152637ms step_avg:91.89ms +[2025-08-22 17:15:09] [Rank 0] step:1681/10000 train_time:154483ms step_avg:91.90ms +[2025-08-22 17:15:09] [Rank 0] step:1681/10000 train_time:154483ms step_avg:91.90ms +[2025-08-22 17:15:11] [Rank 0] step:1701/10000 train_time:156328ms step_avg:91.90ms +[2025-08-22 17:15:11] [Rank 0] step:1701/10000 train_time:156328ms step_avg:91.90ms +[2025-08-22 17:15:13] [Rank 0] step:1721/10000 train_time:158175ms step_avg:91.91ms +[2025-08-22 17:15:13] [Rank 0] step:1721/10000 train_time:158175ms step_avg:91.91ms +[2025-08-22 17:15:15] [Rank 0] 
step:1741/10000 train_time:160025ms step_avg:91.92ms +[2025-08-22 17:15:15] [Rank 0] step:1741/10000 train_time:160025ms step_avg:91.92ms +[2025-08-22 17:15:17] [Rank 0] step:1761/10000 train_time:161874ms step_avg:91.92ms +[2025-08-22 17:15:17] [Rank 0] step:1761/10000 train_time:161874ms step_avg:91.92ms +[2025-08-22 17:15:19] [Rank 0] step:1781/10000 train_time:163724ms step_avg:91.93ms +[2025-08-22 17:15:19] [Rank 0] step:1781/10000 train_time:163724ms step_avg:91.93ms +[2025-08-22 17:15:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:15:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:15:34] [Rank 0] PRINT: step:1800/10000 val_loss:4.0749 svd_entropy: attn_qk:H=0.7394,top10E=0.30,eRank=170.6,q75/q25=50.54 attn_vo:H=0.8340,top10E=0.05,eRank=403.5,q75/q25=inf mlp_w1:H=0.9710,top10E=0.04,eRank=633.4,q75/q25=2.89 mlp_w2:H=0.9670,top10E=0.05,eRank=616.8,q75/q25=3.01 vo_prod:H=0.6895,top10E=0.10,eRank=222.3,q75/q25=inf train_time:165578ms step_avg:91.99ms +[2025-08-22 17:15:34] [Rank 0] PRINT: step:1800/10000 val_loss:4.0749 svd_entropy: attn_qk:H=0.7394,top10E=0.30,eRank=170.6,q75/q25=50.54 attn_vo:H=0.8340,top10E=0.05,eRank=403.5,q75/q25=inf mlp_w1:H=0.9710,top10E=0.04,eRank=633.4,q75/q25=2.89 mlp_w2:H=0.9670,top10E=0.05,eRank=616.8,q75/q25=3.01 vo_prod:H=0.6895,top10E=0.10,eRank=222.3,q75/q25=inf train_time:165578ms step_avg:91.99ms +[2025-08-22 17:15:34] [Rank 0] step:1801/10000 train_time:165602ms step_avg:91.95ms +[2025-08-22 17:15:34] [Rank 0] step:1801/10000 train_time:165602ms step_avg:91.95ms +[2025-08-22 17:15:36] [Rank 0] step:1821/10000 train_time:167449ms step_avg:91.95ms +[2025-08-22 17:15:36] [Rank 0] step:1821/10000 train_time:167449ms step_avg:91.95ms +[2025-08-22 17:15:38] [Rank 0] step:1841/10000 train_time:169297ms step_avg:91.96ms +[2025-08-22 
17:15:38] [Rank 0] step:1841/10000 train_time:169297ms step_avg:91.96ms +[2025-08-22 17:15:39] [Rank 0] step:1861/10000 train_time:171146ms step_avg:91.96ms +[2025-08-22 17:15:39] [Rank 0] step:1861/10000 train_time:171146ms step_avg:91.96ms +[2025-08-22 17:15:41] [Rank 0] step:1881/10000 train_time:172995ms step_avg:91.97ms +[2025-08-22 17:15:41] [Rank 0] step:1881/10000 train_time:172995ms step_avg:91.97ms +[2025-08-22 17:15:43] [Rank 0] step:1901/10000 train_time:174846ms step_avg:91.98ms +[2025-08-22 17:15:43] [Rank 0] step:1901/10000 train_time:174846ms step_avg:91.98ms +[2025-08-22 17:15:45] [Rank 0] step:1921/10000 train_time:176697ms step_avg:91.98ms +[2025-08-22 17:15:45] [Rank 0] step:1921/10000 train_time:176697ms step_avg:91.98ms +[2025-08-22 17:15:47] [Rank 0] step:1941/10000 train_time:178551ms step_avg:91.99ms +[2025-08-22 17:15:47] [Rank 0] step:1941/10000 train_time:178551ms step_avg:91.99ms +[2025-08-22 17:15:49] [Rank 0] step:1961/10000 train_time:180404ms step_avg:92.00ms +[2025-08-22 17:15:49] [Rank 0] step:1961/10000 train_time:180404ms step_avg:92.00ms +[2025-08-22 17:15:51] [Rank 0] step:1981/10000 train_time:182259ms step_avg:92.00ms +[2025-08-22 17:15:51] [Rank 0] step:1981/10000 train_time:182259ms step_avg:92.00ms +[2025-08-22 17:15:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:15:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:16:06] [Rank 0] PRINT: step:2000/10000 val_loss:4.0465 svd_entropy: attn_qk:H=0.7410,top10E=0.29,eRank=171.8,q75/q25=51.42 attn_vo:H=0.8339,top10E=0.06,eRank=403.3,q75/q25=inf mlp_w1:H=0.9710,top10E=0.04,eRank=633.7,q75/q25=2.88 mlp_w2:H=0.9669,top10E=0.05,eRank=616.6,q75/q25=3.00 vo_prod:H=0.6894,top10E=0.10,eRank=222.3,q75/q25=inf train_time:184117ms step_avg:92.06ms +[2025-08-22 17:16:06] [Rank 0] PRINT: step:2000/10000 val_loss:4.0465 svd_entropy: attn_qk:H=0.7410,top10E=0.29,eRank=171.8,q75/q25=51.42 attn_vo:H=0.8339,top10E=0.06,eRank=403.3,q75/q25=inf mlp_w1:H=0.9710,top10E=0.04,eRank=633.7,q75/q25=2.88 mlp_w2:H=0.9669,top10E=0.05,eRank=616.6,q75/q25=3.00 vo_prod:H=0.6894,top10E=0.10,eRank=222.3,q75/q25=inf train_time:184117ms step_avg:92.06ms +[2025-08-22 17:16:06] [Rank 0] step:2001/10000 train_time:184140ms step_avg:92.02ms +[2025-08-22 17:16:06] [Rank 0] step:2001/10000 train_time:184140ms step_avg:92.02ms +[2025-08-22 17:16:08] [Rank 0] step:2021/10000 train_time:185974ms step_avg:92.02ms +[2025-08-22 17:16:08] [Rank 0] step:2021/10000 train_time:185974ms step_avg:92.02ms +[2025-08-22 17:16:10] [Rank 0] step:2041/10000 train_time:187873ms step_avg:92.05ms +[2025-08-22 17:16:10] [Rank 0] step:2041/10000 train_time:187873ms step_avg:92.05ms +[2025-08-22 17:16:12] [Rank 0] step:2061/10000 train_time:189719ms step_avg:92.05ms +[2025-08-22 17:16:12] [Rank 0] step:2061/10000 train_time:189719ms step_avg:92.05ms +[2025-08-22 17:16:14] [Rank 0] step:2081/10000 train_time:191565ms step_avg:92.05ms +[2025-08-22 17:16:14] [Rank 0] step:2081/10000 train_time:191565ms step_avg:92.05ms +[2025-08-22 17:16:15] [Rank 0] step:2101/10000 train_time:193412ms step_avg:92.06ms +[2025-08-22 17:16:15] [Rank 0] step:2101/10000 train_time:193412ms step_avg:92.06ms +[2025-08-22 17:16:17] [Rank 0] step:2121/10000 train_time:195258ms step_avg:92.06ms +[2025-08-22 17:16:17] [Rank 0] step:2121/10000 train_time:195258ms step_avg:92.06ms +[2025-08-22 17:16:19] [Rank 0] 
step:2141/10000 train_time:197105ms step_avg:92.06ms +[2025-08-22 17:16:19] [Rank 0] step:2141/10000 train_time:197105ms step_avg:92.06ms +[2025-08-22 17:16:21] [Rank 0] step:2161/10000 train_time:198953ms step_avg:92.07ms +[2025-08-22 17:16:21] [Rank 0] step:2161/10000 train_time:198953ms step_avg:92.07ms +[2025-08-22 17:16:23] [Rank 0] step:2181/10000 train_time:200803ms step_avg:92.07ms +[2025-08-22 17:16:23] [Rank 0] step:2181/10000 train_time:200803ms step_avg:92.07ms +[2025-08-22 17:16:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:16:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:16:38] [Rank 0] PRINT: step:2200/10000 val_loss:4.0079 svd_entropy: attn_qk:H=0.7425,top10E=0.29,eRank=172.9,q75/q25=51.32 attn_vo:H=0.8338,top10E=0.06,eRank=403.2,q75/q25=inf mlp_w1:H=0.9711,top10E=0.04,eRank=634.0,q75/q25=2.88 mlp_w2:H=0.9669,top10E=0.05,eRank=616.4,q75/q25=2.99 vo_prod:H=0.6894,top10E=0.11,eRank=222.2,q75/q25=inf train_time:202656ms step_avg:92.12ms +[2025-08-22 17:16:38] [Rank 0] PRINT: step:2200/10000 val_loss:4.0079 svd_entropy: attn_qk:H=0.7425,top10E=0.29,eRank=172.9,q75/q25=51.32 attn_vo:H=0.8338,top10E=0.06,eRank=403.2,q75/q25=inf mlp_w1:H=0.9711,top10E=0.04,eRank=634.0,q75/q25=2.88 mlp_w2:H=0.9669,top10E=0.05,eRank=616.4,q75/q25=2.99 vo_prod:H=0.6894,top10E=0.11,eRank=222.2,q75/q25=inf train_time:202656ms step_avg:92.12ms +[2025-08-22 17:16:38] [Rank 0] step:2201/10000 train_time:202678ms step_avg:92.08ms +[2025-08-22 17:16:38] [Rank 0] step:2201/10000 train_time:202678ms step_avg:92.08ms +[2025-08-22 17:16:40] [Rank 0] step:2221/10000 train_time:204527ms step_avg:92.09ms +[2025-08-22 17:16:40] [Rank 0] step:2221/10000 train_time:204527ms step_avg:92.09ms +[2025-08-22 17:16:42] [Rank 0] step:2241/10000 train_time:206408ms step_avg:92.11ms +[2025-08-22 
17:16:42] [Rank 0] step:2241/10000 train_time:206408ms step_avg:92.11ms +[2025-08-22 17:16:44] [Rank 0] step:2261/10000 train_time:208297ms step_avg:92.13ms +[2025-08-22 17:16:44] [Rank 0] step:2261/10000 train_time:208297ms step_avg:92.13ms +[2025-08-22 17:16:46] [Rank 0] step:2281/10000 train_time:210188ms step_avg:92.15ms +[2025-08-22 17:16:46] [Rank 0] step:2281/10000 train_time:210188ms step_avg:92.15ms +[2025-08-22 17:16:48] [Rank 0] step:2301/10000 train_time:212079ms step_avg:92.17ms +[2025-08-22 17:16:48] [Rank 0] step:2301/10000 train_time:212079ms step_avg:92.17ms +[2025-08-22 17:16:50] [Rank 0] step:2321/10000 train_time:213968ms step_avg:92.19ms +[2025-08-22 17:16:50] [Rank 0] step:2321/10000 train_time:213968ms step_avg:92.19ms +[2025-08-22 17:16:51] [Rank 0] step:2341/10000 train_time:215860ms step_avg:92.21ms +[2025-08-22 17:16:51] [Rank 0] step:2341/10000 train_time:215860ms step_avg:92.21ms +[2025-08-22 17:16:53] [Rank 0] step:2361/10000 train_time:217753ms step_avg:92.23ms +[2025-08-22 17:16:53] [Rank 0] step:2361/10000 train_time:217753ms step_avg:92.23ms +[2025-08-22 17:16:55] [Rank 0] step:2381/10000 train_time:219646ms step_avg:92.25ms +[2025-08-22 17:16:55] [Rank 0] step:2381/10000 train_time:219646ms step_avg:92.25ms +[2025-08-22 17:16:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:16:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:17:11] [Rank 0] PRINT: step:2400/10000 val_loss:3.9466 svd_entropy: attn_qk:H=0.7438,top10E=0.29,eRank=173.9,q75/q25=51.46 attn_vo:H=0.8338,top10E=0.06,eRank=403.1,q75/q25=inf mlp_w1:H=0.9712,top10E=0.04,eRank=634.3,q75/q25=2.87 mlp_w2:H=0.9668,top10E=0.05,eRank=616.3,q75/q25=2.99 vo_prod:H=0.6894,top10E=0.11,eRank=222.4,q75/q25=inf train_time:221541ms step_avg:92.31ms +[2025-08-22 17:17:11] [Rank 0] PRINT: step:2400/10000 val_loss:3.9466 svd_entropy: attn_qk:H=0.7438,top10E=0.29,eRank=173.9,q75/q25=51.46 attn_vo:H=0.8338,top10E=0.06,eRank=403.1,q75/q25=inf mlp_w1:H=0.9712,top10E=0.04,eRank=634.3,q75/q25=2.87 mlp_w2:H=0.9668,top10E=0.05,eRank=616.3,q75/q25=2.99 vo_prod:H=0.6894,top10E=0.11,eRank=222.4,q75/q25=inf train_time:221541ms step_avg:92.31ms +[2025-08-22 17:17:11] [Rank 0] step:2401/10000 train_time:221564ms step_avg:92.28ms +[2025-08-22 17:17:11] [Rank 0] step:2401/10000 train_time:221564ms step_avg:92.28ms +[2025-08-22 17:17:13] [Rank 0] step:2421/10000 train_time:223443ms step_avg:92.29ms +[2025-08-22 17:17:13] [Rank 0] step:2421/10000 train_time:223443ms step_avg:92.29ms +[2025-08-22 17:17:15] [Rank 0] step:2441/10000 train_time:225330ms step_avg:92.31ms +[2025-08-22 17:17:15] [Rank 0] step:2441/10000 train_time:225330ms step_avg:92.31ms +[2025-08-22 17:17:17] [Rank 0] step:2461/10000 train_time:227220ms step_avg:92.33ms +[2025-08-22 17:17:17] [Rank 0] step:2461/10000 train_time:227220ms step_avg:92.33ms +[2025-08-22 17:17:18] [Rank 0] step:2481/10000 train_time:229109ms step_avg:92.35ms +[2025-08-22 17:17:18] [Rank 0] step:2481/10000 train_time:229109ms step_avg:92.35ms +[2025-08-22 17:17:20] [Rank 0] step:2501/10000 train_time:230997ms step_avg:92.36ms +[2025-08-22 17:17:20] [Rank 0] step:2501/10000 train_time:230997ms step_avg:92.36ms +[2025-08-22 17:17:22] [Rank 0] step:2521/10000 train_time:232889ms step_avg:92.38ms +[2025-08-22 17:17:22] [Rank 0] step:2521/10000 train_time:232889ms step_avg:92.38ms +[2025-08-22 17:17:24] [Rank 0] 
step:2541/10000 train_time:234781ms step_avg:92.40ms +[2025-08-22 17:17:24] [Rank 0] step:2541/10000 train_time:234781ms step_avg:92.40ms +[2025-08-22 17:17:26] [Rank 0] step:2561/10000 train_time:236674ms step_avg:92.41ms +[2025-08-22 17:17:26] [Rank 0] step:2561/10000 train_time:236674ms step_avg:92.41ms +[2025-08-22 17:17:28] [Rank 0] step:2581/10000 train_time:238567ms step_avg:92.43ms +[2025-08-22 17:17:28] [Rank 0] step:2581/10000 train_time:238567ms step_avg:92.43ms +[2025-08-22 17:17:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:17:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:17:43] [Rank 0] PRINT: step:2600/10000 val_loss:3.9213 svd_entropy: attn_qk:H=0.7452,top10E=0.29,eRank=174.9,q75/q25=52.16 attn_vo:H=0.8338,top10E=0.06,eRank=403.2,q75/q25=inf mlp_w1:H=0.9712,top10E=0.04,eRank=634.6,q75/q25=2.86 mlp_w2:H=0.9668,top10E=0.05,eRank=616.2,q75/q25=2.98 vo_prod:H=0.6897,top10E=0.11,eRank=222.9,q75/q25=inf train_time:240462ms step_avg:92.49ms +[2025-08-22 17:17:43] [Rank 0] PRINT: step:2600/10000 val_loss:3.9213 svd_entropy: attn_qk:H=0.7452,top10E=0.29,eRank=174.9,q75/q25=52.16 attn_vo:H=0.8338,top10E=0.06,eRank=403.2,q75/q25=inf mlp_w1:H=0.9712,top10E=0.04,eRank=634.6,q75/q25=2.86 mlp_w2:H=0.9668,top10E=0.05,eRank=616.2,q75/q25=2.98 vo_prod:H=0.6897,top10E=0.11,eRank=222.9,q75/q25=inf train_time:240462ms step_avg:92.49ms +[2025-08-22 17:17:43] [Rank 0] step:2601/10000 train_time:240484ms step_avg:92.46ms +[2025-08-22 17:17:43] [Rank 0] step:2601/10000 train_time:240484ms step_avg:92.46ms +[2025-08-22 17:17:45] [Rank 0] step:2621/10000 train_time:242392ms step_avg:92.48ms +[2025-08-22 17:17:45] [Rank 0] step:2621/10000 train_time:242392ms step_avg:92.48ms +[2025-08-22 17:17:47] [Rank 0] step:2641/10000 train_time:244281ms step_avg:92.50ms +[2025-08-22 
17:17:47] [Rank 0] step:2641/10000 train_time:244281ms step_avg:92.50ms +[2025-08-22 17:17:49] [Rank 0] step:2661/10000 train_time:246174ms step_avg:92.51ms +[2025-08-22 17:17:49] [Rank 0] step:2661/10000 train_time:246174ms step_avg:92.51ms +[2025-08-22 17:17:51] [Rank 0] step:2681/10000 train_time:248066ms step_avg:92.53ms +[2025-08-22 17:17:51] [Rank 0] step:2681/10000 train_time:248066ms step_avg:92.53ms +[2025-08-22 17:17:53] [Rank 0] step:2701/10000 train_time:249962ms step_avg:92.54ms +[2025-08-22 17:17:53] [Rank 0] step:2701/10000 train_time:249962ms step_avg:92.54ms +[2025-08-22 17:17:55] [Rank 0] step:2721/10000 train_time:251856ms step_avg:92.56ms +[2025-08-22 17:17:55] [Rank 0] step:2721/10000 train_time:251856ms step_avg:92.56ms +[2025-08-22 17:17:57] [Rank 0] step:2741/10000 train_time:253754ms step_avg:92.58ms +[2025-08-22 17:17:57] [Rank 0] step:2741/10000 train_time:253754ms step_avg:92.58ms +[2025-08-22 17:17:59] [Rank 0] step:2761/10000 train_time:255651ms step_avg:92.59ms +[2025-08-22 17:17:59] [Rank 0] step:2761/10000 train_time:255651ms step_avg:92.59ms +[2025-08-22 17:18:00] [Rank 0] step:2781/10000 train_time:257550ms step_avg:92.61ms +[2025-08-22 17:18:00] [Rank 0] step:2781/10000 train_time:257550ms step_avg:92.61ms +[2025-08-22 17:18:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:18:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:18:16] [Rank 0] PRINT: step:2800/10000 val_loss:3.9028 svd_entropy: attn_qk:H=0.7465,top10E=0.29,eRank=175.9,q75/q25=52.47 attn_vo:H=0.8338,top10E=0.06,eRank=403.2,q75/q25=inf mlp_w1:H=0.9713,top10E=0.04,eRank=634.8,q75/q25=2.86 mlp_w2:H=0.9668,top10E=0.05,eRank=616.3,q75/q25=2.97 vo_prod:H=0.6898,top10E=0.11,eRank=223.2,q75/q25=inf train_time:259449ms step_avg:92.66ms +[2025-08-22 17:18:16] [Rank 0] PRINT: step:2800/10000 val_loss:3.9028 svd_entropy: attn_qk:H=0.7465,top10E=0.29,eRank=175.9,q75/q25=52.47 attn_vo:H=0.8338,top10E=0.06,eRank=403.2,q75/q25=inf mlp_w1:H=0.9713,top10E=0.04,eRank=634.8,q75/q25=2.86 mlp_w2:H=0.9668,top10E=0.05,eRank=616.3,q75/q25=2.97 vo_prod:H=0.6898,top10E=0.11,eRank=223.2,q75/q25=inf train_time:259449ms step_avg:92.66ms +[2025-08-22 17:18:16] [Rank 0] step:2801/10000 train_time:259472ms step_avg:92.64ms +[2025-08-22 17:18:16] [Rank 0] step:2801/10000 train_time:259472ms step_avg:92.64ms +[2025-08-22 17:18:18] [Rank 0] step:2821/10000 train_time:261377ms step_avg:92.65ms +[2025-08-22 17:18:18] [Rank 0] step:2821/10000 train_time:261377ms step_avg:92.65ms +[2025-08-22 17:18:20] [Rank 0] step:2841/10000 train_time:263267ms step_avg:92.67ms +[2025-08-22 17:18:20] [Rank 0] step:2841/10000 train_time:263267ms step_avg:92.67ms +[2025-08-22 17:18:22] [Rank 0] step:2861/10000 train_time:265159ms step_avg:92.68ms +[2025-08-22 17:18:22] [Rank 0] step:2861/10000 train_time:265159ms step_avg:92.68ms +[2025-08-22 17:18:24] [Rank 0] step:2881/10000 train_time:267053ms step_avg:92.69ms +[2025-08-22 17:18:24] [Rank 0] step:2881/10000 train_time:267053ms step_avg:92.69ms +[2025-08-22 17:18:26] [Rank 0] step:2901/10000 train_time:268943ms step_avg:92.71ms +[2025-08-22 17:18:26] [Rank 0] step:2901/10000 train_time:268943ms step_avg:92.71ms +[2025-08-22 17:18:28] [Rank 0] step:2921/10000 train_time:270834ms step_avg:92.72ms +[2025-08-22 17:18:28] [Rank 0] step:2921/10000 train_time:270834ms step_avg:92.72ms +[2025-08-22 17:18:29] [Rank 0] 
step:2941/10000 train_time:272729ms step_avg:92.73ms +[2025-08-22 17:18:29] [Rank 0] step:2941/10000 train_time:272729ms step_avg:92.73ms +[2025-08-22 17:18:31] [Rank 0] step:2961/10000 train_time:274620ms step_avg:92.75ms +[2025-08-22 17:18:31] [Rank 0] step:2961/10000 train_time:274620ms step_avg:92.75ms +[2025-08-22 17:18:33] [Rank 0] step:2981/10000 train_time:276519ms step_avg:92.76ms +[2025-08-22 17:18:33] [Rank 0] step:2981/10000 train_time:276519ms step_avg:92.76ms +[2025-08-22 17:18:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:18:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:18:49] [Rank 0] PRINT: step:3000/10000 val_loss:3.8727 svd_entropy: attn_qk:H=0.7478,top10E=0.28,eRank=176.9,q75/q25=52.64 attn_vo:H=0.8339,top10E=0.06,eRank=403.3,q75/q25=inf mlp_w1:H=0.9714,top10E=0.04,eRank=635.1,q75/q25=2.85 mlp_w2:H=0.9668,top10E=0.05,eRank=616.2,q75/q25=2.97 vo_prod:H=0.6901,top10E=0.11,eRank=223.6,q75/q25=inf train_time:278423ms step_avg:92.81ms +[2025-08-22 17:18:49] [Rank 0] PRINT: step:3000/10000 val_loss:3.8727 svd_entropy: attn_qk:H=0.7478,top10E=0.28,eRank=176.9,q75/q25=52.64 attn_vo:H=0.8339,top10E=0.06,eRank=403.3,q75/q25=inf mlp_w1:H=0.9714,top10E=0.04,eRank=635.1,q75/q25=2.85 mlp_w2:H=0.9668,top10E=0.05,eRank=616.2,q75/q25=2.97 vo_prod:H=0.6901,top10E=0.11,eRank=223.6,q75/q25=inf train_time:278423ms step_avg:92.81ms +[2025-08-22 17:18:49] [Rank 0] step:3001/10000 train_time:278444ms step_avg:92.78ms +[2025-08-22 17:18:49] [Rank 0] step:3001/10000 train_time:278444ms step_avg:92.78ms +[2025-08-22 17:18:51] [Rank 0] step:3021/10000 train_time:280332ms step_avg:92.79ms +[2025-08-22 17:18:51] [Rank 0] step:3021/10000 train_time:280332ms step_avg:92.79ms +[2025-08-22 17:18:53] [Rank 0] step:3041/10000 train_time:282224ms step_avg:92.81ms +[2025-08-22 
17:18:53] [Rank 0] step:3041/10000 train_time:282224ms step_avg:92.81ms +[2025-08-22 17:18:54] [Rank 0] step:3061/10000 train_time:284120ms step_avg:92.82ms +[2025-08-22 17:18:54] [Rank 0] step:3061/10000 train_time:284120ms step_avg:92.82ms +[2025-08-22 17:18:56] [Rank 0] step:3081/10000 train_time:286017ms step_avg:92.83ms +[2025-08-22 17:18:56] [Rank 0] step:3081/10000 train_time:286017ms step_avg:92.83ms +[2025-08-22 17:18:58] [Rank 0] step:3101/10000 train_time:287914ms step_avg:92.85ms +[2025-08-22 17:18:58] [Rank 0] step:3101/10000 train_time:287914ms step_avg:92.85ms +[2025-08-22 17:19:00] [Rank 0] step:3121/10000 train_time:289811ms step_avg:92.86ms +[2025-08-22 17:19:00] [Rank 0] step:3121/10000 train_time:289811ms step_avg:92.86ms +[2025-08-22 17:19:02] [Rank 0] step:3141/10000 train_time:291709ms step_avg:92.87ms +[2025-08-22 17:19:02] [Rank 0] step:3141/10000 train_time:291709ms step_avg:92.87ms +[2025-08-22 17:19:04] [Rank 0] step:3161/10000 train_time:293609ms step_avg:92.88ms +[2025-08-22 17:19:04] [Rank 0] step:3161/10000 train_time:293609ms step_avg:92.88ms +[2025-08-22 17:19:06] [Rank 0] step:3181/10000 train_time:295508ms step_avg:92.90ms +[2025-08-22 17:19:06] [Rank 0] step:3181/10000 train_time:295508ms step_avg:92.90ms +[2025-08-22 17:19:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:19:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:19:21] [Rank 0] PRINT: step:3200/10000 val_loss:3.8507 svd_entropy: attn_qk:H=0.7490,top10E=0.28,eRank=177.8,q75/q25=52.69 attn_vo:H=0.8338,top10E=0.06,eRank=403.3,q75/q25=inf mlp_w1:H=0.9714,top10E=0.04,eRank=635.3,q75/q25=2.85 mlp_w2:H=0.9668,top10E=0.05,eRank=616.2,q75/q25=2.96 vo_prod:H=0.6902,top10E=0.11,eRank=223.9,q75/q25=inf train_time:297413ms step_avg:92.94ms +[2025-08-22 17:19:21] [Rank 0] PRINT: step:3200/10000 val_loss:3.8507 svd_entropy: attn_qk:H=0.7490,top10E=0.28,eRank=177.8,q75/q25=52.69 attn_vo:H=0.8338,top10E=0.06,eRank=403.3,q75/q25=inf mlp_w1:H=0.9714,top10E=0.04,eRank=635.3,q75/q25=2.85 mlp_w2:H=0.9668,top10E=0.05,eRank=616.2,q75/q25=2.96 vo_prod:H=0.6902,top10E=0.11,eRank=223.9,q75/q25=inf train_time:297413ms step_avg:92.94ms +[2025-08-22 17:19:21] [Rank 0] step:3201/10000 train_time:297436ms step_avg:92.92ms +[2025-08-22 17:19:21] [Rank 0] step:3201/10000 train_time:297436ms step_avg:92.92ms +[2025-08-22 17:19:23] [Rank 0] step:3221/10000 train_time:299322ms step_avg:92.93ms +[2025-08-22 17:19:23] [Rank 0] step:3221/10000 train_time:299322ms step_avg:92.93ms +[2025-08-22 17:19:25] [Rank 0] step:3241/10000 train_time:301217ms step_avg:92.94ms +[2025-08-22 17:19:25] [Rank 0] step:3241/10000 train_time:301217ms step_avg:92.94ms +[2025-08-22 17:19:27] [Rank 0] step:3261/10000 train_time:303114ms step_avg:92.95ms +[2025-08-22 17:19:27] [Rank 0] step:3261/10000 train_time:303114ms step_avg:92.95ms +[2025-08-22 17:19:29] [Rank 0] step:3281/10000 train_time:305012ms step_avg:92.96ms +[2025-08-22 17:19:29] [Rank 0] step:3281/10000 train_time:305012ms step_avg:92.96ms +[2025-08-22 17:19:31] [Rank 0] step:3301/10000 train_time:306910ms step_avg:92.97ms +[2025-08-22 17:19:31] [Rank 0] step:3301/10000 train_time:306910ms step_avg:92.97ms +[2025-08-22 17:19:33] [Rank 0] step:3321/10000 train_time:308810ms step_avg:92.99ms +[2025-08-22 17:19:33] [Rank 0] step:3321/10000 train_time:308810ms step_avg:92.99ms +[2025-08-22 17:19:35] [Rank 0] 
step:3341/10000 train_time:310708ms step_avg:93.00ms +[2025-08-22 17:19:35] [Rank 0] step:3341/10000 train_time:310708ms step_avg:93.00ms +[2025-08-22 17:19:37] [Rank 0] step:3361/10000 train_time:312608ms step_avg:93.01ms +[2025-08-22 17:19:37] [Rank 0] step:3361/10000 train_time:312608ms step_avg:93.01ms +[2025-08-22 17:19:39] [Rank 0] step:3381/10000 train_time:314511ms step_avg:93.02ms +[2025-08-22 17:19:39] [Rank 0] step:3381/10000 train_time:314511ms step_avg:93.02ms +[2025-08-22 17:19:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:19:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:19:54] [Rank 0] PRINT: step:3400/10000 val_loss:3.8250 svd_entropy: attn_qk:H=0.7503,top10E=0.28,eRank=178.9,q75/q25=53.06 attn_vo:H=0.8339,top10E=0.06,eRank=403.3,q75/q25=inf mlp_w1:H=0.9715,top10E=0.04,eRank=635.5,q75/q25=2.84 mlp_w2:H=0.9669,top10E=0.05,eRank=616.3,q75/q25=2.96 vo_prod:H=0.6903,top10E=0.11,eRank=224.1,q75/q25=inf train_time:316414ms step_avg:93.06ms +[2025-08-22 17:19:54] [Rank 0] PRINT: step:3400/10000 val_loss:3.8250 svd_entropy: attn_qk:H=0.7503,top10E=0.28,eRank=178.9,q75/q25=53.06 attn_vo:H=0.8339,top10E=0.06,eRank=403.3,q75/q25=inf mlp_w1:H=0.9715,top10E=0.04,eRank=635.5,q75/q25=2.84 mlp_w2:H=0.9669,top10E=0.05,eRank=616.3,q75/q25=2.96 vo_prod:H=0.6903,top10E=0.11,eRank=224.1,q75/q25=inf train_time:316414ms step_avg:93.06ms +[2025-08-22 17:19:54] [Rank 0] step:3401/10000 train_time:316436ms step_avg:93.04ms +[2025-08-22 17:19:54] [Rank 0] step:3401/10000 train_time:316436ms step_avg:93.04ms +[2025-08-22 17:19:56] [Rank 0] step:3421/10000 train_time:318322ms step_avg:93.05ms +[2025-08-22 17:19:56] [Rank 0] step:3421/10000 train_time:318322ms step_avg:93.05ms +[2025-08-22 17:19:58] [Rank 0] step:3441/10000 train_time:320220ms step_avg:93.06ms +[2025-08-22 
17:19:58] [Rank 0] step:3441/10000 train_time:320220ms step_avg:93.06ms +[2025-08-22 17:20:00] [Rank 0] step:3461/10000 train_time:322124ms step_avg:93.07ms +[2025-08-22 17:20:00] [Rank 0] step:3461/10000 train_time:322124ms step_avg:93.07ms +[2025-08-22 17:20:02] [Rank 0] step:3481/10000 train_time:324025ms step_avg:93.08ms +[2025-08-22 17:20:02] [Rank 0] step:3481/10000 train_time:324025ms step_avg:93.08ms +[2025-08-22 17:20:04] [Rank 0] step:3501/10000 train_time:325931ms step_avg:93.10ms +[2025-08-22 17:20:04] [Rank 0] step:3501/10000 train_time:325931ms step_avg:93.10ms +[2025-08-22 17:20:06] [Rank 0] step:3521/10000 train_time:327834ms step_avg:93.11ms +[2025-08-22 17:20:06] [Rank 0] step:3521/10000 train_time:327834ms step_avg:93.11ms +[2025-08-22 17:20:07] [Rank 0] step:3541/10000 train_time:329739ms step_avg:93.12ms +[2025-08-22 17:20:07] [Rank 0] step:3541/10000 train_time:329739ms step_avg:93.12ms +[2025-08-22 17:20:09] [Rank 0] step:3561/10000 train_time:331642ms step_avg:93.13ms +[2025-08-22 17:20:09] [Rank 0] step:3561/10000 train_time:331642ms step_avg:93.13ms +[2025-08-22 17:20:11] [Rank 0] step:3581/10000 train_time:333548ms step_avg:93.14ms +[2025-08-22 17:20:11] [Rank 0] step:3581/10000 train_time:333548ms step_avg:93.14ms +[2025-08-22 17:20:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:20:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:20:27] [Rank 0] PRINT: step:3600/10000 val_loss:3.8189 svd_entropy: attn_qk:H=0.7513,top10E=0.28,eRank=179.7,q75/q25=53.49 attn_vo:H=0.8339,top10E=0.06,eRank=403.4,q75/q25=inf mlp_w1:H=0.9715,top10E=0.04,eRank=635.7,q75/q25=2.84 mlp_w2:H=0.9669,top10E=0.05,eRank=616.4,q75/q25=2.95 vo_prod:H=0.6905,top10E=0.11,eRank=224.4,q75/q25=inf train_time:335459ms step_avg:93.18ms +[2025-08-22 17:20:27] [Rank 0] PRINT: step:3600/10000 val_loss:3.8189 svd_entropy: attn_qk:H=0.7513,top10E=0.28,eRank=179.7,q75/q25=53.49 attn_vo:H=0.8339,top10E=0.06,eRank=403.4,q75/q25=inf mlp_w1:H=0.9715,top10E=0.04,eRank=635.7,q75/q25=2.84 mlp_w2:H=0.9669,top10E=0.05,eRank=616.4,q75/q25=2.95 vo_prod:H=0.6905,top10E=0.11,eRank=224.4,q75/q25=inf train_time:335459ms step_avg:93.18ms +[2025-08-22 17:20:27] [Rank 0] step:3601/10000 train_time:335482ms step_avg:93.16ms +[2025-08-22 17:20:27] [Rank 0] step:3601/10000 train_time:335482ms step_avg:93.16ms +[2025-08-22 17:20:29] [Rank 0] step:3621/10000 train_time:337377ms step_avg:93.17ms +[2025-08-22 17:20:29] [Rank 0] step:3621/10000 train_time:337377ms step_avg:93.17ms +[2025-08-22 17:20:31] [Rank 0] step:3641/10000 train_time:339273ms step_avg:93.18ms +[2025-08-22 17:20:31] [Rank 0] step:3641/10000 train_time:339273ms step_avg:93.18ms +[2025-08-22 17:20:33] [Rank 0] step:3661/10000 train_time:341174ms step_avg:93.19ms +[2025-08-22 17:20:33] [Rank 0] step:3661/10000 train_time:341174ms step_avg:93.19ms +[2025-08-22 17:20:34] [Rank 0] step:3681/10000 train_time:343073ms step_avg:93.20ms +[2025-08-22 17:20:34] [Rank 0] step:3681/10000 train_time:343073ms step_avg:93.20ms +[2025-08-22 17:20:36] [Rank 0] step:3701/10000 train_time:344971ms step_avg:93.21ms +[2025-08-22 17:20:36] [Rank 0] step:3701/10000 train_time:344971ms step_avg:93.21ms +[2025-08-22 17:20:38] [Rank 0] step:3721/10000 train_time:346901ms step_avg:93.23ms +[2025-08-22 17:20:38] [Rank 0] step:3721/10000 train_time:346901ms step_avg:93.23ms +[2025-08-22 17:20:40] [Rank 0] 
step:3741/10000 train_time:348838ms step_avg:93.25ms +[2025-08-22 17:20:40] [Rank 0] step:3741/10000 train_time:348838ms step_avg:93.25ms +[2025-08-22 17:20:42] [Rank 0] step:3761/10000 train_time:350776ms step_avg:93.27ms +[2025-08-22 17:20:42] [Rank 0] step:3761/10000 train_time:350776ms step_avg:93.27ms +[2025-08-22 17:20:44] [Rank 0] step:3781/10000 train_time:352715ms step_avg:93.29ms +[2025-08-22 17:20:44] [Rank 0] step:3781/10000 train_time:352715ms step_avg:93.29ms +[2025-08-22 17:20:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:20:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:21:00] [Rank 0] PRINT: step:3800/10000 val_loss:3.7894 svd_entropy: attn_qk:H=0.7524,top10E=0.28,eRank=180.6,q75/q25=53.53 attn_vo:H=0.8339,top10E=0.06,eRank=403.5,q75/q25=inf mlp_w1:H=0.9715,top10E=0.04,eRank=635.8,q75/q25=2.84 mlp_w2:H=0.9669,top10E=0.05,eRank=616.4,q75/q25=2.95 vo_prod:H=0.6906,top10E=0.11,eRank=224.7,q75/q25=inf train_time:354656ms step_avg:93.33ms +[2025-08-22 17:21:00] [Rank 0] PRINT: step:3800/10000 val_loss:3.7894 svd_entropy: attn_qk:H=0.7524,top10E=0.28,eRank=180.6,q75/q25=53.53 attn_vo:H=0.8339,top10E=0.06,eRank=403.5,q75/q25=inf mlp_w1:H=0.9715,top10E=0.04,eRank=635.8,q75/q25=2.84 mlp_w2:H=0.9669,top10E=0.05,eRank=616.4,q75/q25=2.95 vo_prod:H=0.6906,top10E=0.11,eRank=224.7,q75/q25=inf train_time:354656ms step_avg:93.33ms +[2025-08-22 17:21:00] [Rank 0] step:3801/10000 train_time:354678ms step_avg:93.31ms +[2025-08-22 17:21:00] [Rank 0] step:3801/10000 train_time:354678ms step_avg:93.31ms +[2025-08-22 17:21:02] [Rank 0] step:3821/10000 train_time:356610ms step_avg:93.33ms +[2025-08-22 17:21:02] [Rank 0] step:3821/10000 train_time:356610ms step_avg:93.33ms +[2025-08-22 17:21:04] [Rank 0] step:3841/10000 train_time:358547ms step_avg:93.35ms +[2025-08-22 
17:21:04] [Rank 0] step:3841/10000 train_time:358547ms step_avg:93.35ms +[2025-08-22 17:21:06] [Rank 0] step:3861/10000 train_time:360480ms step_avg:93.36ms +[2025-08-22 17:21:06] [Rank 0] step:3861/10000 train_time:360480ms step_avg:93.36ms +[2025-08-22 17:21:07] [Rank 0] step:3881/10000 train_time:362412ms step_avg:93.38ms +[2025-08-22 17:21:07] [Rank 0] step:3881/10000 train_time:362412ms step_avg:93.38ms +[2025-08-22 17:21:09] [Rank 0] step:3901/10000 train_time:364347ms step_avg:93.40ms +[2025-08-22 17:21:09] [Rank 0] step:3901/10000 train_time:364347ms step_avg:93.40ms +[2025-08-22 17:21:11] [Rank 0] step:3921/10000 train_time:366280ms step_avg:93.41ms +[2025-08-22 17:21:11] [Rank 0] step:3921/10000 train_time:366280ms step_avg:93.41ms +[2025-08-22 17:21:13] [Rank 0] step:3941/10000 train_time:368217ms step_avg:93.43ms +[2025-08-22 17:21:13] [Rank 0] step:3941/10000 train_time:368217ms step_avg:93.43ms +[2025-08-22 17:21:15] [Rank 0] step:3961/10000 train_time:370151ms step_avg:93.45ms +[2025-08-22 17:21:15] [Rank 0] step:3961/10000 train_time:370151ms step_avg:93.45ms +[2025-08-22 17:21:17] [Rank 0] step:3981/10000 train_time:372087ms step_avg:93.47ms +[2025-08-22 17:21:17] [Rank 0] step:3981/10000 train_time:372087ms step_avg:93.47ms +[2025-08-22 17:21:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:21:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:21:33] [Rank 0] PRINT: step:4000/10000 val_loss:3.7728 svd_entropy: attn_qk:H=0.7535,top10E=0.28,eRank=181.4,q75/q25=53.67 attn_vo:H=0.8339,top10E=0.06,eRank=403.6,q75/q25=inf mlp_w1:H=0.9716,top10E=0.04,eRank=636.0,q75/q25=2.83 mlp_w2:H=0.9669,top10E=0.05,eRank=616.5,q75/q25=2.95 vo_prod:H=0.6908,top10E=0.11,eRank=225.0,q75/q25=inf train_time:374083ms step_avg:93.52ms +[2025-08-22 17:21:33] [Rank 0] PRINT: step:4000/10000 val_loss:3.7728 svd_entropy: attn_qk:H=0.7535,top10E=0.28,eRank=181.4,q75/q25=53.67 attn_vo:H=0.8339,top10E=0.06,eRank=403.6,q75/q25=inf mlp_w1:H=0.9716,top10E=0.04,eRank=636.0,q75/q25=2.83 mlp_w2:H=0.9669,top10E=0.05,eRank=616.5,q75/q25=2.95 vo_prod:H=0.6908,top10E=0.11,eRank=225.0,q75/q25=inf train_time:374083ms step_avg:93.52ms +[2025-08-22 17:21:33] [Rank 0] step:4001/10000 train_time:374105ms step_avg:93.50ms +[2025-08-22 17:21:33] [Rank 0] step:4001/10000 train_time:374105ms step_avg:93.50ms +[2025-08-22 17:21:35] [Rank 0] step:4021/10000 train_time:376026ms step_avg:93.52ms +[2025-08-22 17:21:35] [Rank 0] step:4021/10000 train_time:376026ms step_avg:93.52ms +[2025-08-22 17:21:37] [Rank 0] step:4041/10000 train_time:377960ms step_avg:93.53ms +[2025-08-22 17:21:37] [Rank 0] step:4041/10000 train_time:377960ms step_avg:93.53ms +[2025-08-22 17:21:39] [Rank 0] step:4061/10000 train_time:379894ms step_avg:93.55ms +[2025-08-22 17:21:39] [Rank 0] step:4061/10000 train_time:379894ms step_avg:93.55ms +[2025-08-22 17:21:42] [Rank 0] step:4081/10000 train_time:382489ms step_avg:93.72ms +[2025-08-22 17:21:42] [Rank 0] step:4081/10000 train_time:382489ms step_avg:93.72ms +[2025-08-22 17:21:43] [Rank 0] step:4101/10000 train_time:384420ms step_avg:93.74ms +[2025-08-22 17:21:43] [Rank 0] step:4101/10000 train_time:384420ms step_avg:93.74ms +[2025-08-22 17:21:45] [Rank 0] step:4121/10000 train_time:386353ms step_avg:93.75ms +[2025-08-22 17:21:45] [Rank 0] step:4121/10000 train_time:386353ms step_avg:93.75ms +[2025-08-22 17:21:47] [Rank 0] 
step:4141/10000 train_time:388289ms step_avg:93.77ms +[2025-08-22 17:21:47] [Rank 0] step:4141/10000 train_time:388289ms step_avg:93.77ms +[2025-08-22 17:21:49] [Rank 0] step:4161/10000 train_time:390224ms step_avg:93.78ms +[2025-08-22 17:21:49] [Rank 0] step:4161/10000 train_time:390224ms step_avg:93.78ms +[2025-08-22 17:21:51] [Rank 0] step:4181/10000 train_time:392163ms step_avg:93.80ms +[2025-08-22 17:21:51] [Rank 0] step:4181/10000 train_time:392163ms step_avg:93.80ms +[2025-08-22 17:21:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:21:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:22:07] [Rank 0] PRINT: step:4200/10000 val_loss:3.7591 svd_entropy: attn_qk:H=0.7545,top10E=0.27,eRank=182.3,q75/q25=53.76 attn_vo:H=0.8340,top10E=0.06,eRank=403.6,q75/q25=inf mlp_w1:H=0.9716,top10E=0.04,eRank=636.2,q75/q25=2.83 mlp_w2:H=0.9669,top10E=0.05,eRank=616.7,q75/q25=2.94 vo_prod:H=0.6909,top10E=0.11,eRank=225.3,q75/q25=inf train_time:394103ms step_avg:93.83ms +[2025-08-22 17:22:07] [Rank 0] PRINT: step:4200/10000 val_loss:3.7591 svd_entropy: attn_qk:H=0.7545,top10E=0.27,eRank=182.3,q75/q25=53.76 attn_vo:H=0.8340,top10E=0.06,eRank=403.6,q75/q25=inf mlp_w1:H=0.9716,top10E=0.04,eRank=636.2,q75/q25=2.83 mlp_w2:H=0.9669,top10E=0.05,eRank=616.7,q75/q25=2.94 vo_prod:H=0.6909,top10E=0.11,eRank=225.3,q75/q25=inf train_time:394103ms step_avg:93.83ms +[2025-08-22 17:22:07] [Rank 0] step:4201/10000 train_time:394126ms step_avg:93.82ms +[2025-08-22 17:22:07] [Rank 0] step:4201/10000 train_time:394126ms step_avg:93.82ms +[2025-08-22 17:22:09] [Rank 0] step:4221/10000 train_time:396065ms step_avg:93.83ms +[2025-08-22 17:22:09] [Rank 0] step:4221/10000 train_time:396065ms step_avg:93.83ms +[2025-08-22 17:22:11] [Rank 0] step:4241/10000 train_time:398002ms step_avg:93.85ms +[2025-08-22 
17:22:11] [Rank 0] step:4241/10000 train_time:398002ms step_avg:93.85ms +[2025-08-22 17:22:13] [Rank 0] step:4261/10000 train_time:399935ms step_avg:93.86ms +[2025-08-22 17:22:13] [Rank 0] step:4261/10000 train_time:399935ms step_avg:93.86ms +[2025-08-22 17:22:15] [Rank 0] step:4281/10000 train_time:401876ms step_avg:93.87ms +[2025-08-22 17:22:15] [Rank 0] step:4281/10000 train_time:401876ms step_avg:93.87ms +[2025-08-22 17:22:17] [Rank 0] step:4301/10000 train_time:403814ms step_avg:93.89ms +[2025-08-22 17:22:17] [Rank 0] step:4301/10000 train_time:403814ms step_avg:93.89ms +[2025-08-22 17:22:19] [Rank 0] step:4321/10000 train_time:405752ms step_avg:93.90ms +[2025-08-22 17:22:19] [Rank 0] step:4321/10000 train_time:405752ms step_avg:93.90ms +[2025-08-22 17:22:21] [Rank 0] step:4341/10000 train_time:407690ms step_avg:93.92ms +[2025-08-22 17:22:21] [Rank 0] step:4341/10000 train_time:407690ms step_avg:93.92ms +[2025-08-22 17:22:23] [Rank 0] step:4361/10000 train_time:409690ms step_avg:93.94ms +[2025-08-22 17:22:23] [Rank 0] step:4361/10000 train_time:409690ms step_avg:93.94ms +[2025-08-22 17:22:25] [Rank 0] step:4381/10000 train_time:411752ms step_avg:93.99ms +[2025-08-22 17:22:25] [Rank 0] step:4381/10000 train_time:411752ms step_avg:93.99ms +[2025-08-22 17:22:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:22:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:22:40] [Rank 0] PRINT: step:4400/10000 val_loss:3.7463 svd_entropy: attn_qk:H=0.7554,top10E=0.27,eRank=183.0,q75/q25=54.15 attn_vo:H=0.8340,top10E=0.06,eRank=403.7,q75/q25=inf mlp_w1:H=0.9717,top10E=0.04,eRank=636.4,q75/q25=2.82 mlp_w2:H=0.9670,top10E=0.05,eRank=616.8,q75/q25=2.94 vo_prod:H=0.6910,top10E=0.11,eRank=225.5,q75/q25=inf train_time:413696ms step_avg:94.02ms +[2025-08-22 17:22:40] [Rank 0] PRINT: step:4400/10000 val_loss:3.7463 svd_entropy: attn_qk:H=0.7554,top10E=0.27,eRank=183.0,q75/q25=54.15 attn_vo:H=0.8340,top10E=0.06,eRank=403.7,q75/q25=inf mlp_w1:H=0.9717,top10E=0.04,eRank=636.4,q75/q25=2.82 mlp_w2:H=0.9670,top10E=0.05,eRank=616.8,q75/q25=2.94 vo_prod:H=0.6910,top10E=0.11,eRank=225.5,q75/q25=inf train_time:413696ms step_avg:94.02ms +[2025-08-22 17:22:41] [Rank 0] step:4401/10000 train_time:413718ms step_avg:94.01ms +[2025-08-22 17:22:41] [Rank 0] step:4401/10000 train_time:413718ms step_avg:94.01ms +[2025-08-22 17:22:43] [Rank 0] step:4421/10000 train_time:415654ms step_avg:94.02ms +[2025-08-22 17:22:43] [Rank 0] step:4421/10000 train_time:415654ms step_avg:94.02ms +[2025-08-22 17:22:44] [Rank 0] step:4441/10000 train_time:417584ms step_avg:94.03ms +[2025-08-22 17:22:44] [Rank 0] step:4441/10000 train_time:417584ms step_avg:94.03ms +[2025-08-22 17:22:46] [Rank 0] step:4461/10000 train_time:419522ms step_avg:94.04ms +[2025-08-22 17:22:46] [Rank 0] step:4461/10000 train_time:419522ms step_avg:94.04ms +[2025-08-22 17:22:48] [Rank 0] step:4481/10000 train_time:421462ms step_avg:94.06ms +[2025-08-22 17:22:48] [Rank 0] step:4481/10000 train_time:421462ms step_avg:94.06ms +[2025-08-22 17:22:50] [Rank 0] step:4501/10000 train_time:423400ms step_avg:94.07ms +[2025-08-22 17:22:50] [Rank 0] step:4501/10000 train_time:423400ms step_avg:94.07ms +[2025-08-22 17:22:52] [Rank 0] step:4521/10000 train_time:425339ms step_avg:94.08ms +[2025-08-22 17:22:52] [Rank 0] step:4521/10000 train_time:425339ms step_avg:94.08ms +[2025-08-22 17:22:54] [Rank 0] 
step:4541/10000 train_time:427280ms step_avg:94.09ms +[2025-08-22 17:22:54] [Rank 0] step:4541/10000 train_time:427280ms step_avg:94.09ms +[2025-08-22 17:22:56] [Rank 0] step:4561/10000 train_time:429221ms step_avg:94.11ms +[2025-08-22 17:22:56] [Rank 0] step:4561/10000 train_time:429221ms step_avg:94.11ms +[2025-08-22 17:22:58] [Rank 0] step:4581/10000 train_time:431164ms step_avg:94.12ms +[2025-08-22 17:22:58] [Rank 0] step:4581/10000 train_time:431164ms step_avg:94.12ms +[2025-08-22 17:23:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:23:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:23:14] [Rank 0] PRINT: step:4600/10000 val_loss:3.7306 svd_entropy: attn_qk:H=0.7563,top10E=0.27,eRank=183.8,q75/q25=54.09 attn_vo:H=0.8340,top10E=0.06,eRank=403.8,q75/q25=inf mlp_w1:H=0.9717,top10E=0.04,eRank=636.5,q75/q25=2.82 mlp_w2:H=0.9670,top10E=0.05,eRank=616.9,q75/q25=2.93 vo_prod:H=0.6911,top10E=0.11,eRank=225.6,q75/q25=inf train_time:433111ms step_avg:94.15ms +[2025-08-22 17:23:14] [Rank 0] PRINT: step:4600/10000 val_loss:3.7306 svd_entropy: attn_qk:H=0.7563,top10E=0.27,eRank=183.8,q75/q25=54.09 attn_vo:H=0.8340,top10E=0.06,eRank=403.8,q75/q25=inf mlp_w1:H=0.9717,top10E=0.04,eRank=636.5,q75/q25=2.82 mlp_w2:H=0.9670,top10E=0.05,eRank=616.9,q75/q25=2.93 vo_prod:H=0.6911,top10E=0.11,eRank=225.6,q75/q25=inf train_time:433111ms step_avg:94.15ms +[2025-08-22 17:23:14] [Rank 0] step:4601/10000 train_time:433134ms step_avg:94.14ms +[2025-08-22 17:23:14] [Rank 0] step:4601/10000 train_time:433134ms step_avg:94.14ms +[2025-08-22 17:23:16] [Rank 0] step:4621/10000 train_time:435066ms step_avg:94.15ms +[2025-08-22 17:23:16] [Rank 0] step:4621/10000 train_time:435066ms step_avg:94.15ms +[2025-08-22 17:23:18] [Rank 0] step:4641/10000 train_time:437004ms step_avg:94.16ms +[2025-08-22 
17:23:18] [Rank 0] step:4641/10000 train_time:437004ms step_avg:94.16ms +[2025-08-22 17:23:20] [Rank 0] step:4661/10000 train_time:438942ms step_avg:94.17ms +[2025-08-22 17:23:20] [Rank 0] step:4661/10000 train_time:438942ms step_avg:94.17ms +[2025-08-22 17:23:22] [Rank 0] step:4681/10000 train_time:440881ms step_avg:94.19ms +[2025-08-22 17:23:22] [Rank 0] step:4681/10000 train_time:440881ms step_avg:94.19ms +[2025-08-22 17:23:24] [Rank 0] step:4701/10000 train_time:442821ms step_avg:94.20ms +[2025-08-22 17:23:24] [Rank 0] step:4701/10000 train_time:442821ms step_avg:94.20ms +[2025-08-22 17:23:26] [Rank 0] step:4721/10000 train_time:444815ms step_avg:94.22ms +[2025-08-22 17:23:26] [Rank 0] step:4721/10000 train_time:444815ms step_avg:94.22ms +[2025-08-22 17:23:28] [Rank 0] step:4741/10000 train_time:446819ms step_avg:94.25ms +[2025-08-22 17:23:28] [Rank 0] step:4741/10000 train_time:446819ms step_avg:94.25ms +[2025-08-22 17:23:30] [Rank 0] step:4761/10000 train_time:448760ms step_avg:94.26ms +[2025-08-22 17:23:30] [Rank 0] step:4761/10000 train_time:448760ms step_avg:94.26ms +[2025-08-22 17:23:32] [Rank 0] step:4781/10000 train_time:450700ms step_avg:94.27ms +[2025-08-22 17:23:32] [Rank 0] step:4781/10000 train_time:450700ms step_avg:94.27ms +[2025-08-22 17:23:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:23:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:23:47] [Rank 0] PRINT: step:4800/10000 val_loss:3.7226 svd_entropy: attn_qk:H=0.7572,top10E=0.27,eRank=184.5,q75/q25=54.57 attn_vo:H=0.8340,top10E=0.06,eRank=403.9,q75/q25=inf mlp_w1:H=0.9717,top10E=0.04,eRank=636.6,q75/q25=2.82 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.92 vo_prod:H=0.6912,top10E=0.11,eRank=225.9,q75/q25=inf train_time:452644ms step_avg:94.30ms +[2025-08-22 17:23:47] [Rank 0] PRINT: step:4800/10000 val_loss:3.7226 svd_entropy: attn_qk:H=0.7572,top10E=0.27,eRank=184.5,q75/q25=54.57 attn_vo:H=0.8340,top10E=0.06,eRank=403.9,q75/q25=inf mlp_w1:H=0.9717,top10E=0.04,eRank=636.6,q75/q25=2.82 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.92 vo_prod:H=0.6912,top10E=0.11,eRank=225.9,q75/q25=inf train_time:452644ms step_avg:94.30ms +[2025-08-22 17:23:47] [Rank 0] step:4801/10000 train_time:452666ms step_avg:94.29ms +[2025-08-22 17:23:47] [Rank 0] step:4801/10000 train_time:452666ms step_avg:94.29ms +[2025-08-22 17:23:49] [Rank 0] step:4821/10000 train_time:454596ms step_avg:94.29ms +[2025-08-22 17:23:49] [Rank 0] step:4821/10000 train_time:454596ms step_avg:94.29ms +[2025-08-22 17:23:51] [Rank 0] step:4841/10000 train_time:456533ms step_avg:94.31ms +[2025-08-22 17:23:51] [Rank 0] step:4841/10000 train_time:456533ms step_avg:94.31ms +[2025-08-22 17:23:53] [Rank 0] step:4861/10000 train_time:458472ms step_avg:94.32ms +[2025-08-22 17:23:53] [Rank 0] step:4861/10000 train_time:458472ms step_avg:94.32ms +[2025-08-22 17:23:55] [Rank 0] step:4881/10000 train_time:460408ms step_avg:94.33ms +[2025-08-22 17:23:55] [Rank 0] step:4881/10000 train_time:460408ms step_avg:94.33ms +[2025-08-22 17:23:57] [Rank 0] step:4901/10000 train_time:462346ms step_avg:94.34ms +[2025-08-22 17:23:57] [Rank 0] step:4901/10000 train_time:462346ms step_avg:94.34ms +[2025-08-22 17:23:59] [Rank 0] step:4921/10000 train_time:464287ms step_avg:94.35ms +[2025-08-22 17:23:59] [Rank 0] step:4921/10000 train_time:464287ms step_avg:94.35ms +[2025-08-22 17:24:01] [Rank 0] 
step:4941/10000 train_time:466232ms step_avg:94.36ms +[2025-08-22 17:24:01] [Rank 0] step:4941/10000 train_time:466232ms step_avg:94.36ms +[2025-08-22 17:24:03] [Rank 0] step:4961/10000 train_time:468175ms step_avg:94.37ms +[2025-08-22 17:24:03] [Rank 0] step:4961/10000 train_time:468175ms step_avg:94.37ms +[2025-08-22 17:24:05] [Rank 0] step:4981/10000 train_time:470120ms step_avg:94.38ms +[2025-08-22 17:24:05] [Rank 0] step:4981/10000 train_time:470120ms step_avg:94.38ms +[2025-08-22 17:24:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:24:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:24:21] [Rank 0] PRINT: step:5000/10000 val_loss:3.7103 svd_entropy: attn_qk:H=0.7580,top10E=0.27,eRank=185.2,q75/q25=54.63 attn_vo:H=0.8340,top10E=0.06,eRank=403.9,q75/q25=inf mlp_w1:H=0.9718,top10E=0.04,eRank=636.7,q75/q25=2.82 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.93 vo_prod:H=0.6913,top10E=0.11,eRank=226.1,q75/q25=inf train_time:472069ms step_avg:94.41ms +[2025-08-22 17:24:21] [Rank 0] PRINT: step:5000/10000 val_loss:3.7103 svd_entropy: attn_qk:H=0.7580,top10E=0.27,eRank=185.2,q75/q25=54.63 attn_vo:H=0.8340,top10E=0.06,eRank=403.9,q75/q25=inf mlp_w1:H=0.9718,top10E=0.04,eRank=636.7,q75/q25=2.82 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.93 vo_prod:H=0.6913,top10E=0.11,eRank=226.1,q75/q25=inf train_time:472069ms step_avg:94.41ms +[2025-08-22 17:24:21] [Rank 0] step:5001/10000 train_time:472092ms step_avg:94.40ms +[2025-08-22 17:24:21] [Rank 0] step:5001/10000 train_time:472092ms step_avg:94.40ms +[2025-08-22 17:24:23] [Rank 0] step:5021/10000 train_time:474043ms step_avg:94.41ms +[2025-08-22 17:24:23] [Rank 0] step:5021/10000 train_time:474043ms step_avg:94.41ms +[2025-08-22 17:24:25] [Rank 0] step:5041/10000 train_time:475988ms step_avg:94.42ms +[2025-08-22 
17:24:25] [Rank 0] step:5041/10000 train_time:475988ms step_avg:94.42ms +[2025-08-22 17:24:26] [Rank 0] step:5061/10000 train_time:477932ms step_avg:94.43ms +[2025-08-22 17:24:26] [Rank 0] step:5061/10000 train_time:477932ms step_avg:94.43ms +[2025-08-22 17:24:28] [Rank 0] step:5081/10000 train_time:479934ms step_avg:94.46ms +[2025-08-22 17:24:28] [Rank 0] step:5081/10000 train_time:479934ms step_avg:94.46ms +[2025-08-22 17:24:31] [Rank 0] step:5101/10000 train_time:481971ms step_avg:94.49ms +[2025-08-22 17:24:31] [Rank 0] step:5101/10000 train_time:481971ms step_avg:94.49ms +[2025-08-22 17:24:32] [Rank 0] step:5121/10000 train_time:483918ms step_avg:94.50ms +[2025-08-22 17:24:32] [Rank 0] step:5121/10000 train_time:483918ms step_avg:94.50ms +[2025-08-22 17:24:34] [Rank 0] step:5141/10000 train_time:485867ms step_avg:94.51ms +[2025-08-22 17:24:34] [Rank 0] step:5141/10000 train_time:485867ms step_avg:94.51ms +[2025-08-22 17:24:36] [Rank 0] step:5161/10000 train_time:487813ms step_avg:94.52ms +[2025-08-22 17:24:36] [Rank 0] step:5161/10000 train_time:487813ms step_avg:94.52ms +[2025-08-22 17:24:38] [Rank 0] step:5181/10000 train_time:489764ms step_avg:94.53ms +[2025-08-22 17:24:38] [Rank 0] step:5181/10000 train_time:489764ms step_avg:94.53ms +[2025-08-22 17:24:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:24:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:24:54] [Rank 0] PRINT: step:5200/10000 val_loss:3.6986 svd_entropy: attn_qk:H=0.7588,top10E=0.27,eRank=185.9,q75/q25=54.51 attn_vo:H=0.8340,top10E=0.06,eRank=403.9,q75/q25=inf mlp_w1:H=0.9718,top10E=0.04,eRank=636.8,q75/q25=2.82 mlp_w2:H=0.9671,top10E=0.05,eRank=617.3,q75/q25=2.93 vo_prod:H=0.6914,top10E=0.11,eRank=226.2,q75/q25=inf train_time:491739ms step_avg:94.57ms +[2025-08-22 17:24:54] [Rank 0] PRINT: step:5200/10000 val_loss:3.6986 svd_entropy: attn_qk:H=0.7588,top10E=0.27,eRank=185.9,q75/q25=54.51 attn_vo:H=0.8340,top10E=0.06,eRank=403.9,q75/q25=inf mlp_w1:H=0.9718,top10E=0.04,eRank=636.8,q75/q25=2.82 mlp_w2:H=0.9671,top10E=0.05,eRank=617.3,q75/q25=2.93 vo_prod:H=0.6914,top10E=0.11,eRank=226.2,q75/q25=inf train_time:491739ms step_avg:94.57ms +[2025-08-22 17:24:54] [Rank 0] step:5201/10000 train_time:491762ms step_avg:94.55ms +[2025-08-22 17:24:54] [Rank 0] step:5201/10000 train_time:491762ms step_avg:94.55ms +[2025-08-22 17:24:56] [Rank 0] step:5221/10000 train_time:493738ms step_avg:94.57ms +[2025-08-22 17:24:56] [Rank 0] step:5221/10000 train_time:493738ms step_avg:94.57ms +[2025-08-22 17:24:58] [Rank 0] step:5241/10000 train_time:495709ms step_avg:94.58ms +[2025-08-22 17:24:58] [Rank 0] step:5241/10000 train_time:495709ms step_avg:94.58ms +[2025-08-22 17:25:00] [Rank 0] step:5261/10000 train_time:497681ms step_avg:94.60ms +[2025-08-22 17:25:00] [Rank 0] step:5261/10000 train_time:497681ms step_avg:94.60ms +[2025-08-22 17:25:02] [Rank 0] step:5281/10000 train_time:499652ms step_avg:94.61ms +[2025-08-22 17:25:02] [Rank 0] step:5281/10000 train_time:499652ms step_avg:94.61ms +[2025-08-22 17:25:04] [Rank 0] step:5301/10000 train_time:501634ms step_avg:94.63ms +[2025-08-22 17:25:04] [Rank 0] step:5301/10000 train_time:501634ms step_avg:94.63ms +[2025-08-22 17:25:06] [Rank 0] step:5321/10000 train_time:503606ms step_avg:94.64ms +[2025-08-22 17:25:06] [Rank 0] step:5321/10000 train_time:503606ms step_avg:94.64ms +[2025-08-22 17:25:08] [Rank 0] 
step:5341/10000 train_time:505578ms step_avg:94.66ms +[2025-08-22 17:25:08] [Rank 0] step:5341/10000 train_time:505578ms step_avg:94.66ms +[2025-08-22 17:25:10] [Rank 0] step:5361/10000 train_time:507551ms step_avg:94.67ms +[2025-08-22 17:25:10] [Rank 0] step:5361/10000 train_time:507551ms step_avg:94.67ms +[2025-08-22 17:25:12] [Rank 0] step:5381/10000 train_time:509532ms step_avg:94.69ms +[2025-08-22 17:25:12] [Rank 0] step:5381/10000 train_time:509532ms step_avg:94.69ms +[2025-08-22 17:25:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:25:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:25:27] [Rank 0] PRINT: step:5400/10000 val_loss:3.6880 svd_entropy: attn_qk:H=0.7597,top10E=0.27,eRank=186.7,q75/q25=54.89 attn_vo:H=0.8341,top10E=0.06,eRank=404.0,q75/q25=inf mlp_w1:H=0.9718,top10E=0.04,eRank=636.9,q75/q25=2.81 mlp_w2:H=0.9671,top10E=0.05,eRank=617.3,q75/q25=2.92 vo_prod:H=0.6915,top10E=0.11,eRank=226.4,q75/q25=inf train_time:511506ms step_avg:94.72ms +[2025-08-22 17:25:27] [Rank 0] PRINT: step:5400/10000 val_loss:3.6880 svd_entropy: attn_qk:H=0.7597,top10E=0.27,eRank=186.7,q75/q25=54.89 attn_vo:H=0.8341,top10E=0.06,eRank=404.0,q75/q25=inf mlp_w1:H=0.9718,top10E=0.04,eRank=636.9,q75/q25=2.81 mlp_w2:H=0.9671,top10E=0.05,eRank=617.3,q75/q25=2.92 vo_prod:H=0.6915,top10E=0.11,eRank=226.4,q75/q25=inf train_time:511506ms step_avg:94.72ms +[2025-08-22 17:25:27] [Rank 0] step:5401/10000 train_time:511527ms step_avg:94.71ms +[2025-08-22 17:25:27] [Rank 0] step:5401/10000 train_time:511527ms step_avg:94.71ms +[2025-08-22 17:25:29] [Rank 0] step:5421/10000 train_time:513491ms step_avg:94.72ms +[2025-08-22 17:25:29] [Rank 0] step:5421/10000 train_time:513491ms step_avg:94.72ms +[2025-08-22 17:25:31] [Rank 0] step:5441/10000 train_time:515525ms step_avg:94.75ms +[2025-08-22 
17:25:31] [Rank 0] step:5441/10000 train_time:515525ms step_avg:94.75ms +[2025-08-22 17:25:33] [Rank 0] step:5461/10000 train_time:517563ms step_avg:94.77ms +[2025-08-22 17:25:33] [Rank 0] step:5461/10000 train_time:517563ms step_avg:94.77ms +[2025-08-22 17:25:35] [Rank 0] step:5481/10000 train_time:519532ms step_avg:94.79ms +[2025-08-22 17:25:35] [Rank 0] step:5481/10000 train_time:519532ms step_avg:94.79ms +[2025-08-22 17:25:37] [Rank 0] step:5501/10000 train_time:521513ms step_avg:94.80ms +[2025-08-22 17:25:37] [Rank 0] step:5501/10000 train_time:521513ms step_avg:94.80ms +[2025-08-22 17:25:39] [Rank 0] step:5521/10000 train_time:523491ms step_avg:94.82ms +[2025-08-22 17:25:39] [Rank 0] step:5521/10000 train_time:523491ms step_avg:94.82ms +[2025-08-22 17:25:41] [Rank 0] step:5541/10000 train_time:525467ms step_avg:94.83ms +[2025-08-22 17:25:41] [Rank 0] step:5541/10000 train_time:525467ms step_avg:94.83ms +[2025-08-22 17:25:43] [Rank 0] step:5561/10000 train_time:527440ms step_avg:94.85ms +[2025-08-22 17:25:43] [Rank 0] step:5561/10000 train_time:527440ms step_avg:94.85ms +[2025-08-22 17:25:45] [Rank 0] step:5581/10000 train_time:529415ms step_avg:94.86ms +[2025-08-22 17:25:45] [Rank 0] step:5581/10000 train_time:529415ms step_avg:94.86ms +[2025-08-22 17:25:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:25:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:26:01] [Rank 0] PRINT: step:5600/10000 val_loss:3.6805 svd_entropy: attn_qk:H=0.7604,top10E=0.27,eRank=187.3,q75/q25=54.87 attn_vo:H=0.8341,top10E=0.06,eRank=404.0,q75/q25=inf mlp_w1:H=0.9718,top10E=0.04,eRank=637.1,q75/q25=2.81 mlp_w2:H=0.9671,top10E=0.05,eRank=617.5,q75/q25=2.91 vo_prod:H=0.6916,top10E=0.11,eRank=226.7,q75/q25=inf train_time:531395ms step_avg:94.89ms +[2025-08-22 17:26:01] [Rank 0] PRINT: step:5600/10000 val_loss:3.6805 svd_entropy: attn_qk:H=0.7604,top10E=0.27,eRank=187.3,q75/q25=54.87 attn_vo:H=0.8341,top10E=0.06,eRank=404.0,q75/q25=inf mlp_w1:H=0.9718,top10E=0.04,eRank=637.1,q75/q25=2.81 mlp_w2:H=0.9671,top10E=0.05,eRank=617.5,q75/q25=2.91 vo_prod:H=0.6916,top10E=0.11,eRank=226.7,q75/q25=inf train_time:531395ms step_avg:94.89ms +[2025-08-22 17:26:01] [Rank 0] step:5601/10000 train_time:531417ms step_avg:94.88ms +[2025-08-22 17:26:01] [Rank 0] step:5601/10000 train_time:531417ms step_avg:94.88ms +[2025-08-22 17:26:03] [Rank 0] step:5621/10000 train_time:533376ms step_avg:94.89ms +[2025-08-22 17:26:03] [Rank 0] step:5621/10000 train_time:533376ms step_avg:94.89ms +[2025-08-22 17:26:05] [Rank 0] step:5641/10000 train_time:535347ms step_avg:94.90ms +[2025-08-22 17:26:05] [Rank 0] step:5641/10000 train_time:535347ms step_avg:94.90ms +[2025-08-22 17:26:07] [Rank 0] step:5661/10000 train_time:537317ms step_avg:94.92ms +[2025-08-22 17:26:07] [Rank 0] step:5661/10000 train_time:537317ms step_avg:94.92ms +[2025-08-22 17:26:09] [Rank 0] step:5681/10000 train_time:539292ms step_avg:94.93ms +[2025-08-22 17:26:09] [Rank 0] step:5681/10000 train_time:539292ms step_avg:94.93ms +[2025-08-22 17:26:11] [Rank 0] step:5701/10000 train_time:541268ms step_avg:94.94ms +[2025-08-22 17:26:11] [Rank 0] step:5701/10000 train_time:541268ms step_avg:94.94ms +[2025-08-22 17:26:13] [Rank 0] step:5721/10000 train_time:543248ms step_avg:94.96ms +[2025-08-22 17:26:13] [Rank 0] step:5721/10000 train_time:543248ms step_avg:94.96ms +[2025-08-22 17:26:15] [Rank 0] 
step:5741/10000 train_time:545220ms step_avg:94.97ms +[2025-08-22 17:26:15] [Rank 0] step:5741/10000 train_time:545220ms step_avg:94.97ms +[2025-08-22 17:26:17] [Rank 0] step:5761/10000 train_time:547200ms step_avg:94.98ms +[2025-08-22 17:26:17] [Rank 0] step:5761/10000 train_time:547200ms step_avg:94.98ms +[2025-08-22 17:26:19] [Rank 0] step:5781/10000 train_time:549177ms step_avg:95.00ms +[2025-08-22 17:26:19] [Rank 0] step:5781/10000 train_time:549177ms step_avg:95.00ms +[2025-08-22 17:26:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:26:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:26:34] [Rank 0] PRINT: step:5800/10000 val_loss:3.6789 svd_entropy: attn_qk:H=0.7612,top10E=0.26,eRank=188.0,q75/q25=54.84 attn_vo:H=0.8341,top10E=0.06,eRank=404.1,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.2,q75/q25=2.81 mlp_w2:H=0.9671,top10E=0.05,eRank=617.6,q75/q25=2.91 vo_prod:H=0.6918,top10E=0.11,eRank=226.9,q75/q25=inf train_time:551161ms step_avg:95.03ms +[2025-08-22 17:26:34] [Rank 0] PRINT: step:5800/10000 val_loss:3.6789 svd_entropy: attn_qk:H=0.7612,top10E=0.26,eRank=188.0,q75/q25=54.84 attn_vo:H=0.8341,top10E=0.06,eRank=404.1,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.2,q75/q25=2.81 mlp_w2:H=0.9671,top10E=0.05,eRank=617.6,q75/q25=2.91 vo_prod:H=0.6918,top10E=0.11,eRank=226.9,q75/q25=inf train_time:551161ms step_avg:95.03ms +[2025-08-22 17:26:34] [Rank 0] step:5801/10000 train_time:551182ms step_avg:95.02ms +[2025-08-22 17:26:34] [Rank 0] step:5801/10000 train_time:551182ms step_avg:95.02ms +[2025-08-22 17:26:36] [Rank 0] step:5821/10000 train_time:553206ms step_avg:95.04ms +[2025-08-22 17:26:36] [Rank 0] step:5821/10000 train_time:553206ms step_avg:95.04ms +[2025-08-22 17:26:38] [Rank 0] step:5841/10000 train_time:555179ms step_avg:95.05ms +[2025-08-22 
17:26:38] [Rank 0] step:5841/10000 train_time:555179ms step_avg:95.05ms +[2025-08-22 17:26:40] [Rank 0] step:5861/10000 train_time:557162ms step_avg:95.06ms +[2025-08-22 17:26:40] [Rank 0] step:5861/10000 train_time:557162ms step_avg:95.06ms +[2025-08-22 17:26:42] [Rank 0] step:5881/10000 train_time:559139ms step_avg:95.08ms +[2025-08-22 17:26:42] [Rank 0] step:5881/10000 train_time:559139ms step_avg:95.08ms +[2025-08-22 17:26:44] [Rank 0] step:5901/10000 train_time:561116ms step_avg:95.09ms +[2025-08-22 17:26:44] [Rank 0] step:5901/10000 train_time:561116ms step_avg:95.09ms +[2025-08-22 17:26:46] [Rank 0] step:5921/10000 train_time:563095ms step_avg:95.10ms +[2025-08-22 17:26:46] [Rank 0] step:5921/10000 train_time:563095ms step_avg:95.10ms +[2025-08-22 17:26:48] [Rank 0] step:5941/10000 train_time:565080ms step_avg:95.12ms +[2025-08-22 17:26:48] [Rank 0] step:5941/10000 train_time:565080ms step_avg:95.12ms +[2025-08-22 17:26:50] [Rank 0] step:5961/10000 train_time:567061ms step_avg:95.13ms +[2025-08-22 17:26:50] [Rank 0] step:5961/10000 train_time:567061ms step_avg:95.13ms +[2025-08-22 17:26:52] [Rank 0] step:5981/10000 train_time:569042ms step_avg:95.14ms +[2025-08-22 17:26:52] [Rank 0] step:5981/10000 train_time:569042ms step_avg:95.14ms +[2025-08-22 17:26:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:26:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:27:08] [Rank 0] PRINT: step:6000/10000 val_loss:3.6585 svd_entropy: attn_qk:H=0.7621,top10E=0.26,eRank=188.7,q75/q25=55.16 attn_vo:H=0.8341,top10E=0.06,eRank=404.2,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.80 mlp_w2:H=0.9672,top10E=0.05,eRank=617.7,q75/q25=2.91 vo_prod:H=0.6919,top10E=0.11,eRank=227.2,q75/q25=inf train_time:571023ms step_avg:95.17ms +[2025-08-22 17:27:08] [Rank 0] PRINT: step:6000/10000 val_loss:3.6585 svd_entropy: attn_qk:H=0.7621,top10E=0.26,eRank=188.7,q75/q25=55.16 attn_vo:H=0.8341,top10E=0.06,eRank=404.2,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.80 mlp_w2:H=0.9672,top10E=0.05,eRank=617.7,q75/q25=2.91 vo_prod:H=0.6919,top10E=0.11,eRank=227.2,q75/q25=inf train_time:571023ms step_avg:95.17ms +[2025-08-22 17:27:08] [Rank 0] step:6001/10000 train_time:571046ms step_avg:95.16ms +[2025-08-22 17:27:08] [Rank 0] step:6001/10000 train_time:571046ms step_avg:95.16ms +[2025-08-22 17:27:10] [Rank 0] step:6021/10000 train_time:573022ms step_avg:95.17ms +[2025-08-22 17:27:10] [Rank 0] step:6021/10000 train_time:573022ms step_avg:95.17ms +[2025-08-22 17:27:12] [Rank 0] step:6041/10000 train_time:574998ms step_avg:95.18ms +[2025-08-22 17:27:12] [Rank 0] step:6041/10000 train_time:574998ms step_avg:95.18ms +[2025-08-22 17:27:14] [Rank 0] step:6061/10000 train_time:576979ms step_avg:95.20ms +[2025-08-22 17:27:14] [Rank 0] step:6061/10000 train_time:576979ms step_avg:95.20ms +[2025-08-22 17:27:16] [Rank 0] step:6081/10000 train_time:578957ms step_avg:95.21ms +[2025-08-22 17:27:16] [Rank 0] step:6081/10000 train_time:578957ms step_avg:95.21ms +[2025-08-22 17:27:18] [Rank 0] step:6101/10000 train_time:580941ms step_avg:95.22ms +[2025-08-22 17:27:18] [Rank 0] step:6101/10000 train_time:580941ms step_avg:95.22ms +[2025-08-22 17:27:20] [Rank 0] step:6121/10000 train_time:582984ms step_avg:95.24ms +[2025-08-22 17:27:20] [Rank 0] step:6121/10000 train_time:582984ms step_avg:95.24ms +[2025-08-22 17:27:22] [Rank 0] 
step:6141/10000 train_time:584973ms step_avg:95.26ms +[2025-08-22 17:27:22] [Rank 0] step:6141/10000 train_time:584973ms step_avg:95.26ms +[2025-08-22 17:27:24] [Rank 0] step:6161/10000 train_time:586951ms step_avg:95.27ms +[2025-08-22 17:27:24] [Rank 0] step:6161/10000 train_time:586951ms step_avg:95.27ms +[2025-08-22 17:27:26] [Rank 0] step:6181/10000 train_time:588931ms step_avg:95.28ms +[2025-08-22 17:27:26] [Rank 0] step:6181/10000 train_time:588931ms step_avg:95.28ms +[2025-08-22 17:27:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:27:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:27:41] [Rank 0] PRINT: step:6200/10000 val_loss:3.6453 svd_entropy: attn_qk:H=0.7627,top10E=0.26,eRank=189.2,q75/q25=55.13 attn_vo:H=0.8341,top10E=0.06,eRank=404.2,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.4,q75/q25=2.80 mlp_w2:H=0.9672,top10E=0.05,eRank=617.8,q75/q25=2.91 vo_prod:H=0.6920,top10E=0.11,eRank=227.4,q75/q25=inf train_time:590914ms step_avg:95.31ms +[2025-08-22 17:27:41] [Rank 0] PRINT: step:6200/10000 val_loss:3.6453 svd_entropy: attn_qk:H=0.7627,top10E=0.26,eRank=189.2,q75/q25=55.13 attn_vo:H=0.8341,top10E=0.06,eRank=404.2,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.4,q75/q25=2.80 mlp_w2:H=0.9672,top10E=0.05,eRank=617.8,q75/q25=2.91 vo_prod:H=0.6920,top10E=0.11,eRank=227.4,q75/q25=inf train_time:590914ms step_avg:95.31ms +[2025-08-22 17:27:41] [Rank 0] step:6201/10000 train_time:590937ms step_avg:95.30ms +[2025-08-22 17:27:41] [Rank 0] step:6201/10000 train_time:590937ms step_avg:95.30ms +[2025-08-22 17:27:43] [Rank 0] step:6221/10000 train_time:592911ms step_avg:95.31ms +[2025-08-22 17:27:43] [Rank 0] step:6221/10000 train_time:592911ms step_avg:95.31ms +[2025-08-22 17:27:45] [Rank 0] step:6241/10000 train_time:594881ms step_avg:95.32ms +[2025-08-22 
17:27:45] [Rank 0] step:6241/10000 train_time:594881ms step_avg:95.32ms +[2025-08-22 17:27:47] [Rank 0] step:6261/10000 train_time:596858ms step_avg:95.33ms +[2025-08-22 17:27:47] [Rank 0] step:6261/10000 train_time:596858ms step_avg:95.33ms +[2025-08-22 17:27:49] [Rank 0] step:6281/10000 train_time:598834ms step_avg:95.34ms +[2025-08-22 17:27:49] [Rank 0] step:6281/10000 train_time:598834ms step_avg:95.34ms +[2025-08-22 17:27:51] [Rank 0] step:6301/10000 train_time:600811ms step_avg:95.35ms +[2025-08-22 17:27:51] [Rank 0] step:6301/10000 train_time:600811ms step_avg:95.35ms +[2025-08-22 17:27:53] [Rank 0] step:6321/10000 train_time:602790ms step_avg:95.36ms +[2025-08-22 17:27:53] [Rank 0] step:6321/10000 train_time:602790ms step_avg:95.36ms +[2025-08-22 17:27:55] [Rank 0] step:6341/10000 train_time:604770ms step_avg:95.37ms +[2025-08-22 17:27:55] [Rank 0] step:6341/10000 train_time:604770ms step_avg:95.37ms +[2025-08-22 17:27:57] [Rank 0] step:6361/10000 train_time:606756ms step_avg:95.39ms +[2025-08-22 17:27:57] [Rank 0] step:6361/10000 train_time:606756ms step_avg:95.39ms +[2025-08-22 17:27:59] [Rank 0] step:6381/10000 train_time:608737ms step_avg:95.40ms +[2025-08-22 17:27:59] [Rank 0] step:6381/10000 train_time:608737ms step_avg:95.40ms +[2025-08-22 17:28:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:28:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:28:15] [Rank 0] PRINT: step:6400/10000 val_loss:3.6350 svd_entropy: attn_qk:H=0.7633,top10E=0.26,eRank=189.8,q75/q25=55.17 attn_vo:H=0.8341,top10E=0.06,eRank=404.3,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.80 mlp_w2:H=0.9672,top10E=0.05,eRank=617.8,q75/q25=2.91 vo_prod:H=0.6921,top10E=0.11,eRank=227.5,q75/q25=inf train_time:610719ms step_avg:95.42ms +[2025-08-22 17:28:15] [Rank 0] PRINT: step:6400/10000 val_loss:3.6350 svd_entropy: attn_qk:H=0.7633,top10E=0.26,eRank=189.8,q75/q25=55.17 attn_vo:H=0.8341,top10E=0.06,eRank=404.3,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.80 mlp_w2:H=0.9672,top10E=0.05,eRank=617.8,q75/q25=2.91 vo_prod:H=0.6921,top10E=0.11,eRank=227.5,q75/q25=inf train_time:610719ms step_avg:95.42ms +[2025-08-22 17:28:15] [Rank 0] step:6401/10000 train_time:610741ms step_avg:95.41ms +[2025-08-22 17:28:15] [Rank 0] step:6401/10000 train_time:610741ms step_avg:95.41ms +[2025-08-22 17:28:17] [Rank 0] step:6421/10000 train_time:612719ms step_avg:95.42ms +[2025-08-22 17:28:17] [Rank 0] step:6421/10000 train_time:612719ms step_avg:95.42ms +[2025-08-22 17:28:19] [Rank 0] step:6441/10000 train_time:614695ms step_avg:95.43ms +[2025-08-22 17:28:19] [Rank 0] step:6441/10000 train_time:614695ms step_avg:95.43ms +[2025-08-22 17:28:21] [Rank 0] step:6461/10000 train_time:616676ms step_avg:95.45ms +[2025-08-22 17:28:21] [Rank 0] step:6461/10000 train_time:616676ms step_avg:95.45ms +[2025-08-22 17:28:23] [Rank 0] step:6481/10000 train_time:618662ms step_avg:95.46ms +[2025-08-22 17:28:23] [Rank 0] step:6481/10000 train_time:618662ms step_avg:95.46ms +[2025-08-22 17:28:25] [Rank 0] step:6501/10000 train_time:620637ms step_avg:95.47ms +[2025-08-22 17:28:25] [Rank 0] step:6501/10000 train_time:620637ms step_avg:95.47ms +[2025-08-22 17:28:27] [Rank 0] step:6521/10000 train_time:622615ms step_avg:95.48ms +[2025-08-22 17:28:27] [Rank 0] step:6521/10000 train_time:622615ms step_avg:95.48ms +[2025-08-22 17:28:29] [Rank 0] 
step:6541/10000 train_time:624598ms step_avg:95.49ms +[2025-08-22 17:28:29] [Rank 0] step:6541/10000 train_time:624598ms step_avg:95.49ms +[2025-08-22 17:28:30] [Rank 0] step:6561/10000 train_time:626580ms step_avg:95.50ms +[2025-08-22 17:28:30] [Rank 0] step:6561/10000 train_time:626580ms step_avg:95.50ms +[2025-08-22 17:28:32] [Rank 0] step:6581/10000 train_time:628558ms step_avg:95.51ms +[2025-08-22 17:28:32] [Rank 0] step:6581/10000 train_time:628558ms step_avg:95.51ms +[2025-08-22 17:28:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:28:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:28:48] [Rank 0] PRINT: step:6600/10000 val_loss:3.6208 svd_entropy: attn_qk:H=0.7639,top10E=0.26,eRank=190.3,q75/q25=55.59 attn_vo:H=0.8342,top10E=0.06,eRank=404.3,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.80 mlp_w2:H=0.9672,top10E=0.05,eRank=617.9,q75/q25=2.90 vo_prod:H=0.6922,top10E=0.11,eRank=227.8,q75/q25=inf train_time:630544ms step_avg:95.54ms +[2025-08-22 17:28:48] [Rank 0] PRINT: step:6600/10000 val_loss:3.6208 svd_entropy: attn_qk:H=0.7639,top10E=0.26,eRank=190.3,q75/q25=55.59 attn_vo:H=0.8342,top10E=0.06,eRank=404.3,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.80 mlp_w2:H=0.9672,top10E=0.05,eRank=617.9,q75/q25=2.90 vo_prod:H=0.6922,top10E=0.11,eRank=227.8,q75/q25=inf train_time:630544ms step_avg:95.54ms +[2025-08-22 17:28:48] [Rank 0] step:6601/10000 train_time:630566ms step_avg:95.53ms +[2025-08-22 17:28:48] [Rank 0] step:6601/10000 train_time:630566ms step_avg:95.53ms +[2025-08-22 17:28:50] [Rank 0] step:6621/10000 train_time:632535ms step_avg:95.53ms +[2025-08-22 17:28:50] [Rank 0] step:6621/10000 train_time:632535ms step_avg:95.53ms +[2025-08-22 17:28:52] [Rank 0] step:6641/10000 train_time:634523ms step_avg:95.55ms +[2025-08-22 
17:28:52] [Rank 0] step:6641/10000 train_time:634523ms step_avg:95.55ms +[2025-08-22 17:28:54] [Rank 0] step:6661/10000 train_time:636503ms step_avg:95.56ms +[2025-08-22 17:28:54] [Rank 0] step:6661/10000 train_time:636503ms step_avg:95.56ms +[2025-08-22 17:28:56] [Rank 0] step:6681/10000 train_time:638503ms step_avg:95.57ms +[2025-08-22 17:28:56] [Rank 0] step:6681/10000 train_time:638503ms step_avg:95.57ms +[2025-08-22 17:28:58] [Rank 0] step:6701/10000 train_time:640526ms step_avg:95.59ms +[2025-08-22 17:28:58] [Rank 0] step:6701/10000 train_time:640526ms step_avg:95.59ms +[2025-08-22 17:29:00] [Rank 0] step:6721/10000 train_time:642539ms step_avg:95.60ms +[2025-08-22 17:29:00] [Rank 0] step:6721/10000 train_time:642539ms step_avg:95.60ms +[2025-08-22 17:29:02] [Rank 0] step:6741/10000 train_time:644548ms step_avg:95.62ms +[2025-08-22 17:29:02] [Rank 0] step:6741/10000 train_time:644548ms step_avg:95.62ms +[2025-08-22 17:29:04] [Rank 0] step:6761/10000 train_time:646555ms step_avg:95.63ms +[2025-08-22 17:29:04] [Rank 0] step:6761/10000 train_time:646555ms step_avg:95.63ms +[2025-08-22 17:29:06] [Rank 0] step:6781/10000 train_time:648569ms step_avg:95.65ms +[2025-08-22 17:29:06] [Rank 0] step:6781/10000 train_time:648569ms step_avg:95.65ms +[2025-08-22 17:29:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:29:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:29:22] [Rank 0] PRINT: step:6800/10000 val_loss:3.6062 svd_entropy: attn_qk:H=0.7645,top10E=0.26,eRank=190.8,q75/q25=55.64 attn_vo:H=0.8342,top10E=0.06,eRank=404.3,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=637.6,q75/q25=2.80 mlp_w2:H=0.9672,top10E=0.05,eRank=618.0,q75/q25=2.90 vo_prod:H=0.6923,top10E=0.11,eRank=227.9,q75/q25=inf train_time:650584ms step_avg:95.67ms +[2025-08-22 17:29:22] [Rank 0] PRINT: step:6800/10000 val_loss:3.6062 svd_entropy: attn_qk:H=0.7645,top10E=0.26,eRank=190.8,q75/q25=55.64 attn_vo:H=0.8342,top10E=0.06,eRank=404.3,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=637.6,q75/q25=2.80 mlp_w2:H=0.9672,top10E=0.05,eRank=618.0,q75/q25=2.90 vo_prod:H=0.6923,top10E=0.11,eRank=227.9,q75/q25=inf train_time:650584ms step_avg:95.67ms +[2025-08-22 17:29:22] [Rank 0] step:6801/10000 train_time:650607ms step_avg:95.66ms +[2025-08-22 17:29:22] [Rank 0] step:6801/10000 train_time:650607ms step_avg:95.66ms +[2025-08-22 17:29:24] [Rank 0] step:6821/10000 train_time:652607ms step_avg:95.68ms +[2025-08-22 17:29:24] [Rank 0] step:6821/10000 train_time:652607ms step_avg:95.68ms +[2025-08-22 17:29:26] [Rank 0] step:6841/10000 train_time:654610ms step_avg:95.69ms +[2025-08-22 17:29:26] [Rank 0] step:6841/10000 train_time:654610ms step_avg:95.69ms +[2025-08-22 17:29:28] [Rank 0] step:6861/10000 train_time:656609ms step_avg:95.70ms +[2025-08-22 17:29:28] [Rank 0] step:6861/10000 train_time:656609ms step_avg:95.70ms +[2025-08-22 17:29:30] [Rank 0] step:6881/10000 train_time:658617ms step_avg:95.72ms +[2025-08-22 17:29:30] [Rank 0] step:6881/10000 train_time:658617ms step_avg:95.72ms +[2025-08-22 17:29:32] [Rank 0] step:6901/10000 train_time:660620ms step_avg:95.73ms +[2025-08-22 17:29:32] [Rank 0] step:6901/10000 train_time:660620ms step_avg:95.73ms +[2025-08-22 17:29:34] [Rank 0] step:6921/10000 train_time:662620ms step_avg:95.74ms +[2025-08-22 17:29:34] [Rank 0] step:6921/10000 train_time:662620ms step_avg:95.74ms +[2025-08-22 17:29:36] [Rank 0] 
step:6941/10000 train_time:664633ms step_avg:95.75ms +[2025-08-22 17:29:36] [Rank 0] step:6941/10000 train_time:664633ms step_avg:95.75ms +[2025-08-22 17:29:38] [Rank 0] step:6961/10000 train_time:666651ms step_avg:95.77ms +[2025-08-22 17:29:38] [Rank 0] step:6961/10000 train_time:666651ms step_avg:95.77ms +[2025-08-22 17:29:40] [Rank 0] step:6981/10000 train_time:668664ms step_avg:95.78ms +[2025-08-22 17:29:40] [Rank 0] step:6981/10000 train_time:668664ms step_avg:95.78ms +[2025-08-22 17:29:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:29:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:29:55] [Rank 0] PRINT: step:7000/10000 val_loss:3.5915 svd_entropy: attn_qk:H=0.7651,top10E=0.26,eRank=191.3,q75/q25=55.64 attn_vo:H=0.8342,top10E=0.06,eRank=404.4,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.80 mlp_w2:H=0.9673,top10E=0.05,eRank=618.1,q75/q25=2.90 vo_prod:H=0.6923,top10E=0.11,eRank=228.1,q75/q25=inf train_time:670677ms step_avg:95.81ms +[2025-08-22 17:29:55] [Rank 0] PRINT: step:7000/10000 val_loss:3.5915 svd_entropy: attn_qk:H=0.7651,top10E=0.26,eRank=191.3,q75/q25=55.64 attn_vo:H=0.8342,top10E=0.06,eRank=404.4,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.80 mlp_w2:H=0.9673,top10E=0.05,eRank=618.1,q75/q25=2.90 vo_prod:H=0.6923,top10E=0.11,eRank=228.1,q75/q25=inf train_time:670677ms step_avg:95.81ms +[2025-08-22 17:29:55] [Rank 0] step:7001/10000 train_time:670700ms step_avg:95.80ms +[2025-08-22 17:29:55] [Rank 0] step:7001/10000 train_time:670700ms step_avg:95.80ms +[2025-08-22 17:29:57] [Rank 0] step:7021/10000 train_time:672696ms step_avg:95.81ms +[2025-08-22 17:29:57] [Rank 0] step:7021/10000 train_time:672696ms step_avg:95.81ms +[2025-08-22 17:29:59] [Rank 0] step:7041/10000 train_time:674696ms step_avg:95.82ms +[2025-08-22 
17:29:59] [Rank 0] step:7041/10000 train_time:674696ms step_avg:95.82ms +[2025-08-22 17:30:01] [Rank 0] step:7061/10000 train_time:676737ms step_avg:95.84ms +[2025-08-22 17:30:01] [Rank 0] step:7061/10000 train_time:676737ms step_avg:95.84ms +[2025-08-22 17:30:03] [Rank 0] step:7081/10000 train_time:678700ms step_avg:95.85ms +[2025-08-22 17:30:03] [Rank 0] step:7081/10000 train_time:678700ms step_avg:95.85ms +[2025-08-22 17:30:05] [Rank 0] step:7101/10000 train_time:680715ms step_avg:95.86ms +[2025-08-22 17:30:05] [Rank 0] step:7101/10000 train_time:680715ms step_avg:95.86ms +[2025-08-22 17:30:07] [Rank 0] step:7121/10000 train_time:682718ms step_avg:95.87ms +[2025-08-22 17:30:07] [Rank 0] step:7121/10000 train_time:682718ms step_avg:95.87ms +[2025-08-22 17:30:09] [Rank 0] step:7141/10000 train_time:684723ms step_avg:95.89ms +[2025-08-22 17:30:09] [Rank 0] step:7141/10000 train_time:684723ms step_avg:95.89ms +[2025-08-22 17:30:11] [Rank 0] step:7161/10000 train_time:686733ms step_avg:95.90ms +[2025-08-22 17:30:11] [Rank 0] step:7161/10000 train_time:686733ms step_avg:95.90ms +[2025-08-22 17:30:13] [Rank 0] step:7181/10000 train_time:688744ms step_avg:95.91ms +[2025-08-22 17:30:13] [Rank 0] step:7181/10000 train_time:688744ms step_avg:95.91ms +[2025-08-22 17:30:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:30:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:30:29] [Rank 0] PRINT: step:7200/10000 val_loss:3.5810 svd_entropy: attn_qk:H=0.7655,top10E=0.26,eRank=191.7,q75/q25=55.54 attn_vo:H=0.8342,top10E=0.06,eRank=404.5,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.80 mlp_w2:H=0.9673,top10E=0.05,eRank=618.2,q75/q25=2.90 vo_prod:H=0.6925,top10E=0.11,eRank=228.4,q75/q25=inf train_time:690759ms step_avg:95.94ms +[2025-08-22 17:30:29] [Rank 0] PRINT: step:7200/10000 val_loss:3.5810 svd_entropy: attn_qk:H=0.7655,top10E=0.26,eRank=191.7,q75/q25=55.54 attn_vo:H=0.8342,top10E=0.06,eRank=404.5,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.80 mlp_w2:H=0.9673,top10E=0.05,eRank=618.2,q75/q25=2.90 vo_prod:H=0.6925,top10E=0.11,eRank=228.4,q75/q25=inf train_time:690759ms step_avg:95.94ms +[2025-08-22 17:30:29] [Rank 0] step:7201/10000 train_time:690781ms step_avg:95.93ms +[2025-08-22 17:30:29] [Rank 0] step:7201/10000 train_time:690781ms step_avg:95.93ms +[2025-08-22 17:30:31] [Rank 0] step:7221/10000 train_time:692793ms step_avg:95.94ms +[2025-08-22 17:30:31] [Rank 0] step:7221/10000 train_time:692793ms step_avg:95.94ms +[2025-08-22 17:30:33] [Rank 0] step:7241/10000 train_time:694795ms step_avg:95.95ms +[2025-08-22 17:30:33] [Rank 0] step:7241/10000 train_time:694795ms step_avg:95.95ms +[2025-08-22 17:30:35] [Rank 0] step:7261/10000 train_time:696796ms step_avg:95.96ms +[2025-08-22 17:30:35] [Rank 0] step:7261/10000 train_time:696796ms step_avg:95.96ms +[2025-08-22 17:30:37] [Rank 0] step:7281/10000 train_time:698810ms step_avg:95.98ms +[2025-08-22 17:30:37] [Rank 0] step:7281/10000 train_time:698810ms step_avg:95.98ms +[2025-08-22 17:30:39] [Rank 0] step:7301/10000 train_time:700817ms step_avg:95.99ms +[2025-08-22 17:30:39] [Rank 0] step:7301/10000 train_time:700817ms step_avg:95.99ms +[2025-08-22 17:30:41] [Rank 0] step:7321/10000 train_time:702835ms step_avg:96.00ms +[2025-08-22 17:30:41] [Rank 0] step:7321/10000 train_time:702835ms step_avg:96.00ms +[2025-08-22 17:30:43] [Rank 0] 
step:7341/10000 train_time:704845ms step_avg:96.01ms +[2025-08-22 17:30:43] [Rank 0] step:7341/10000 train_time:704845ms step_avg:96.01ms +[2025-08-22 17:30:45] [Rank 0] step:7361/10000 train_time:706864ms step_avg:96.03ms +[2025-08-22 17:30:45] [Rank 0] step:7361/10000 train_time:706864ms step_avg:96.03ms +[2025-08-22 17:30:47] [Rank 0] step:7381/10000 train_time:708961ms step_avg:96.05ms +[2025-08-22 17:30:47] [Rank 0] step:7381/10000 train_time:708961ms step_avg:96.05ms +[2025-08-22 17:30:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:30:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:31:03] [Rank 0] PRINT: step:7400/10000 val_loss:3.5639 svd_entropy: attn_qk:H=0.7659,top10E=0.26,eRank=192.1,q75/q25=55.56 attn_vo:H=0.8342,top10E=0.06,eRank=404.6,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.79 mlp_w2:H=0.9673,top10E=0.05,eRank=618.3,q75/q25=2.90 vo_prod:H=0.6926,top10E=0.11,eRank=228.6,q75/q25=inf train_time:711037ms step_avg:96.09ms +[2025-08-22 17:31:03] [Rank 0] PRINT: step:7400/10000 val_loss:3.5639 svd_entropy: attn_qk:H=0.7659,top10E=0.26,eRank=192.1,q75/q25=55.56 attn_vo:H=0.8342,top10E=0.06,eRank=404.6,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.79 mlp_w2:H=0.9673,top10E=0.05,eRank=618.3,q75/q25=2.90 vo_prod:H=0.6926,top10E=0.11,eRank=228.6,q75/q25=inf train_time:711037ms step_avg:96.09ms +[2025-08-22 17:31:03] [Rank 0] step:7401/10000 train_time:711059ms step_avg:96.08ms +[2025-08-22 17:31:03] [Rank 0] step:7401/10000 train_time:711059ms step_avg:96.08ms +[2025-08-22 17:31:05] [Rank 0] step:7421/10000 train_time:713080ms step_avg:96.09ms +[2025-08-22 17:31:05] [Rank 0] step:7421/10000 train_time:713080ms step_avg:96.09ms +[2025-08-22 17:31:07] [Rank 0] step:7441/10000 train_time:715089ms step_avg:96.10ms +[2025-08-22 
17:31:07] [Rank 0] step:7441/10000 train_time:715089ms step_avg:96.10ms +[2025-08-22 17:31:09] [Rank 0] step:7461/10000 train_time:717102ms step_avg:96.11ms +[2025-08-22 17:31:09] [Rank 0] step:7461/10000 train_time:717102ms step_avg:96.11ms +[2025-08-22 17:31:11] [Rank 0] step:7481/10000 train_time:719127ms step_avg:96.13ms +[2025-08-22 17:31:11] [Rank 0] step:7481/10000 train_time:719127ms step_avg:96.13ms +[2025-08-22 17:31:13] [Rank 0] step:7501/10000 train_time:721145ms step_avg:96.14ms +[2025-08-22 17:31:13] [Rank 0] step:7501/10000 train_time:721145ms step_avg:96.14ms +[2025-08-22 17:31:15] [Rank 0] step:7521/10000 train_time:723166ms step_avg:96.15ms +[2025-08-22 17:31:15] [Rank 0] step:7521/10000 train_time:723166ms step_avg:96.15ms +[2025-08-22 17:31:17] [Rank 0] step:7541/10000 train_time:725190ms step_avg:96.17ms +[2025-08-22 17:31:17] [Rank 0] step:7541/10000 train_time:725190ms step_avg:96.17ms +[2025-08-22 17:31:19] [Rank 0] step:7561/10000 train_time:727202ms step_avg:96.18ms +[2025-08-22 17:31:19] [Rank 0] step:7561/10000 train_time:727202ms step_avg:96.18ms +[2025-08-22 17:31:21] [Rank 0] step:7581/10000 train_time:729229ms step_avg:96.19ms +[2025-08-22 17:31:21] [Rank 0] step:7581/10000 train_time:729229ms step_avg:96.19ms +[2025-08-22 17:31:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:31:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:31:37] [Rank 0] PRINT: step:7600/10000 val_loss:3.5526 svd_entropy: attn_qk:H=0.7664,top10E=0.26,eRank=192.5,q75/q25=55.52 attn_vo:H=0.8343,top10E=0.06,eRank=404.6,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=638.0,q75/q25=2.79 mlp_w2:H=0.9673,top10E=0.05,eRank=618.4,q75/q25=2.89 vo_prod:H=0.6927,top10E=0.11,eRank=228.7,q75/q25=inf train_time:731257ms step_avg:96.22ms +[2025-08-22 17:31:37] [Rank 0] PRINT: step:7600/10000 val_loss:3.5526 svd_entropy: attn_qk:H=0.7664,top10E=0.26,eRank=192.5,q75/q25=55.52 attn_vo:H=0.8343,top10E=0.06,eRank=404.6,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=638.0,q75/q25=2.79 mlp_w2:H=0.9673,top10E=0.05,eRank=618.4,q75/q25=2.89 vo_prod:H=0.6927,top10E=0.11,eRank=228.7,q75/q25=inf train_time:731257ms step_avg:96.22ms +[2025-08-22 17:31:37] [Rank 0] step:7601/10000 train_time:731280ms step_avg:96.21ms +[2025-08-22 17:31:37] [Rank 0] step:7601/10000 train_time:731280ms step_avg:96.21ms +[2025-08-22 17:31:39] [Rank 0] step:7621/10000 train_time:733280ms step_avg:96.22ms +[2025-08-22 17:31:39] [Rank 0] step:7621/10000 train_time:733280ms step_avg:96.22ms +[2025-08-22 17:31:41] [Rank 0] step:7641/10000 train_time:735294ms step_avg:96.23ms +[2025-08-22 17:31:41] [Rank 0] step:7641/10000 train_time:735294ms step_avg:96.23ms +[2025-08-22 17:31:43] [Rank 0] step:7661/10000 train_time:737313ms step_avg:96.24ms +[2025-08-22 17:31:43] [Rank 0] step:7661/10000 train_time:737313ms step_avg:96.24ms +[2025-08-22 17:31:45] [Rank 0] step:7681/10000 train_time:739325ms step_avg:96.25ms +[2025-08-22 17:31:45] [Rank 0] step:7681/10000 train_time:739325ms step_avg:96.25ms +[2025-08-22 17:31:47] [Rank 0] step:7701/10000 train_time:741340ms step_avg:96.27ms +[2025-08-22 17:31:47] [Rank 0] step:7701/10000 train_time:741340ms step_avg:96.27ms +[2025-08-22 17:31:49] [Rank 0] step:7721/10000 train_time:743372ms step_avg:96.28ms +[2025-08-22 17:31:49] [Rank 0] step:7721/10000 train_time:743372ms step_avg:96.28ms +[2025-08-22 17:31:51] [Rank 0] 
step:7741/10000 train_time:745447ms step_avg:96.30ms +[2025-08-22 17:31:51] [Rank 0] step:7741/10000 train_time:745447ms step_avg:96.30ms +[2025-08-22 17:31:53] [Rank 0] step:7761/10000 train_time:747551ms step_avg:96.32ms +[2025-08-22 17:31:53] [Rank 0] step:7761/10000 train_time:747551ms step_avg:96.32ms +[2025-08-22 17:31:55] [Rank 0] step:7781/10000 train_time:749572ms step_avg:96.33ms +[2025-08-22 17:31:55] [Rank 0] step:7781/10000 train_time:749572ms step_avg:96.33ms +[2025-08-22 17:31:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:31:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:32:11] [Rank 0] PRINT: step:7800/10000 val_loss:3.5406 svd_entropy: attn_qk:H=0.7668,top10E=0.26,eRank=192.8,q75/q25=55.55 attn_vo:H=0.8343,top10E=0.06,eRank=404.7,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.0,q75/q25=2.79 mlp_w2:H=0.9674,top10E=0.05,eRank=618.5,q75/q25=2.89 vo_prod:H=0.6927,top10E=0.11,eRank=228.9,q75/q25=inf train_time:751608ms step_avg:96.36ms +[2025-08-22 17:32:11] [Rank 0] PRINT: step:7800/10000 val_loss:3.5406 svd_entropy: attn_qk:H=0.7668,top10E=0.26,eRank=192.8,q75/q25=55.55 attn_vo:H=0.8343,top10E=0.06,eRank=404.7,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.0,q75/q25=2.79 mlp_w2:H=0.9674,top10E=0.05,eRank=618.5,q75/q25=2.89 vo_prod:H=0.6927,top10E=0.11,eRank=228.9,q75/q25=inf train_time:751608ms step_avg:96.36ms +[2025-08-22 17:32:11] [Rank 0] step:7801/10000 train_time:751631ms step_avg:96.35ms +[2025-08-22 17:32:11] [Rank 0] step:7801/10000 train_time:751631ms step_avg:96.35ms +[2025-08-22 17:32:13] [Rank 0] step:7821/10000 train_time:753637ms step_avg:96.36ms +[2025-08-22 17:32:13] [Rank 0] step:7821/10000 train_time:753637ms step_avg:96.36ms +[2025-08-22 17:32:15] [Rank 0] step:7841/10000 train_time:755643ms step_avg:96.37ms +[2025-08-22 
17:32:15] [Rank 0] step:7841/10000 train_time:755643ms step_avg:96.37ms +[2025-08-22 17:32:17] [Rank 0] step:7861/10000 train_time:757658ms step_avg:96.38ms +[2025-08-22 17:32:17] [Rank 0] step:7861/10000 train_time:757658ms step_avg:96.38ms +[2025-08-22 17:32:19] [Rank 0] step:7881/10000 train_time:759679ms step_avg:96.39ms +[2025-08-22 17:32:19] [Rank 0] step:7881/10000 train_time:759679ms step_avg:96.39ms +[2025-08-22 17:32:21] [Rank 0] step:7901/10000 train_time:761687ms step_avg:96.40ms +[2025-08-22 17:32:21] [Rank 0] step:7901/10000 train_time:761687ms step_avg:96.40ms +[2025-08-22 17:32:23] [Rank 0] step:7921/10000 train_time:763701ms step_avg:96.41ms +[2025-08-22 17:32:23] [Rank 0] step:7921/10000 train_time:763701ms step_avg:96.41ms +[2025-08-22 17:32:25] [Rank 0] step:7941/10000 train_time:765719ms step_avg:96.43ms +[2025-08-22 17:32:25] [Rank 0] step:7941/10000 train_time:765719ms step_avg:96.43ms +[2025-08-22 17:32:27] [Rank 0] step:7961/10000 train_time:767734ms step_avg:96.44ms +[2025-08-22 17:32:27] [Rank 0] step:7961/10000 train_time:767734ms step_avg:96.44ms +[2025-08-22 17:32:29] [Rank 0] step:7981/10000 train_time:769741ms step_avg:96.45ms +[2025-08-22 17:32:29] [Rank 0] step:7981/10000 train_time:769741ms step_avg:96.45ms +[2025-08-22 17:32:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:32:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:32:45] [Rank 0] PRINT: step:8000/10000 val_loss:3.5255 svd_entropy: attn_qk:H=0.7671,top10E=0.26,eRank=193.1,q75/q25=55.61 attn_vo:H=0.8343,top10E=0.06,eRank=404.8,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.1,q75/q25=2.79 mlp_w2:H=0.9674,top10E=0.05,eRank=618.6,q75/q25=2.89 vo_prod:H=0.6928,top10E=0.11,eRank=229.1,q75/q25=inf train_time:771759ms step_avg:96.47ms +[2025-08-22 17:32:45] [Rank 0] PRINT: step:8000/10000 val_loss:3.5255 svd_entropy: attn_qk:H=0.7671,top10E=0.26,eRank=193.1,q75/q25=55.61 attn_vo:H=0.8343,top10E=0.06,eRank=404.8,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.1,q75/q25=2.79 mlp_w2:H=0.9674,top10E=0.05,eRank=618.6,q75/q25=2.89 vo_prod:H=0.6928,top10E=0.11,eRank=229.1,q75/q25=inf train_time:771759ms step_avg:96.47ms +[2025-08-22 17:32:45] [Rank 0] step:8001/10000 train_time:771781ms step_avg:96.46ms +[2025-08-22 17:32:45] [Rank 0] step:8001/10000 train_time:771781ms step_avg:96.46ms +[2025-08-22 17:32:47] [Rank 0] step:8021/10000 train_time:773792ms step_avg:96.47ms +[2025-08-22 17:32:47] [Rank 0] step:8021/10000 train_time:773792ms step_avg:96.47ms +[2025-08-22 17:32:49] [Rank 0] step:8041/10000 train_time:775811ms step_avg:96.48ms +[2025-08-22 17:32:49] [Rank 0] step:8041/10000 train_time:775811ms step_avg:96.48ms +[2025-08-22 17:32:51] [Rank 0] step:8061/10000 train_time:777821ms step_avg:96.49ms +[2025-08-22 17:32:51] [Rank 0] step:8061/10000 train_time:777821ms step_avg:96.49ms +[2025-08-22 17:32:53] [Rank 0] step:8081/10000 train_time:779825ms step_avg:96.50ms +[2025-08-22 17:32:53] [Rank 0] step:8081/10000 train_time:779825ms step_avg:96.50ms +[2025-08-22 17:32:55] [Rank 0] step:8101/10000 train_time:781901ms step_avg:96.52ms +[2025-08-22 17:32:55] [Rank 0] step:8101/10000 train_time:781901ms step_avg:96.52ms +[2025-08-22 17:32:57] [Rank 0] step:8121/10000 train_time:783984ms step_avg:96.54ms +[2025-08-22 17:32:57] [Rank 0] step:8121/10000 train_time:783984ms step_avg:96.54ms +[2025-08-22 17:33:00] [Rank 0] 
step:8141/10000 train_time:786662ms step_avg:96.63ms +[2025-08-22 17:33:00] [Rank 0] step:8141/10000 train_time:786662ms step_avg:96.63ms +[2025-08-22 17:33:02] [Rank 0] step:8161/10000 train_time:788693ms step_avg:96.64ms +[2025-08-22 17:33:02] [Rank 0] step:8161/10000 train_time:788693ms step_avg:96.64ms +[2025-08-22 17:33:04] [Rank 0] step:8181/10000 train_time:790733ms step_avg:96.65ms +[2025-08-22 17:33:04] [Rank 0] step:8181/10000 train_time:790733ms step_avg:96.65ms +[2025-08-22 17:33:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:33:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:33:19] [Rank 0] PRINT: step:8200/10000 val_loss:3.5142 svd_entropy: attn_qk:H=0.7674,top10E=0.26,eRank=193.4,q75/q25=56.06 attn_vo:H=0.8343,top10E=0.06,eRank=404.9,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.1,q75/q25=2.79 mlp_w2:H=0.9674,top10E=0.05,eRank=618.8,q75/q25=2.89 vo_prod:H=0.6930,top10E=0.11,eRank=229.3,q75/q25=inf train_time:792798ms step_avg:96.68ms +[2025-08-22 17:33:19] [Rank 0] PRINT: step:8200/10000 val_loss:3.5142 svd_entropy: attn_qk:H=0.7674,top10E=0.26,eRank=193.4,q75/q25=56.06 attn_vo:H=0.8343,top10E=0.06,eRank=404.9,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.1,q75/q25=2.79 mlp_w2:H=0.9674,top10E=0.05,eRank=618.8,q75/q25=2.89 vo_prod:H=0.6930,top10E=0.11,eRank=229.3,q75/q25=inf train_time:792798ms step_avg:96.68ms +[2025-08-22 17:33:20] [Rank 0] step:8201/10000 train_time:792821ms step_avg:96.67ms +[2025-08-22 17:33:20] [Rank 0] step:8201/10000 train_time:792821ms step_avg:96.67ms +[2025-08-22 17:33:22] [Rank 0] step:8221/10000 train_time:794862ms step_avg:96.69ms +[2025-08-22 17:33:22] [Rank 0] step:8221/10000 train_time:794862ms step_avg:96.69ms +[2025-08-22 17:33:24] [Rank 0] step:8241/10000 train_time:796904ms step_avg:96.70ms +[2025-08-22 
17:33:24] [Rank 0] step:8241/10000 train_time:796904ms step_avg:96.70ms +[2025-08-22 17:33:26] [Rank 0] step:8261/10000 train_time:798950ms step_avg:96.71ms +[2025-08-22 17:33:26] [Rank 0] step:8261/10000 train_time:798950ms step_avg:96.71ms +[2025-08-22 17:33:28] [Rank 0] step:8281/10000 train_time:800985ms step_avg:96.73ms +[2025-08-22 17:33:28] [Rank 0] step:8281/10000 train_time:800985ms step_avg:96.73ms +[2025-08-22 17:33:30] [Rank 0] step:8301/10000 train_time:803023ms step_avg:96.74ms +[2025-08-22 17:33:30] [Rank 0] step:8301/10000 train_time:803023ms step_avg:96.74ms +[2025-08-22 17:33:32] [Rank 0] step:8321/10000 train_time:805056ms step_avg:96.75ms +[2025-08-22 17:33:32] [Rank 0] step:8321/10000 train_time:805056ms step_avg:96.75ms +[2025-08-22 17:33:34] [Rank 0] step:8341/10000 train_time:807101ms step_avg:96.76ms +[2025-08-22 17:33:34] [Rank 0] step:8341/10000 train_time:807101ms step_avg:96.76ms +[2025-08-22 17:33:36] [Rank 0] step:8361/10000 train_time:809140ms step_avg:96.78ms +[2025-08-22 17:33:36] [Rank 0] step:8361/10000 train_time:809140ms step_avg:96.78ms +[2025-08-22 17:33:38] [Rank 0] step:8381/10000 train_time:811180ms step_avg:96.79ms +[2025-08-22 17:33:38] [Rank 0] step:8381/10000 train_time:811180ms step_avg:96.79ms +[2025-08-22 17:33:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:33:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:33:54] [Rank 0] PRINT: step:8400/10000 val_loss:3.5013 svd_entropy: attn_qk:H=0.7677,top10E=0.26,eRank=193.6,q75/q25=56.05 attn_vo:H=0.8344,top10E=0.06,eRank=404.9,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.2,q75/q25=2.79 mlp_w2:H=0.9675,top10E=0.05,eRank=618.9,q75/q25=2.88 vo_prod:H=0.6930,top10E=0.11,eRank=229.5,q75/q25=inf train_time:813221ms step_avg:96.81ms +[2025-08-22 17:33:54] [Rank 0] PRINT: step:8400/10000 val_loss:3.5013 svd_entropy: attn_qk:H=0.7677,top10E=0.26,eRank=193.6,q75/q25=56.05 attn_vo:H=0.8344,top10E=0.06,eRank=404.9,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.2,q75/q25=2.79 mlp_w2:H=0.9675,top10E=0.05,eRank=618.9,q75/q25=2.88 vo_prod:H=0.6930,top10E=0.11,eRank=229.5,q75/q25=inf train_time:813221ms step_avg:96.81ms +[2025-08-22 17:33:54] [Rank 0] step:8401/10000 train_time:813244ms step_avg:96.80ms +[2025-08-22 17:33:54] [Rank 0] step:8401/10000 train_time:813244ms step_avg:96.80ms +[2025-08-22 17:33:56] [Rank 0] step:8421/10000 train_time:815273ms step_avg:96.81ms +[2025-08-22 17:33:56] [Rank 0] step:8421/10000 train_time:815273ms step_avg:96.81ms +[2025-08-22 17:33:58] [Rank 0] step:8441/10000 train_time:817396ms step_avg:96.84ms +[2025-08-22 17:33:58] [Rank 0] step:8441/10000 train_time:817396ms step_avg:96.84ms +[2025-08-22 17:34:00] [Rank 0] step:8461/10000 train_time:819490ms step_avg:96.85ms +[2025-08-22 17:34:00] [Rank 0] step:8461/10000 train_time:819490ms step_avg:96.85ms +[2025-08-22 17:34:02] [Rank 0] step:8481/10000 train_time:821536ms step_avg:96.87ms +[2025-08-22 17:34:02] [Rank 0] step:8481/10000 train_time:821536ms step_avg:96.87ms +[2025-08-22 17:34:04] [Rank 0] step:8501/10000 train_time:823604ms step_avg:96.88ms +[2025-08-22 17:34:04] [Rank 0] step:8501/10000 train_time:823604ms step_avg:96.88ms +[2025-08-22 17:34:06] [Rank 0] step:8521/10000 train_time:825650ms step_avg:96.90ms +[2025-08-22 17:34:06] [Rank 0] step:8521/10000 train_time:825650ms step_avg:96.90ms +[2025-08-22 17:34:08] [Rank 0] 
step:8541/10000 train_time:827708ms step_avg:96.91ms +[2025-08-22 17:34:08] [Rank 0] step:8541/10000 train_time:827708ms step_avg:96.91ms +[2025-08-22 17:34:10] [Rank 0] step:8561/10000 train_time:829759ms step_avg:96.92ms +[2025-08-22 17:34:10] [Rank 0] step:8561/10000 train_time:829759ms step_avg:96.92ms +[2025-08-22 17:34:12] [Rank 0] step:8581/10000 train_time:831806ms step_avg:96.94ms +[2025-08-22 17:34:12] [Rank 0] step:8581/10000 train_time:831806ms step_avg:96.94ms +[2025-08-22 17:34:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:34:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:34:28] [Rank 0] PRINT: step:8600/10000 val_loss:3.4927 svd_entropy: attn_qk:H=0.7679,top10E=0.25,eRank=193.8,q75/q25=55.93 attn_vo:H=0.8344,top10E=0.06,eRank=405.0,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.2,q75/q25=2.78 mlp_w2:H=0.9675,top10E=0.05,eRank=619.0,q75/q25=2.88 vo_prod:H=0.6931,top10E=0.11,eRank=229.6,q75/q25=inf train_time:833848ms step_avg:96.96ms +[2025-08-22 17:34:28] [Rank 0] PRINT: step:8600/10000 val_loss:3.4927 svd_entropy: attn_qk:H=0.7679,top10E=0.25,eRank=193.8,q75/q25=55.93 attn_vo:H=0.8344,top10E=0.06,eRank=405.0,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.2,q75/q25=2.78 mlp_w2:H=0.9675,top10E=0.05,eRank=619.0,q75/q25=2.88 vo_prod:H=0.6931,top10E=0.11,eRank=229.6,q75/q25=inf train_time:833848ms step_avg:96.96ms +[2025-08-22 17:34:28] [Rank 0] step:8601/10000 train_time:833871ms step_avg:96.95ms +[2025-08-22 17:34:28] [Rank 0] step:8601/10000 train_time:833871ms step_avg:96.95ms +[2025-08-22 17:34:30] [Rank 0] step:8621/10000 train_time:835899ms step_avg:96.96ms +[2025-08-22 17:34:30] [Rank 0] step:8621/10000 train_time:835899ms step_avg:96.96ms +[2025-08-22 17:34:32] [Rank 0] step:8641/10000 train_time:837938ms step_avg:96.97ms +[2025-08-22 
17:34:32] [Rank 0] step:8641/10000 train_time:837938ms step_avg:96.97ms +[2025-08-22 17:34:34] [Rank 0] step:8661/10000 train_time:839974ms step_avg:96.98ms +[2025-08-22 17:34:34] [Rank 0] step:8661/10000 train_time:839974ms step_avg:96.98ms +[2025-08-22 17:34:36] [Rank 0] step:8681/10000 train_time:842017ms step_avg:97.00ms +[2025-08-22 17:34:36] [Rank 0] step:8681/10000 train_time:842017ms step_avg:97.00ms +[2025-08-22 17:34:38] [Rank 0] step:8701/10000 train_time:844048ms step_avg:97.01ms +[2025-08-22 17:34:38] [Rank 0] step:8701/10000 train_time:844048ms step_avg:97.01ms +[2025-08-22 17:34:40] [Rank 0] step:8721/10000 train_time:846091ms step_avg:97.02ms +[2025-08-22 17:34:40] [Rank 0] step:8721/10000 train_time:846091ms step_avg:97.02ms +[2025-08-22 17:34:42] [Rank 0] step:8741/10000 train_time:848125ms step_avg:97.03ms +[2025-08-22 17:34:42] [Rank 0] step:8741/10000 train_time:848125ms step_avg:97.03ms +[2025-08-22 17:34:44] [Rank 0] step:8761/10000 train_time:850167ms step_avg:97.04ms +[2025-08-22 17:34:44] [Rank 0] step:8761/10000 train_time:850167ms step_avg:97.04ms +[2025-08-22 17:34:46] [Rank 0] step:8781/10000 train_time:852211ms step_avg:97.05ms +[2025-08-22 17:34:46] [Rank 0] step:8781/10000 train_time:852211ms step_avg:97.05ms +[2025-08-22 17:34:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:34:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:35:02] [Rank 0] PRINT: step:8800/10000 val_loss:3.4805 svd_entropy: attn_qk:H=0.7681,top10E=0.25,eRank=194.0,q75/q25=55.83 attn_vo:H=0.8344,top10E=0.06,eRank=405.1,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.3,q75/q25=2.79 mlp_w2:H=0.9675,top10E=0.05,eRank=619.1,q75/q25=2.88 vo_prod:H=0.6932,top10E=0.11,eRank=229.8,q75/q25=inf train_time:854255ms step_avg:97.07ms +[2025-08-22 17:35:02] [Rank 0] PRINT: step:8800/10000 val_loss:3.4805 svd_entropy: attn_qk:H=0.7681,top10E=0.25,eRank=194.0,q75/q25=55.83 attn_vo:H=0.8344,top10E=0.06,eRank=405.1,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.3,q75/q25=2.79 mlp_w2:H=0.9675,top10E=0.05,eRank=619.1,q75/q25=2.88 vo_prod:H=0.6932,top10E=0.11,eRank=229.8,q75/q25=inf train_time:854255ms step_avg:97.07ms +[2025-08-22 17:35:02] [Rank 0] step:8801/10000 train_time:854278ms step_avg:97.07ms +[2025-08-22 17:35:02] [Rank 0] step:8801/10000 train_time:854278ms step_avg:97.07ms +[2025-08-22 17:35:04] [Rank 0] step:8821/10000 train_time:856307ms step_avg:97.08ms +[2025-08-22 17:35:04] [Rank 0] step:8821/10000 train_time:856307ms step_avg:97.08ms +[2025-08-22 17:35:06] [Rank 0] step:8841/10000 train_time:858359ms step_avg:97.09ms +[2025-08-22 17:35:06] [Rank 0] step:8841/10000 train_time:858359ms step_avg:97.09ms +[2025-08-22 17:35:08] [Rank 0] step:8861/10000 train_time:860393ms step_avg:97.10ms +[2025-08-22 17:35:08] [Rank 0] step:8861/10000 train_time:860393ms step_avg:97.10ms +[2025-08-22 17:35:10] [Rank 0] step:8881/10000 train_time:862431ms step_avg:97.11ms +[2025-08-22 17:35:10] [Rank 0] step:8881/10000 train_time:862431ms step_avg:97.11ms +[2025-08-22 17:35:12] [Rank 0] step:8901/10000 train_time:864472ms step_avg:97.12ms +[2025-08-22 17:35:12] [Rank 0] step:8901/10000 train_time:864472ms step_avg:97.12ms +[2025-08-22 17:35:14] [Rank 0] step:8921/10000 train_time:866528ms step_avg:97.13ms +[2025-08-22 17:35:14] [Rank 0] step:8921/10000 train_time:866528ms step_avg:97.13ms +[2025-08-22 17:35:16] [Rank 0] 
step:8941/10000 train_time:868571ms step_avg:97.14ms +[2025-08-22 17:35:16] [Rank 0] step:8941/10000 train_time:868571ms step_avg:97.14ms +[2025-08-22 17:35:19] [Rank 0] step:8961/10000 train_time:870610ms step_avg:97.16ms +[2025-08-22 17:35:19] [Rank 0] step:8961/10000 train_time:870610ms step_avg:97.16ms +[2025-08-22 17:35:21] [Rank 0] step:8981/10000 train_time:872648ms step_avg:97.17ms +[2025-08-22 17:35:21] [Rank 0] step:8981/10000 train_time:872648ms step_avg:97.17ms +[2025-08-22 17:35:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:35:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:35:36] [Rank 0] PRINT: step:9000/10000 val_loss:3.4696 svd_entropy: attn_qk:H=0.7684,top10E=0.25,eRank=194.2,q75/q25=55.95 attn_vo:H=0.8344,top10E=0.06,eRank=405.1,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.3,q75/q25=2.78 mlp_w2:H=0.9676,top10E=0.05,eRank=619.3,q75/q25=2.88 vo_prod:H=0.6933,top10E=0.11,eRank=230.0,q75/q25=inf train_time:874691ms step_avg:97.19ms +[2025-08-22 17:35:36] [Rank 0] PRINT: step:9000/10000 val_loss:3.4696 svd_entropy: attn_qk:H=0.7684,top10E=0.25,eRank=194.2,q75/q25=55.95 attn_vo:H=0.8344,top10E=0.06,eRank=405.1,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.3,q75/q25=2.78 mlp_w2:H=0.9676,top10E=0.05,eRank=619.3,q75/q25=2.88 vo_prod:H=0.6933,top10E=0.11,eRank=230.0,q75/q25=inf train_time:874691ms step_avg:97.19ms +[2025-08-22 17:35:36] [Rank 0] step:9001/10000 train_time:874713ms step_avg:97.18ms +[2025-08-22 17:35:36] [Rank 0] step:9001/10000 train_time:874713ms step_avg:97.18ms +[2025-08-22 17:35:38] [Rank 0] step:9021/10000 train_time:876760ms step_avg:97.19ms +[2025-08-22 17:35:38] [Rank 0] step:9021/10000 train_time:876760ms step_avg:97.19ms +[2025-08-22 17:35:40] [Rank 0] step:9041/10000 train_time:878796ms step_avg:97.20ms +[2025-08-22 
17:35:40] [Rank 0] step:9041/10000 train_time:878796ms step_avg:97.20ms +[2025-08-22 17:35:42] [Rank 0] step:9061/10000 train_time:880842ms step_avg:97.21ms +[2025-08-22 17:35:42] [Rank 0] step:9061/10000 train_time:880842ms step_avg:97.21ms +[2025-08-22 17:35:44] [Rank 0] step:9081/10000 train_time:882888ms step_avg:97.22ms +[2025-08-22 17:35:44] [Rank 0] step:9081/10000 train_time:882888ms step_avg:97.22ms +[2025-08-22 17:35:47] [Rank 0] step:9101/10000 train_time:884947ms step_avg:97.24ms +[2025-08-22 17:35:47] [Rank 0] step:9101/10000 train_time:884947ms step_avg:97.24ms +[2025-08-22 17:35:49] [Rank 0] step:9121/10000 train_time:886991ms step_avg:97.25ms +[2025-08-22 17:35:49] [Rank 0] step:9121/10000 train_time:886991ms step_avg:97.25ms +[2025-08-22 17:35:51] [Rank 0] step:9141/10000 train_time:889024ms step_avg:97.26ms +[2025-08-22 17:35:51] [Rank 0] step:9141/10000 train_time:889024ms step_avg:97.26ms +[2025-08-22 17:35:53] [Rank 0] step:9161/10000 train_time:891062ms step_avg:97.27ms +[2025-08-22 17:35:53] [Rank 0] step:9161/10000 train_time:891062ms step_avg:97.27ms +[2025-08-22 17:35:55] [Rank 0] step:9181/10000 train_time:893142ms step_avg:97.28ms +[2025-08-22 17:35:55] [Rank 0] step:9181/10000 train_time:893142ms step_avg:97.28ms +[2025-08-22 17:35:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:35:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:36:11] [Rank 0] PRINT: step:9200/10000 val_loss:3.4606 svd_entropy: attn_qk:H=0.7686,top10E=0.25,eRank=194.4,q75/q25=56.12 attn_vo:H=0.8344,top10E=0.06,eRank=405.2,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.4,q75/q25=2.78 mlp_w2:H=0.9676,top10E=0.05,eRank=619.4,q75/q25=2.88 vo_prod:H=0.6934,top10E=0.11,eRank=230.1,q75/q25=inf train_time:895184ms step_avg:97.30ms +[2025-08-22 17:36:11] [Rank 0] PRINT: step:9200/10000 val_loss:3.4606 svd_entropy: attn_qk:H=0.7686,top10E=0.25,eRank=194.4,q75/q25=56.12 attn_vo:H=0.8344,top10E=0.06,eRank=405.2,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.4,q75/q25=2.78 mlp_w2:H=0.9676,top10E=0.05,eRank=619.4,q75/q25=2.88 vo_prod:H=0.6934,top10E=0.11,eRank=230.1,q75/q25=inf train_time:895184ms step_avg:97.30ms +[2025-08-22 17:36:11] [Rank 0] step:9201/10000 train_time:895207ms step_avg:97.29ms +[2025-08-22 17:36:11] [Rank 0] step:9201/10000 train_time:895207ms step_avg:97.29ms +[2025-08-22 17:36:13] [Rank 0] step:9221/10000 train_time:897254ms step_avg:97.31ms +[2025-08-22 17:36:13] [Rank 0] step:9221/10000 train_time:897254ms step_avg:97.31ms +[2025-08-22 17:36:15] [Rank 0] step:9241/10000 train_time:899307ms step_avg:97.32ms +[2025-08-22 17:36:15] [Rank 0] step:9241/10000 train_time:899307ms step_avg:97.32ms +[2025-08-22 17:36:17] [Rank 0] step:9261/10000 train_time:901357ms step_avg:97.33ms +[2025-08-22 17:36:17] [Rank 0] step:9261/10000 train_time:901357ms step_avg:97.33ms +[2025-08-22 17:36:19] [Rank 0] step:9281/10000 train_time:903400ms step_avg:97.34ms +[2025-08-22 17:36:19] [Rank 0] step:9281/10000 train_time:903400ms step_avg:97.34ms +[2025-08-22 17:36:21] [Rank 0] step:9301/10000 train_time:905445ms step_avg:97.35ms +[2025-08-22 17:36:21] [Rank 0] step:9301/10000 train_time:905445ms step_avg:97.35ms +[2025-08-22 17:36:23] [Rank 0] step:9321/10000 train_time:907498ms step_avg:97.36ms +[2025-08-22 17:36:23] [Rank 0] step:9321/10000 train_time:907498ms step_avg:97.36ms +[2025-08-22 17:36:25] [Rank 0] 
step:9341/10000 train_time:909547ms step_avg:97.37ms +[2025-08-22 17:36:25] [Rank 0] step:9341/10000 train_time:909547ms step_avg:97.37ms +[2025-08-22 17:36:27] [Rank 0] step:9361/10000 train_time:911600ms step_avg:97.38ms +[2025-08-22 17:36:27] [Rank 0] step:9361/10000 train_time:911600ms step_avg:97.38ms +[2025-08-22 17:36:29] [Rank 0] step:9381/10000 train_time:913662ms step_avg:97.39ms +[2025-08-22 17:36:29] [Rank 0] step:9381/10000 train_time:913662ms step_avg:97.39ms +[2025-08-22 17:36:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:36:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:36:45] [Rank 0] PRINT: step:9400/10000 val_loss:3.4516 svd_entropy: attn_qk:H=0.7687,top10E=0.25,eRank=194.5,q75/q25=55.98 attn_vo:H=0.8345,top10E=0.06,eRank=405.2,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.4,q75/q25=2.78 mlp_w2:H=0.9676,top10E=0.05,eRank=619.5,q75/q25=2.88 vo_prod:H=0.6935,top10E=0.11,eRank=230.3,q75/q25=inf train_time:915719ms step_avg:97.42ms +[2025-08-22 17:36:45] [Rank 0] PRINT: step:9400/10000 val_loss:3.4516 svd_entropy: attn_qk:H=0.7687,top10E=0.25,eRank=194.5,q75/q25=55.98 attn_vo:H=0.8345,top10E=0.06,eRank=405.2,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.4,q75/q25=2.78 mlp_w2:H=0.9676,top10E=0.05,eRank=619.5,q75/q25=2.88 vo_prod:H=0.6935,top10E=0.11,eRank=230.3,q75/q25=inf train_time:915719ms step_avg:97.42ms +[2025-08-22 17:36:45] [Rank 0] step:9401/10000 train_time:915742ms step_avg:97.41ms +[2025-08-22 17:36:45] [Rank 0] step:9401/10000 train_time:915742ms step_avg:97.41ms +[2025-08-22 17:36:47] [Rank 0] step:9421/10000 train_time:917788ms step_avg:97.42ms +[2025-08-22 17:36:47] [Rank 0] step:9421/10000 train_time:917788ms step_avg:97.42ms +[2025-08-22 17:36:49] [Rank 0] step:9441/10000 train_time:919838ms step_avg:97.43ms +[2025-08-22 
17:36:49] [Rank 0] step:9441/10000 train_time:919838ms step_avg:97.43ms +[2025-08-22 17:36:51] [Rank 0] step:9461/10000 train_time:921894ms step_avg:97.44ms +[2025-08-22 17:36:51] [Rank 0] step:9461/10000 train_time:921894ms step_avg:97.44ms +[2025-08-22 17:36:53] [Rank 0] step:9481/10000 train_time:923947ms step_avg:97.45ms +[2025-08-22 17:36:53] [Rank 0] step:9481/10000 train_time:923947ms step_avg:97.45ms +[2025-08-22 17:36:55] [Rank 0] step:9501/10000 train_time:926009ms step_avg:97.46ms +[2025-08-22 17:36:55] [Rank 0] step:9501/10000 train_time:926009ms step_avg:97.46ms +[2025-08-22 17:36:57] [Rank 0] step:9521/10000 train_time:928052ms step_avg:97.47ms +[2025-08-22 17:36:57] [Rank 0] step:9521/10000 train_time:928052ms step_avg:97.47ms +[2025-08-22 17:36:59] [Rank 0] step:9541/10000 train_time:930104ms step_avg:97.48ms +[2025-08-22 17:36:59] [Rank 0] step:9541/10000 train_time:930104ms step_avg:97.48ms +[2025-08-22 17:37:01] [Rank 0] step:9561/10000 train_time:932150ms step_avg:97.50ms +[2025-08-22 17:37:01] [Rank 0] step:9561/10000 train_time:932150ms step_avg:97.50ms +[2025-08-22 17:37:03] [Rank 0] step:9581/10000 train_time:934202ms step_avg:97.51ms +[2025-08-22 17:37:03] [Rank 0] step:9581/10000 train_time:934202ms step_avg:97.51ms +[2025-08-22 17:37:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:37:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:37:19] [Rank 0] PRINT: step:9600/10000 val_loss:3.4433 svd_entropy: attn_qk:H=0.7688,top10E=0.25,eRank=194.6,q75/q25=56.11 attn_vo:H=0.8345,top10E=0.06,eRank=405.2,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.4,q75/q25=2.78 mlp_w2:H=0.9676,top10E=0.05,eRank=619.6,q75/q25=2.88 vo_prod:H=0.6935,top10E=0.11,eRank=230.4,q75/q25=inf train_time:936268ms step_avg:97.53ms +[2025-08-22 17:37:19] [Rank 0] PRINT: step:9600/10000 val_loss:3.4433 svd_entropy: attn_qk:H=0.7688,top10E=0.25,eRank=194.6,q75/q25=56.11 attn_vo:H=0.8345,top10E=0.06,eRank=405.2,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.4,q75/q25=2.78 mlp_w2:H=0.9676,top10E=0.05,eRank=619.6,q75/q25=2.88 vo_prod:H=0.6935,top10E=0.11,eRank=230.4,q75/q25=inf train_time:936268ms step_avg:97.53ms +[2025-08-22 17:37:19] [Rank 0] step:9601/10000 train_time:936290ms step_avg:97.52ms +[2025-08-22 17:37:19] [Rank 0] step:9601/10000 train_time:936290ms step_avg:97.52ms +[2025-08-22 17:37:21] [Rank 0] step:9621/10000 train_time:938335ms step_avg:97.53ms +[2025-08-22 17:37:21] [Rank 0] step:9621/10000 train_time:938335ms step_avg:97.53ms +[2025-08-22 17:37:23] [Rank 0] step:9641/10000 train_time:940383ms step_avg:97.54ms +[2025-08-22 17:37:23] [Rank 0] step:9641/10000 train_time:940383ms step_avg:97.54ms +[2025-08-22 17:37:25] [Rank 0] step:9661/10000 train_time:942456ms step_avg:97.55ms +[2025-08-22 17:37:25] [Rank 0] step:9661/10000 train_time:942456ms step_avg:97.55ms +[2025-08-22 17:37:27] [Rank 0] step:9681/10000 train_time:944522ms step_avg:97.56ms +[2025-08-22 17:37:27] [Rank 0] step:9681/10000 train_time:944522ms step_avg:97.56ms +[2025-08-22 17:37:30] [Rank 0] step:9701/10000 train_time:946605ms step_avg:97.58ms +[2025-08-22 17:37:30] [Rank 0] step:9701/10000 train_time:946605ms step_avg:97.58ms +[2025-08-22 17:37:32] [Rank 0] step:9721/10000 train_time:948670ms step_avg:97.59ms +[2025-08-22 17:37:32] [Rank 0] step:9721/10000 train_time:948670ms step_avg:97.59ms +[2025-08-22 17:37:34] [Rank 0] 
step:9741/10000 train_time:950759ms step_avg:97.60ms +[2025-08-22 17:37:34] [Rank 0] step:9741/10000 train_time:950759ms step_avg:97.60ms +[2025-08-22 17:37:36] [Rank 0] step:9761/10000 train_time:952833ms step_avg:97.62ms +[2025-08-22 17:37:36] [Rank 0] step:9761/10000 train_time:952833ms step_avg:97.62ms +[2025-08-22 17:37:38] [Rank 0] step:9781/10000 train_time:954910ms step_avg:97.63ms +[2025-08-22 17:37:38] [Rank 0] step:9781/10000 train_time:954910ms step_avg:97.63ms +[2025-08-22 17:37:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:37:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:37:54] [Rank 0] PRINT: step:9800/10000 val_loss:3.4352 svd_entropy: attn_qk:H=0.7689,top10E=0.25,eRank=194.7,q75/q25=56.07 attn_vo:H=0.8345,top10E=0.06,eRank=405.3,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.5,q75/q25=2.78 mlp_w2:H=0.9676,top10E=0.05,eRank=619.6,q75/q25=2.88 vo_prod:H=0.6936,top10E=0.11,eRank=230.5,q75/q25=inf train_time:957000ms step_avg:97.65ms +[2025-08-22 17:37:54] [Rank 0] PRINT: step:9800/10000 val_loss:3.4352 svd_entropy: attn_qk:H=0.7689,top10E=0.25,eRank=194.7,q75/q25=56.07 attn_vo:H=0.8345,top10E=0.06,eRank=405.3,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.5,q75/q25=2.78 mlp_w2:H=0.9676,top10E=0.05,eRank=619.6,q75/q25=2.88 vo_prod:H=0.6936,top10E=0.11,eRank=230.5,q75/q25=inf train_time:957000ms step_avg:97.65ms +[2025-08-22 17:37:54] [Rank 0] step:9801/10000 train_time:957026ms step_avg:97.65ms +[2025-08-22 17:37:54] [Rank 0] step:9801/10000 train_time:957026ms step_avg:97.65ms +[2025-08-22 17:37:56] [Rank 0] step:9821/10000 train_time:959079ms step_avg:97.66ms +[2025-08-22 17:37:56] [Rank 0] step:9821/10000 train_time:959079ms step_avg:97.66ms +[2025-08-22 17:37:58] [Rank 0] step:9841/10000 train_time:961154ms step_avg:97.67ms +[2025-08-22 
17:37:58] [Rank 0] step:9841/10000 train_time:961154ms step_avg:97.67ms +[2025-08-22 17:38:00] [Rank 0] step:9861/10000 train_time:963213ms step_avg:97.68ms +[2025-08-22 17:38:00] [Rank 0] step:9861/10000 train_time:963213ms step_avg:97.68ms +[2025-08-22 17:38:02] [Rank 0] step:9881/10000 train_time:965269ms step_avg:97.69ms +[2025-08-22 17:38:02] [Rank 0] step:9881/10000 train_time:965269ms step_avg:97.69ms +[2025-08-22 17:38:04] [Rank 0] step:9901/10000 train_time:967350ms step_avg:97.70ms +[2025-08-22 17:38:04] [Rank 0] step:9901/10000 train_time:967350ms step_avg:97.70ms +[2025-08-22 17:38:06] [Rank 0] step:9921/10000 train_time:969416ms step_avg:97.71ms +[2025-08-22 17:38:06] [Rank 0] step:9921/10000 train_time:969416ms step_avg:97.71ms +[2025-08-22 17:38:08] [Rank 0] step:9941/10000 train_time:971497ms step_avg:97.73ms +[2025-08-22 17:38:08] [Rank 0] step:9941/10000 train_time:971497ms step_avg:97.73ms +[2025-08-22 17:38:10] [Rank 0] step:9961/10000 train_time:973621ms step_avg:97.74ms +[2025-08-22 17:38:10] [Rank 0] step:9961/10000 train_time:973621ms step_avg:97.74ms +[2025-08-22 17:38:12] [Rank 0] step:9981/10000 train_time:975758ms step_avg:97.76ms +[2025-08-22 17:38:12] [Rank 0] step:9981/10000 train_time:975758ms step_avg:97.76ms +[2025-08-22 17:38:14] [Rank 0] step:10000/10000 train_time:977732ms step_avg:97.77ms +[2025-08-22 17:38:14] [Rank 0] step:10000/10000 train_time:977732ms step_avg:97.77ms +[2025-08-22 17:38:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:38:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:38:28] [Rank 0] PRINT: step:10000/10000 val_loss:3.4285 svd_entropy: attn_qk:H=0.7690,top10E=0.25,eRank=194.7,q75/q25=56.08 attn_vo:H=0.8345,top10E=0.06,eRank=405.3,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.5,q75/q25=2.78 mlp_w2:H=0.9677,top10E=0.05,eRank=619.7,q75/q25=2.88 vo_prod:H=0.6936,top10E=0.11,eRank=230.5,q75/q25=inf train_time:977844ms step_avg:97.78ms +[2025-08-22 17:38:28] [Rank 0] PRINT: step:10000/10000 val_loss:3.4285 svd_entropy: attn_qk:H=0.7690,top10E=0.25,eRank=194.7,q75/q25=56.08 attn_vo:H=0.8345,top10E=0.06,eRank=405.3,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.5,q75/q25=2.78 mlp_w2:H=0.9677,top10E=0.05,eRank=619.7,q75/q25=2.88 vo_prod:H=0.6936,top10E=0.11,eRank=230.5,q75/q25=inf train_time:977844ms step_avg:97.78ms +[2025-08-22 17:38:28] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 17:38:28 2025 --- +[2025-08-22 17:38:28] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 17:38:28 2025 --- +[2025-08-22 17:38:28] [Rank 0] PRINT: Peak memory allocated: 11123 MiB reserved: 17036 MiB +[2025-08-22 17:38:28] [Rank 0] PRINT: Peak memory allocated: 11123 MiB reserved: 17036 MiB diff --git a/logs_svd_gated/mode_7_param_gated_seed_43/config.json b/logs_svd_gated/mode_7_param_gated_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..c0a3237c39fd703628c420958a77e1dc627ea246 --- /dev/null +++ b/logs_svd_gated/mode_7_param_gated_seed_43/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 7, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + "cooldown_frac": 
0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "2d32eac7-0481-4b39-8c07-ed0b3acb0d13", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_7_param_gated_seed_43/training_log_2d32eac7-0481-4b39-8c07-ed0b3acb0d13.txt b/logs_svd_gated/mode_7_param_gated_seed_43/training_log_2d32eac7-0481-4b39-8c07-ed0b3acb0d13.txt new file mode 100644 index 0000000000000000000000000000000000000000..bec2940fe4ef68a85e41f17d404e913d64e2881b --- /dev/null +++ b/logs_svd_gated/mode_7_param_gated_seed_43/training_log_2d32eac7-0481-4b39-8c07-ed0b3acb0d13.txt @@ -0,0 +1,2926 @@ +[2025-08-22 22:23:09] [Rank 0] PRINT: --- Script Start: Fri Aug 22 22:23:09 2025 --- +[2025-08-22 22:23:09] [Rank 0] PRINT: --- Script Start: Fri Aug 22 22:23:09 2025 --- +[2025-08-22 22:23:09] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=7, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 22:23:09] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=7, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 22:23:09] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 22:23:09] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 22:23:09] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 22:23:09] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 22:23:09] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_7_param_gated_seed_43 +[2025-08-22 22:23:09] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_7_param_gated_seed_43 +[2025-08-22 22:23:09] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import 
argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k 
= min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
# (other initial logs)

########################################
# Construct model and optimizer #
########################################
print0("PRINT: Constructing model...", console=True)
model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768,
                       max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda()
for m in model.modules():
    if isinstance(m, nn.Embedding):
        m.bfloat16()
print0("PRINT: Broadcasting model parameters...", console=True)
for param in model.parameters():
    dist.broadcast(param.detach(), 0)
print0("PRINT: Model constructed and broadcasted.", console=True)

# --- PARAMETER COLLECTION AND OPTIMIZER SETUP ---
# FIX: the "qkvo"/"norope" and "gated" parameterizations previously duplicated
# ~200 lines of identical grouping/mode-dispatch logic; the only real difference
# is that the gated MLP has an extra c_up projection (folded into the W_1
# group). One shared path removes that copy-paste maintenance hazard while
# defining exactly the same module-level names as before.
if exp_args.model_parameterization in ("qkvo", "norope", "gated"):
    is_gated = (exp_args.model_parameterization == "gated")
    print0("PRINT: Collecting parameters for optimizers...", console=True)
    head_params = [model.lm_head.weight]
    embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds]

    # Granular collection for attention and MLP parts
    attn_q_params, attn_k_params, attn_v_params = [], [], []
    attn_o_params = []  # W_O from c_proj
    mlp_fc_params, mlp_proj_params, mlp_up_params = [], [], []

    for block_module in model.blocks:
        if block_module.attn is not None:
            # q_w/k_w/v_w MUST exist on CausalSelfAttention for these parameterizations
            if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w)
            else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True)
            if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w)
            else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True)
            if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w)
            else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True)
            attn_o_params.append(block_module.attn.c_proj.weight)
        if block_module.mlp is not None:
            mlp_fc_params.append(block_module.mlp.c_fc.weight)
            mlp_proj_params.append(block_module.mlp.c_proj.weight)
            if is_gated:
                mlp_up_params.append(block_module.mlp.c_up.weight)

    # Combine into logical groups for experiments
    attn_qk_group = attn_q_params + attn_k_params
    attn_vo_group = attn_v_params + attn_o_params
    all_attn_matrices = attn_qk_group + attn_vo_group
    mlp_w1_group = mlp_fc_params + mlp_up_params  # mlp_up_params is empty unless gated
    mlp_w2_group = mlp_proj_params
    all_mlp_matrices = mlp_fc_params + mlp_proj_params + mlp_up_params

    # Scalar parameters (all others not explicitly grouped as matrices)
    matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices)
    scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check]
    for p_scalar in scalar_params:  # sanity check
        if p_scalar.ndim >= 2:
            print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True)

    # Determine parameter distribution based on optimizer_mode
    muon_params_target_list = []
    adam_matrix_target_list = []  # matrices that Adam will handle specifically
    adam_matrix_lr = exp_args.adam_lr

    current_optimizer_mode = exp_args.optimizer_mode
    print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True)

    if current_optimizer_mode == 0:  # original behavior: Muon on all hidden matrices
        print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True)
        muon_params_target_list = all_attn_matrices + all_mlp_matrices
        # Adam handles embeds, head, scalars by default; no extra matrices here.
    elif current_optimizer_mode == 1:
        print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_qk_group
        adam_matrix_target_list = attn_vo_group + all_mlp_matrices
    elif current_optimizer_mode == 2:
        print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_vo_group
        adam_matrix_target_list = attn_qk_group + all_mlp_matrices
    elif current_optimizer_mode == 3:
        print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = all_attn_matrices
        adam_matrix_target_list = all_mlp_matrices
    elif current_optimizer_mode == 4:
        print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = all_mlp_matrices
        adam_matrix_target_list = all_attn_matrices
    elif current_optimizer_mode == 5:  # all Adam, no Muon
        print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = []
        adam_matrix_target_list = all_attn_matrices + all_mlp_matrices
    elif current_optimizer_mode == 6:
        print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = mlp_w2_group
        adam_matrix_target_list = all_attn_matrices + mlp_w1_group
    elif current_optimizer_mode == 7:
        print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_vo_group + all_mlp_matrices
        adam_matrix_target_list = attn_qk_group
    elif current_optimizer_mode == 8:
        print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_vo_group + mlp_w2_group
        adam_matrix_target_list = attn_qk_group + mlp_w1_group
    elif current_optimizer_mode == 9:
        print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_v_params + all_mlp_matrices
        adam_matrix_target_list = attn_o_params + attn_qk_group
    elif current_optimizer_mode == 10:
        print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_o_params + all_mlp_matrices
        adam_matrix_target_list = attn_v_params + attn_qk_group
    else:
        raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}")

    # Adam optimizer setup: head/embeds/scalars always go to Adam
    adam_param_groups_config = [
        dict(params=head_params, lr=adam_matrix_lr),
        dict(params=embed_params, lr=adam_matrix_lr),
        dict(params=scalar_params, lr=adam_matrix_lr),
    ]
    # Add matrices specifically assigned to Adam for this experiment mode
    if adam_matrix_target_list:
        flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None]
        if flat_adam_matrices:
            adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr))

    # Drop any empty Adam groups (e.g. if scalar_params was empty)
    adam_param_groups_config = [g for g in adam_param_groups_config if g['params']]
    optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True)
    optimizers = [optimizer1]

    # Muon optimizer setup (deduplicate while preserving order)
    if muon_params_target_list:
        flat_unique_muon_params = []
        seen_muon_ids = set()
        for sublist_or_p in muon_params_target_list:
            for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]):
                if p is not None and id(p) not in seen_muon_ids:
                    flat_unique_muon_params.append(p)
                    seen_muon_ids.add(id(p))

        if flat_unique_muon_params:
            optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0)
            optimizers.append(optimizer2)
        else:
            print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True)
            optimizer2 = None
    else:
        print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True)
        optimizer2 = None

    print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True)
    if optimizer2:
        print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True)
elif exp_args.model_parameterization == "whole":
    hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n]
    embed_params = [p for n, p in model.named_parameters() if "embed" in n]
    scalar_params = [p for p in model.parameters() if p.ndim < 2]
    head_params = [model.lm_head.weight]

    # init the optimizer(s)
    adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)]
    # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
# 'model' for compilation, not 'model_compiled' before it's defined
model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune")
print0("PRINT: Model compilation complete.", console=True)

########################################
# Warmup kernels #
########################################
print0("PRINT: Starting warmup...", console=True)
warmup_steps = 10
# Snapshot model/optimizer state so the warmup steps leave no trace.
initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()),
                     optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers])
for _ in range(warmup_steps):
    inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda")
    loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0))
    loss.backward()
    for param in model_compiled.parameters():
        if param.grad is not None:
            dist.all_reduce(param.grad, op=dist.ReduceOp.AVG)
    for opt in optimizers:
        opt.step()
    model_compiled.zero_grad(set_to_none=True)
# Restore the pre-warmup state.
model_compiled.load_state_dict(initial_state["model"])
for opt, opt_state in zip(optimizers, initial_state["optimizers"]):
    opt.load_state_dict(opt_state)
del initial_state
print0("PRINT: Warmup complete.", console=True)
torch.cuda.synchronize()


# Matrices whose spectra get tracked during validation.
params_to_analyze = []

if exp_args.model_parameterization == "whole":
    params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad]
elif exp_args.model_parameterization in ("qkvo", "gated"):
    params_to_analyze = all_attn_matrices + all_mlp_matrices
    matrix_groups_for_svd = {}
    if master_process:
        matrix_groups_for_svd = {
            "attn_qk": attn_qk_group,
            "attn_vo": attn_vo_group,
            "mlp_w1": mlp_w1_group,
            "mlp_w2": mlp_proj_params,
        }


########################################
# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 22:23:09] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 22:23:09] [Rank 0] PRINT: Constructing model... +[2025-08-22 22:23:09] [Rank 0] PRINT: Constructing model... +[2025-08-22 22:23:11] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 22:23:11] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 22:23:11] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 22:23:11] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 22:23:11] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 22:23:11] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 22:23:11] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 7 +[2025-08-22 22:23:11] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 7 +[2025-08-22 22:23:11] [Rank 0] PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: 0.05). +[2025-08-22 22:23:11] [Rank 0] PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: 0.05). +[2025-08-22 22:23:11] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 22:23:11] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 22:23:11] [Rank 0] PRINT: Muon optimizer is active with 58 parameters. +[2025-08-22 22:23:11] [Rank 0] PRINT: Muon optimizer is active with 58 parameters. +[2025-08-22 22:23:11] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 22:23:11] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 22:23:11] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 22:23:11] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 22:23:11] [Rank 0] PRINT: Starting warmup... +[2025-08-22 22:23:11] [Rank 0] PRINT: Starting warmup... +[2025-08-22 22:23:55] [Rank 0] PRINT: Warmup complete. +[2025-08-22 22:23:55] [Rank 0] PRINT: Warmup complete. +[2025-08-22 22:23:55] [Rank 0] PRINT: Starting training... +[2025-08-22 22:23:55] [Rank 0] PRINT: Starting training... 
+[2025-08-22 22:23:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:23:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:24:13] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 22:24:13] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 22:24:15] [Rank 0] step:21/10000 train_time:1856ms step_avg:88.39ms +[2025-08-22 22:24:15] [Rank 0] step:21/10000 train_time:1856ms step_avg:88.39ms +[2025-08-22 22:24:17] [Rank 0] step:41/10000 train_time:3674ms step_avg:89.60ms +[2025-08-22 22:24:17] [Rank 0] step:41/10000 train_time:3674ms step_avg:89.60ms +[2025-08-22 22:24:19] [Rank 0] step:61/10000 train_time:5491ms step_avg:90.02ms +[2025-08-22 22:24:19] [Rank 0] step:61/10000 train_time:5491ms step_avg:90.02ms +[2025-08-22 22:24:20] [Rank 0] step:81/10000 train_time:7312ms step_avg:90.27ms +[2025-08-22 22:24:20] [Rank 0] step:81/10000 train_time:7312ms step_avg:90.27ms +[2025-08-22 22:24:22] [Rank 0] step:101/10000 train_time:9133ms step_avg:90.43ms +[2025-08-22 22:24:22] [Rank 0] step:101/10000 train_time:9133ms step_avg:90.43ms +[2025-08-22 22:24:24] [Rank 0] step:121/10000 train_time:10956ms step_avg:90.55ms +[2025-08-22 22:24:24] [Rank 0] step:121/10000 
train_time:10956ms step_avg:90.55ms +[2025-08-22 22:24:26] [Rank 0] step:141/10000 train_time:12778ms step_avg:90.62ms +[2025-08-22 22:24:26] [Rank 0] step:141/10000 train_time:12778ms step_avg:90.62ms +[2025-08-22 22:24:28] [Rank 0] step:161/10000 train_time:14603ms step_avg:90.70ms +[2025-08-22 22:24:28] [Rank 0] step:161/10000 train_time:14603ms step_avg:90.70ms +[2025-08-22 22:24:30] [Rank 0] step:181/10000 train_time:16429ms step_avg:90.77ms +[2025-08-22 22:24:30] [Rank 0] step:181/10000 train_time:16429ms step_avg:90.77ms +[2025-08-22 22:24:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:24:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:24:45] [Rank 0] PRINT: step:200/10000 val_loss:5.3424 svd_entropy: attn_qk:H=0.7006,top10E=0.37,eRank=147.5,q75/q25=38.34 attn_vo:H=0.8335,top10E=0.06,eRank=402.2,q75/q25=inf mlp_w1:H=0.9710,top10E=0.04,eRank=633.6,q75/q25=2.97 mlp_w2:H=0.9694,top10E=0.04,eRank=626.9,q75/q25=3.09 vo_prod:H=0.6835,top10E=0.11,eRank=211.7,q75/q25=inf train_time:18255ms step_avg:91.28ms +[2025-08-22 22:24:45] [Rank 0] PRINT: step:200/10000 val_loss:5.3424 svd_entropy: attn_qk:H=0.7006,top10E=0.37,eRank=147.5,q75/q25=38.34 attn_vo:H=0.8335,top10E=0.06,eRank=402.2,q75/q25=inf mlp_w1:H=0.9710,top10E=0.04,eRank=633.6,q75/q25=2.97 mlp_w2:H=0.9694,top10E=0.04,eRank=626.9,q75/q25=3.09 vo_prod:H=0.6835,top10E=0.11,eRank=211.7,q75/q25=inf train_time:18255ms step_avg:91.28ms +[2025-08-22 22:24:45] [Rank 0] step:201/10000 train_time:18279ms step_avg:90.94ms +[2025-08-22 22:24:45] [Rank 0] step:201/10000 train_time:18279ms step_avg:90.94ms +[2025-08-22 22:24:47] [Rank 0] step:221/10000 train_time:20087ms step_avg:90.89ms +[2025-08-22 22:24:47] [Rank 0] step:221/10000 train_time:20087ms step_avg:90.89ms +[2025-08-22 22:24:49] [Rank 0] step:241/10000 
train_time:21911ms step_avg:90.92ms +[2025-08-22 22:24:49] [Rank 0] step:241/10000 train_time:21911ms step_avg:90.92ms +[2025-08-22 22:24:51] [Rank 0] step:261/10000 train_time:23734ms step_avg:90.93ms +[2025-08-22 22:24:51] [Rank 0] step:261/10000 train_time:23734ms step_avg:90.93ms +[2025-08-22 22:24:53] [Rank 0] step:281/10000 train_time:25706ms step_avg:91.48ms +[2025-08-22 22:24:53] [Rank 0] step:281/10000 train_time:25706ms step_avg:91.48ms +[2025-08-22 22:24:55] [Rank 0] step:301/10000 train_time:27682ms step_avg:91.97ms +[2025-08-22 22:24:55] [Rank 0] step:301/10000 train_time:27682ms step_avg:91.97ms +[2025-08-22 22:24:57] [Rank 0] step:321/10000 train_time:29507ms step_avg:91.92ms +[2025-08-22 22:24:57] [Rank 0] step:321/10000 train_time:29507ms step_avg:91.92ms +[2025-08-22 22:24:58] [Rank 0] step:341/10000 train_time:31333ms step_avg:91.89ms +[2025-08-22 22:24:58] [Rank 0] step:341/10000 train_time:31333ms step_avg:91.89ms +[2025-08-22 22:25:00] [Rank 0] step:361/10000 train_time:33158ms step_avg:91.85ms +[2025-08-22 22:25:00] [Rank 0] step:361/10000 train_time:33158ms step_avg:91.85ms +[2025-08-22 22:25:02] [Rank 0] step:381/10000 train_time:34986ms step_avg:91.83ms +[2025-08-22 22:25:02] [Rank 0] step:381/10000 train_time:34986ms step_avg:91.83ms +[2025-08-22 22:25:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:25:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:25:18] [Rank 0] PRINT: step:400/10000 val_loss:5.0070 svd_entropy: attn_qk:H=0.7216,top10E=0.33,eRank=158.8,q75/q25=45.38 attn_vo:H=0.8379,top10E=0.05,eRank=415.1,q75/q25=inf mlp_w1:H=0.9702,top10E=0.04,eRank=630.5,q75/q25=2.99 mlp_w2:H=0.9692,top10E=0.04,eRank=625.9,q75/q25=3.06 vo_prod:H=0.6939,top10E=0.10,eRank=229.5,q75/q25=inf train_time:36814ms step_avg:92.04ms +[2025-08-22 22:25:18] [Rank 0] PRINT: step:400/10000 val_loss:5.0070 svd_entropy: attn_qk:H=0.7216,top10E=0.33,eRank=158.8,q75/q25=45.38 attn_vo:H=0.8379,top10E=0.05,eRank=415.1,q75/q25=inf mlp_w1:H=0.9702,top10E=0.04,eRank=630.5,q75/q25=2.99 mlp_w2:H=0.9692,top10E=0.04,eRank=625.9,q75/q25=3.06 vo_prod:H=0.6939,top10E=0.10,eRank=229.5,q75/q25=inf train_time:36814ms step_avg:92.04ms +[2025-08-22 22:25:18] [Rank 0] step:401/10000 train_time:36838ms step_avg:91.87ms +[2025-08-22 22:25:18] [Rank 0] step:401/10000 train_time:36838ms step_avg:91.87ms +[2025-08-22 22:25:20] [Rank 0] step:421/10000 train_time:38666ms step_avg:91.84ms +[2025-08-22 22:25:20] [Rank 0] step:421/10000 train_time:38666ms step_avg:91.84ms +[2025-08-22 22:25:22] [Rank 0] step:441/10000 train_time:40488ms step_avg:91.81ms +[2025-08-22 22:25:22] [Rank 0] step:441/10000 train_time:40488ms step_avg:91.81ms +[2025-08-22 22:25:23] [Rank 0] step:461/10000 train_time:42310ms step_avg:91.78ms +[2025-08-22 22:25:23] [Rank 0] step:461/10000 train_time:42310ms step_avg:91.78ms +[2025-08-22 22:25:25] [Rank 0] step:481/10000 train_time:44135ms step_avg:91.76ms +[2025-08-22 22:25:25] [Rank 0] step:481/10000 train_time:44135ms step_avg:91.76ms +[2025-08-22 22:25:27] [Rank 0] step:501/10000 train_time:45958ms step_avg:91.73ms +[2025-08-22 22:25:27] [Rank 0] step:501/10000 train_time:45958ms step_avg:91.73ms +[2025-08-22 22:25:29] [Rank 0] step:521/10000 train_time:47783ms step_avg:91.71ms +[2025-08-22 22:25:29] [Rank 0] step:521/10000 train_time:47783ms step_avg:91.71ms +[2025-08-22 22:25:31] [Rank 0] step:541/10000 train_time:49607ms 
step_avg:91.69ms +[2025-08-22 22:25:31] [Rank 0] step:541/10000 train_time:49607ms step_avg:91.69ms +[2025-08-22 22:25:33] [Rank 0] step:561/10000 train_time:51433ms step_avg:91.68ms +[2025-08-22 22:25:33] [Rank 0] step:561/10000 train_time:51433ms step_avg:91.68ms +[2025-08-22 22:25:34] [Rank 0] step:581/10000 train_time:53259ms step_avg:91.67ms +[2025-08-22 22:25:34] [Rank 0] step:581/10000 train_time:53259ms step_avg:91.67ms +[2025-08-22 22:25:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:25:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:25:50] [Rank 0] PRINT: step:600/10000 val_loss:4.7956 svd_entropy: attn_qk:H=0.7297,top10E=0.31,eRank=163.9,q75/q25=48.51 attn_vo:H=0.8365,top10E=0.05,eRank=411.1,q75/q25=inf mlp_w1:H=0.9698,top10E=0.04,eRank=628.7,q75/q25=3.00 mlp_w2:H=0.9683,top10E=0.04,eRank=622.3,q75/q25=3.07 vo_prod:H=0.6925,top10E=0.10,eRank=227.1,q75/q25=inf train_time:55086ms step_avg:91.81ms +[2025-08-22 22:25:50] [Rank 0] PRINT: step:600/10000 val_loss:4.7956 svd_entropy: attn_qk:H=0.7297,top10E=0.31,eRank=163.9,q75/q25=48.51 attn_vo:H=0.8365,top10E=0.05,eRank=411.1,q75/q25=inf mlp_w1:H=0.9698,top10E=0.04,eRank=628.7,q75/q25=3.00 mlp_w2:H=0.9683,top10E=0.04,eRank=622.3,q75/q25=3.07 vo_prod:H=0.6925,top10E=0.10,eRank=227.1,q75/q25=inf train_time:55086ms step_avg:91.81ms +[2025-08-22 22:25:50] [Rank 0] step:601/10000 train_time:55109ms step_avg:91.70ms +[2025-08-22 22:25:50] [Rank 0] step:601/10000 train_time:55109ms step_avg:91.70ms +[2025-08-22 22:25:52] [Rank 0] step:621/10000 train_time:56932ms step_avg:91.68ms +[2025-08-22 22:25:52] [Rank 0] step:621/10000 train_time:56932ms step_avg:91.68ms +[2025-08-22 22:25:54] [Rank 0] step:641/10000 train_time:58753ms step_avg:91.66ms +[2025-08-22 22:25:54] [Rank 0] step:641/10000 train_time:58753ms 
step_avg:91.66ms +[2025-08-22 22:25:56] [Rank 0] step:661/10000 train_time:60678ms step_avg:91.80ms +[2025-08-22 22:25:56] [Rank 0] step:661/10000 train_time:60678ms step_avg:91.80ms +[2025-08-22 22:25:58] [Rank 0] step:681/10000 train_time:62569ms step_avg:91.88ms +[2025-08-22 22:25:58] [Rank 0] step:681/10000 train_time:62569ms step_avg:91.88ms +[2025-08-22 22:25:59] [Rank 0] step:701/10000 train_time:64396ms step_avg:91.86ms +[2025-08-22 22:25:59] [Rank 0] step:701/10000 train_time:64396ms step_avg:91.86ms +[2025-08-22 22:26:01] [Rank 0] step:721/10000 train_time:66223ms step_avg:91.85ms +[2025-08-22 22:26:01] [Rank 0] step:721/10000 train_time:66223ms step_avg:91.85ms +[2025-08-22 22:26:03] [Rank 0] step:741/10000 train_time:68054ms step_avg:91.84ms +[2025-08-22 22:26:03] [Rank 0] step:741/10000 train_time:68054ms step_avg:91.84ms +[2025-08-22 22:26:05] [Rank 0] step:761/10000 train_time:69895ms step_avg:91.85ms +[2025-08-22 22:26:05] [Rank 0] step:761/10000 train_time:69895ms step_avg:91.85ms +[2025-08-22 22:26:07] [Rank 0] step:781/10000 train_time:71737ms step_avg:91.85ms +[2025-08-22 22:26:07] [Rank 0] step:781/10000 train_time:71737ms step_avg:91.85ms +[2025-08-22 22:26:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:26:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:26:23] [Rank 0] PRINT: step:800/10000 val_loss:4.5728 svd_entropy: attn_qk:H=0.7350,top10E=0.30,eRank=167.3,q75/q25=47.66 attn_vo:H=0.8355,top10E=0.05,eRank=407.8,q75/q25=inf mlp_w1:H=0.9700,top10E=0.04,eRank=629.3,q75/q25=2.98 mlp_w2:H=0.9679,top10E=0.04,eRank=620.6,q75/q25=3.05 vo_prod:H=0.6916,top10E=0.10,eRank=225.5,q75/q25=inf train_time:73582ms step_avg:91.98ms +[2025-08-22 22:26:23] [Rank 0] PRINT: step:800/10000 val_loss:4.5728 svd_entropy: attn_qk:H=0.7350,top10E=0.30,eRank=167.3,q75/q25=47.66 attn_vo:H=0.8355,top10E=0.05,eRank=407.8,q75/q25=inf mlp_w1:H=0.9700,top10E=0.04,eRank=629.3,q75/q25=2.98 mlp_w2:H=0.9679,top10E=0.04,eRank=620.6,q75/q25=3.05 vo_prod:H=0.6916,top10E=0.10,eRank=225.5,q75/q25=inf train_time:73582ms step_avg:91.98ms +[2025-08-22 22:26:23] [Rank 0] step:801/10000 train_time:73605ms step_avg:91.89ms +[2025-08-22 22:26:23] [Rank 0] step:801/10000 train_time:73605ms step_avg:91.89ms +[2025-08-22 22:26:24] [Rank 0] step:821/10000 train_time:75437ms step_avg:91.88ms +[2025-08-22 22:26:24] [Rank 0] step:821/10000 train_time:75437ms step_avg:91.88ms +[2025-08-22 22:26:26] [Rank 0] step:841/10000 train_time:77275ms step_avg:91.88ms +[2025-08-22 22:26:26] [Rank 0] step:841/10000 train_time:77275ms step_avg:91.88ms +[2025-08-22 22:26:28] [Rank 0] step:861/10000 train_time:79111ms step_avg:91.88ms +[2025-08-22 22:26:28] [Rank 0] step:861/10000 train_time:79111ms step_avg:91.88ms +[2025-08-22 22:26:30] [Rank 0] step:881/10000 train_time:80949ms step_avg:91.88ms +[2025-08-22 22:26:30] [Rank 0] step:881/10000 train_time:80949ms step_avg:91.88ms +[2025-08-22 22:26:32] [Rank 0] step:901/10000 train_time:82787ms step_avg:91.88ms +[2025-08-22 22:26:32] [Rank 0] step:901/10000 train_time:82787ms step_avg:91.88ms +[2025-08-22 22:26:34] [Rank 0] step:921/10000 train_time:84625ms step_avg:91.88ms +[2025-08-22 22:26:34] [Rank 0] step:921/10000 train_time:84625ms step_avg:91.88ms +[2025-08-22 22:26:36] [Rank 0] step:941/10000 train_time:86463ms 
step_avg:91.88ms +[2025-08-22 22:26:36] [Rank 0] step:941/10000 train_time:86463ms step_avg:91.88ms +[2025-08-22 22:26:37] [Rank 0] step:961/10000 train_time:88302ms step_avg:91.89ms +[2025-08-22 22:26:37] [Rank 0] step:961/10000 train_time:88302ms step_avg:91.89ms +[2025-08-22 22:26:39] [Rank 0] step:981/10000 train_time:90141ms step_avg:91.89ms +[2025-08-22 22:26:39] [Rank 0] step:981/10000 train_time:90141ms step_avg:91.89ms +[2025-08-22 22:26:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:26:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:26:55] [Rank 0] PRINT: step:1000/10000 val_loss:4.4120 svd_entropy: attn_qk:H=0.7388,top10E=0.30,eRank=169.9,q75/q25=47.72 attn_vo:H=0.8350,top10E=0.05,eRank=406.6,q75/q25=inf mlp_w1:H=0.9702,top10E=0.04,eRank=630.3,q75/q25=2.96 mlp_w2:H=0.9677,top10E=0.05,eRank=619.6,q75/q25=3.04 vo_prod:H=0.6913,top10E=0.10,eRank=225.1,q75/q25=inf train_time:91982ms step_avg:91.98ms +[2025-08-22 22:26:55] [Rank 0] PRINT: step:1000/10000 val_loss:4.4120 svd_entropy: attn_qk:H=0.7388,top10E=0.30,eRank=169.9,q75/q25=47.72 attn_vo:H=0.8350,top10E=0.05,eRank=406.6,q75/q25=inf mlp_w1:H=0.9702,top10E=0.04,eRank=630.3,q75/q25=2.96 mlp_w2:H=0.9677,top10E=0.05,eRank=619.6,q75/q25=3.04 vo_prod:H=0.6913,top10E=0.10,eRank=225.1,q75/q25=inf train_time:91982ms step_avg:91.98ms +[2025-08-22 22:26:55] [Rank 0] step:1001/10000 train_time:92006ms step_avg:91.91ms +[2025-08-22 22:26:55] [Rank 0] step:1001/10000 train_time:92006ms step_avg:91.91ms +[2025-08-22 22:26:57] [Rank 0] step:1021/10000 train_time:93849ms step_avg:91.92ms +[2025-08-22 22:26:57] [Rank 0] step:1021/10000 train_time:93849ms step_avg:91.92ms +[2025-08-22 22:26:59] [Rank 0] step:1041/10000 train_time:95839ms step_avg:92.06ms +[2025-08-22 22:26:59] [Rank 0] step:1041/10000 train_time:95839ms 
step_avg:92.06ms +[2025-08-22 22:27:01] [Rank 0] step:1061/10000 train_time:97753ms step_avg:92.13ms +[2025-08-22 22:27:01] [Rank 0] step:1061/10000 train_time:97753ms step_avg:92.13ms +[2025-08-22 22:27:03] [Rank 0] step:1081/10000 train_time:99589ms step_avg:92.13ms +[2025-08-22 22:27:03] [Rank 0] step:1081/10000 train_time:99589ms step_avg:92.13ms +[2025-08-22 22:27:05] [Rank 0] step:1101/10000 train_time:101427ms step_avg:92.12ms +[2025-08-22 22:27:05] [Rank 0] step:1101/10000 train_time:101427ms step_avg:92.12ms +[2025-08-22 22:27:06] [Rank 0] step:1121/10000 train_time:103264ms step_avg:92.12ms +[2025-08-22 22:27:06] [Rank 0] step:1121/10000 train_time:103264ms step_avg:92.12ms +[2025-08-22 22:27:08] [Rank 0] step:1141/10000 train_time:105105ms step_avg:92.12ms +[2025-08-22 22:27:08] [Rank 0] step:1141/10000 train_time:105105ms step_avg:92.12ms +[2025-08-22 22:27:10] [Rank 0] step:1161/10000 train_time:106942ms step_avg:92.11ms +[2025-08-22 22:27:10] [Rank 0] step:1161/10000 train_time:106942ms step_avg:92.11ms +[2025-08-22 22:27:12] [Rank 0] step:1181/10000 train_time:108781ms step_avg:92.11ms +[2025-08-22 22:27:12] [Rank 0] step:1181/10000 train_time:108781ms step_avg:92.11ms +[2025-08-22 22:27:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:27:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:27:27] [Rank 0] PRINT: step:1200/10000 val_loss:4.3017 svd_entropy: attn_qk:H=0.7415,top10E=0.29,eRank=171.8,q75/q25=48.37 attn_vo:H=0.8347,top10E=0.05,eRank=405.5,q75/q25=inf mlp_w1:H=0.9704,top10E=0.04,eRank=631.0,q75/q25=2.94 mlp_w2:H=0.9675,top10E=0.05,eRank=618.9,q75/q25=3.03 vo_prod:H=0.6910,top10E=0.10,eRank=224.6,q75/q25=inf train_time:110622ms step_avg:92.18ms +[2025-08-22 22:27:27] [Rank 0] PRINT: step:1200/10000 val_loss:4.3017 svd_entropy: attn_qk:H=0.7415,top10E=0.29,eRank=171.8,q75/q25=48.37 attn_vo:H=0.8347,top10E=0.05,eRank=405.5,q75/q25=inf mlp_w1:H=0.9704,top10E=0.04,eRank=631.0,q75/q25=2.94 mlp_w2:H=0.9675,top10E=0.05,eRank=618.9,q75/q25=3.03 vo_prod:H=0.6910,top10E=0.10,eRank=224.6,q75/q25=inf train_time:110622ms step_avg:92.18ms +[2025-08-22 22:27:27] [Rank 0] step:1201/10000 train_time:110645ms step_avg:92.13ms +[2025-08-22 22:27:27] [Rank 0] step:1201/10000 train_time:110645ms step_avg:92.13ms +[2025-08-22 22:27:29] [Rank 0] step:1221/10000 train_time:112471ms step_avg:92.11ms +[2025-08-22 22:27:29] [Rank 0] step:1221/10000 train_time:112471ms step_avg:92.11ms +[2025-08-22 22:27:31] [Rank 0] step:1241/10000 train_time:114306ms step_avg:92.11ms +[2025-08-22 22:27:31] [Rank 0] step:1241/10000 train_time:114306ms step_avg:92.11ms +[2025-08-22 22:27:33] [Rank 0] step:1261/10000 train_time:116145ms step_avg:92.11ms +[2025-08-22 22:27:33] [Rank 0] step:1261/10000 train_time:116145ms step_avg:92.11ms +[2025-08-22 22:27:35] [Rank 0] step:1281/10000 train_time:117984ms step_avg:92.10ms +[2025-08-22 22:27:35] [Rank 0] step:1281/10000 train_time:117984ms step_avg:92.10ms +[2025-08-22 22:27:37] [Rank 0] step:1301/10000 train_time:119824ms step_avg:92.10ms +[2025-08-22 22:27:37] [Rank 0] step:1301/10000 train_time:119824ms step_avg:92.10ms +[2025-08-22 22:27:39] [Rank 0] step:1321/10000 train_time:121665ms step_avg:92.10ms +[2025-08-22 22:27:39] [Rank 0] step:1321/10000 train_time:121665ms step_avg:92.10ms +[2025-08-22 22:27:40] [Rank 0] 
step:1341/10000 train_time:123507ms step_avg:92.10ms +[2025-08-22 22:27:40] [Rank 0] step:1341/10000 train_time:123507ms step_avg:92.10ms +[2025-08-22 22:27:42] [Rank 0] step:1361/10000 train_time:125352ms step_avg:92.10ms +[2025-08-22 22:27:42] [Rank 0] step:1361/10000 train_time:125352ms step_avg:92.10ms +[2025-08-22 22:27:44] [Rank 0] step:1381/10000 train_time:127196ms step_avg:92.10ms +[2025-08-22 22:27:44] [Rank 0] step:1381/10000 train_time:127196ms step_avg:92.10ms +[2025-08-22 22:27:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:27:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:28:00] [Rank 0] PRINT: step:1400/10000 val_loss:4.2465 svd_entropy: attn_qk:H=0.7437,top10E=0.29,eRank=173.5,q75/q25=49.19 attn_vo:H=0.8344,top10E=0.05,eRank=404.8,q75/q25=inf mlp_w1:H=0.9706,top10E=0.04,eRank=631.8,q75/q25=2.92 mlp_w2:H=0.9673,top10E=0.05,eRank=618.4,q75/q25=3.01 vo_prod:H=0.6905,top10E=0.10,eRank=223.8,q75/q25=inf train_time:129040ms step_avg:92.17ms +[2025-08-22 22:28:00] [Rank 0] PRINT: step:1400/10000 val_loss:4.2465 svd_entropy: attn_qk:H=0.7437,top10E=0.29,eRank=173.5,q75/q25=49.19 attn_vo:H=0.8344,top10E=0.05,eRank=404.8,q75/q25=inf mlp_w1:H=0.9706,top10E=0.04,eRank=631.8,q75/q25=2.92 mlp_w2:H=0.9673,top10E=0.05,eRank=618.4,q75/q25=3.01 vo_prod:H=0.6905,top10E=0.10,eRank=223.8,q75/q25=inf train_time:129040ms step_avg:92.17ms +[2025-08-22 22:28:00] [Rank 0] step:1401/10000 train_time:129062ms step_avg:92.12ms +[2025-08-22 22:28:00] [Rank 0] step:1401/10000 train_time:129062ms step_avg:92.12ms +[2025-08-22 22:28:02] [Rank 0] step:1421/10000 train_time:131026ms step_avg:92.21ms +[2025-08-22 22:28:02] [Rank 0] step:1421/10000 train_time:131026ms step_avg:92.21ms +[2025-08-22 22:28:04] [Rank 0] step:1441/10000 train_time:132979ms step_avg:92.28ms +[2025-08-22 
22:28:04] [Rank 0] step:1441/10000 train_time:132979ms step_avg:92.28ms +[2025-08-22 22:28:06] [Rank 0] step:1461/10000 train_time:134814ms step_avg:92.28ms +[2025-08-22 22:28:06] [Rank 0] step:1461/10000 train_time:134814ms step_avg:92.28ms +[2025-08-22 22:28:08] [Rank 0] step:1481/10000 train_time:136650ms step_avg:92.27ms +[2025-08-22 22:28:08] [Rank 0] step:1481/10000 train_time:136650ms step_avg:92.27ms +[2025-08-22 22:28:09] [Rank 0] step:1501/10000 train_time:138494ms step_avg:92.27ms +[2025-08-22 22:28:09] [Rank 0] step:1501/10000 train_time:138494ms step_avg:92.27ms +[2025-08-22 22:28:11] [Rank 0] step:1521/10000 train_time:140340ms step_avg:92.27ms +[2025-08-22 22:28:11] [Rank 0] step:1521/10000 train_time:140340ms step_avg:92.27ms +[2025-08-22 22:28:13] [Rank 0] step:1541/10000 train_time:142189ms step_avg:92.27ms +[2025-08-22 22:28:13] [Rank 0] step:1541/10000 train_time:142189ms step_avg:92.27ms +[2025-08-22 22:28:15] [Rank 0] step:1561/10000 train_time:144037ms step_avg:92.27ms +[2025-08-22 22:28:15] [Rank 0] step:1561/10000 train_time:144037ms step_avg:92.27ms +[2025-08-22 22:28:17] [Rank 0] step:1581/10000 train_time:145887ms step_avg:92.27ms +[2025-08-22 22:28:17] [Rank 0] step:1581/10000 train_time:145887ms step_avg:92.27ms +[2025-08-22 22:28:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:28:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:28:32] [Rank 0] PRINT: step:1600/10000 val_loss:4.1492 svd_entropy: attn_qk:H=0.7456,top10E=0.28,eRank=175.0,q75/q25=49.72 attn_vo:H=0.8342,top10E=0.05,eRank=404.2,q75/q25=inf mlp_w1:H=0.9707,top10E=0.04,eRank=632.3,q75/q25=2.91 mlp_w2:H=0.9672,top10E=0.05,eRank=618.0,q75/q25=3.00 vo_prod:H=0.6900,top10E=0.10,eRank=223.0,q75/q25=inf train_time:147740ms step_avg:92.34ms +[2025-08-22 22:28:32] [Rank 0] PRINT: step:1600/10000 val_loss:4.1492 svd_entropy: attn_qk:H=0.7456,top10E=0.28,eRank=175.0,q75/q25=49.72 attn_vo:H=0.8342,top10E=0.05,eRank=404.2,q75/q25=inf mlp_w1:H=0.9707,top10E=0.04,eRank=632.3,q75/q25=2.91 mlp_w2:H=0.9672,top10E=0.05,eRank=618.0,q75/q25=3.00 vo_prod:H=0.6900,top10E=0.10,eRank=223.0,q75/q25=inf train_time:147740ms step_avg:92.34ms +[2025-08-22 22:28:33] [Rank 0] step:1601/10000 train_time:147763ms step_avg:92.29ms +[2025-08-22 22:28:33] [Rank 0] step:1601/10000 train_time:147763ms step_avg:92.29ms +[2025-08-22 22:28:34] [Rank 0] step:1621/10000 train_time:149615ms step_avg:92.30ms +[2025-08-22 22:28:34] [Rank 0] step:1621/10000 train_time:149615ms step_avg:92.30ms +[2025-08-22 22:28:36] [Rank 0] step:1641/10000 train_time:151457ms step_avg:92.30ms +[2025-08-22 22:28:36] [Rank 0] step:1641/10000 train_time:151457ms step_avg:92.30ms +[2025-08-22 22:28:38] [Rank 0] step:1661/10000 train_time:153302ms step_avg:92.30ms +[2025-08-22 22:28:38] [Rank 0] step:1661/10000 train_time:153302ms step_avg:92.30ms +[2025-08-22 22:28:40] [Rank 0] step:1681/10000 train_time:155148ms step_avg:92.29ms +[2025-08-22 22:28:40] [Rank 0] step:1681/10000 train_time:155148ms step_avg:92.29ms +[2025-08-22 22:28:42] [Rank 0] step:1701/10000 train_time:156994ms step_avg:92.30ms +[2025-08-22 22:28:42] [Rank 0] step:1701/10000 train_time:156994ms step_avg:92.30ms +[2025-08-22 22:28:44] [Rank 0] step:1721/10000 train_time:158841ms step_avg:92.30ms +[2025-08-22 22:28:44] [Rank 0] step:1721/10000 train_time:158841ms step_avg:92.30ms +[2025-08-22 22:28:46] [Rank 0] 
step:1741/10000 train_time:160689ms step_avg:92.30ms +[2025-08-22 22:28:46] [Rank 0] step:1741/10000 train_time:160689ms step_avg:92.30ms +[2025-08-22 22:28:47] [Rank 0] step:1761/10000 train_time:162536ms step_avg:92.30ms +[2025-08-22 22:28:47] [Rank 0] step:1761/10000 train_time:162536ms step_avg:92.30ms +[2025-08-22 22:28:49] [Rank 0] step:1781/10000 train_time:164386ms step_avg:92.30ms +[2025-08-22 22:28:49] [Rank 0] step:1781/10000 train_time:164386ms step_avg:92.30ms +[2025-08-22 22:28:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:28:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:29:05] [Rank 0] PRINT: step:1800/10000 val_loss:4.0917 svd_entropy: attn_qk:H=0.7472,top10E=0.28,eRank=176.2,q75/q25=50.15 attn_vo:H=0.8340,top10E=0.05,eRank=403.8,q75/q25=inf mlp_w1:H=0.9708,top10E=0.04,eRank=632.7,q75/q25=2.89 mlp_w2:H=0.9672,top10E=0.05,eRank=617.7,q75/q25=2.99 vo_prod:H=0.6897,top10E=0.10,eRank=222.6,q75/q25=inf train_time:166234ms step_avg:92.35ms +[2025-08-22 22:29:05] [Rank 0] PRINT: step:1800/10000 val_loss:4.0917 svd_entropy: attn_qk:H=0.7472,top10E=0.28,eRank=176.2,q75/q25=50.15 attn_vo:H=0.8340,top10E=0.05,eRank=403.8,q75/q25=inf mlp_w1:H=0.9708,top10E=0.04,eRank=632.7,q75/q25=2.89 mlp_w2:H=0.9672,top10E=0.05,eRank=617.7,q75/q25=2.99 vo_prod:H=0.6897,top10E=0.10,eRank=222.6,q75/q25=inf train_time:166234ms step_avg:92.35ms +[2025-08-22 22:29:05] [Rank 0] step:1801/10000 train_time:166258ms step_avg:92.31ms +[2025-08-22 22:29:05] [Rank 0] step:1801/10000 train_time:166258ms step_avg:92.31ms +[2025-08-22 22:29:07] [Rank 0] step:1821/10000 train_time:168198ms step_avg:92.37ms +[2025-08-22 22:29:07] [Rank 0] step:1821/10000 train_time:168198ms step_avg:92.37ms +[2025-08-22 22:29:09] [Rank 0] step:1841/10000 train_time:170042ms step_avg:92.36ms +[2025-08-22 
22:29:09] [Rank 0] step:1841/10000 train_time:170042ms step_avg:92.36ms +[2025-08-22 22:29:11] [Rank 0] step:1861/10000 train_time:171886ms step_avg:92.36ms +[2025-08-22 22:29:11] [Rank 0] step:1861/10000 train_time:171886ms step_avg:92.36ms +[2025-08-22 22:29:13] [Rank 0] step:1881/10000 train_time:173730ms step_avg:92.36ms +[2025-08-22 22:29:13] [Rank 0] step:1881/10000 train_time:173730ms step_avg:92.36ms +[2025-08-22 22:29:14] [Rank 0] step:1901/10000 train_time:175576ms step_avg:92.36ms +[2025-08-22 22:29:14] [Rank 0] step:1901/10000 train_time:175576ms step_avg:92.36ms +[2025-08-22 22:29:16] [Rank 0] step:1921/10000 train_time:177422ms step_avg:92.36ms +[2025-08-22 22:29:16] [Rank 0] step:1921/10000 train_time:177422ms step_avg:92.36ms +[2025-08-22 22:29:18] [Rank 0] step:1941/10000 train_time:179269ms step_avg:92.36ms +[2025-08-22 22:29:18] [Rank 0] step:1941/10000 train_time:179269ms step_avg:92.36ms +[2025-08-22 22:29:20] [Rank 0] step:1961/10000 train_time:181117ms step_avg:92.36ms +[2025-08-22 22:29:20] [Rank 0] step:1961/10000 train_time:181117ms step_avg:92.36ms +[2025-08-22 22:29:22] [Rank 0] step:1981/10000 train_time:182966ms step_avg:92.36ms +[2025-08-22 22:29:22] [Rank 0] step:1981/10000 train_time:182966ms step_avg:92.36ms +[2025-08-22 22:29:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:29:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:29:38] [Rank 0] PRINT: step:2000/10000 val_loss:4.0616 svd_entropy: attn_qk:H=0.7488,top10E=0.28,eRank=177.3,q75/q25=50.37 attn_vo:H=0.8340,top10E=0.05,eRank=403.7,q75/q25=inf mlp_w1:H=0.9709,top10E=0.04,eRank=633.1,q75/q25=2.88 mlp_w2:H=0.9671,top10E=0.05,eRank=617.5,q75/q25=2.98 vo_prod:H=0.6895,top10E=0.10,eRank=222.5,q75/q25=inf train_time:184817ms step_avg:92.41ms +[2025-08-22 22:29:38] [Rank 0] PRINT: step:2000/10000 val_loss:4.0616 svd_entropy: attn_qk:H=0.7488,top10E=0.28,eRank=177.3,q75/q25=50.37 attn_vo:H=0.8340,top10E=0.05,eRank=403.7,q75/q25=inf mlp_w1:H=0.9709,top10E=0.04,eRank=633.1,q75/q25=2.88 mlp_w2:H=0.9671,top10E=0.05,eRank=617.5,q75/q25=2.98 vo_prod:H=0.6895,top10E=0.10,eRank=222.5,q75/q25=inf train_time:184817ms step_avg:92.41ms +[2025-08-22 22:29:38] [Rank 0] step:2001/10000 train_time:184840ms step_avg:92.37ms +[2025-08-22 22:29:38] [Rank 0] step:2001/10000 train_time:184840ms step_avg:92.37ms +[2025-08-22 22:29:40] [Rank 0] step:2021/10000 train_time:186678ms step_avg:92.37ms +[2025-08-22 22:29:40] [Rank 0] step:2021/10000 train_time:186678ms step_avg:92.37ms +[2025-08-22 22:29:42] [Rank 0] step:2041/10000 train_time:188773ms step_avg:92.49ms +[2025-08-22 22:29:42] [Rank 0] step:2041/10000 train_time:188773ms step_avg:92.49ms +[2025-08-22 22:29:43] [Rank 0] step:2061/10000 train_time:190620ms step_avg:92.49ms +[2025-08-22 22:29:43] [Rank 0] step:2061/10000 train_time:190620ms step_avg:92.49ms +[2025-08-22 22:29:45] [Rank 0] step:2081/10000 train_time:192469ms step_avg:92.49ms +[2025-08-22 22:29:45] [Rank 0] step:2081/10000 train_time:192469ms step_avg:92.49ms +[2025-08-22 22:29:47] [Rank 0] step:2101/10000 train_time:194319ms step_avg:92.49ms +[2025-08-22 22:29:47] [Rank 0] step:2101/10000 train_time:194319ms step_avg:92.49ms +[2025-08-22 22:29:49] [Rank 0] step:2121/10000 train_time:196170ms step_avg:92.49ms +[2025-08-22 22:29:49] [Rank 0] step:2121/10000 train_time:196170ms step_avg:92.49ms +[2025-08-22 22:29:51] [Rank 0] 
step:2141/10000 train_time:198021ms step_avg:92.49ms +[2025-08-22 22:29:51] [Rank 0] step:2141/10000 train_time:198021ms step_avg:92.49ms +[2025-08-22 22:29:53] [Rank 0] step:2161/10000 train_time:199875ms step_avg:92.49ms +[2025-08-22 22:29:53] [Rank 0] step:2161/10000 train_time:199875ms step_avg:92.49ms +[2025-08-22 22:29:55] [Rank 0] step:2181/10000 train_time:201727ms step_avg:92.49ms +[2025-08-22 22:29:55] [Rank 0] step:2181/10000 train_time:201727ms step_avg:92.49ms +[2025-08-22 22:29:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:29:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:30:10] [Rank 0] PRINT: step:2200/10000 val_loss:4.0183 svd_entropy: attn_qk:H=0.7502,top10E=0.28,eRank=178.5,q75/q25=50.87 attn_vo:H=0.8339,top10E=0.05,eRank=403.6,q75/q25=inf mlp_w1:H=0.9710,top10E=0.04,eRank=633.6,q75/q25=2.88 mlp_w2:H=0.9671,top10E=0.05,eRank=617.3,q75/q25=2.97 vo_prod:H=0.6895,top10E=0.10,eRank=222.4,q75/q25=inf train_time:203583ms step_avg:92.54ms +[2025-08-22 22:30:10] [Rank 0] PRINT: step:2200/10000 val_loss:4.0183 svd_entropy: attn_qk:H=0.7502,top10E=0.28,eRank=178.5,q75/q25=50.87 attn_vo:H=0.8339,top10E=0.05,eRank=403.6,q75/q25=inf mlp_w1:H=0.9710,top10E=0.04,eRank=633.6,q75/q25=2.88 mlp_w2:H=0.9671,top10E=0.05,eRank=617.3,q75/q25=2.97 vo_prod:H=0.6895,top10E=0.10,eRank=222.4,q75/q25=inf train_time:203583ms step_avg:92.54ms +[2025-08-22 22:30:10] [Rank 0] step:2201/10000 train_time:203606ms step_avg:92.51ms +[2025-08-22 22:30:10] [Rank 0] step:2201/10000 train_time:203606ms step_avg:92.51ms +[2025-08-22 22:30:12] [Rank 0] step:2221/10000 train_time:205444ms step_avg:92.50ms +[2025-08-22 22:30:12] [Rank 0] step:2221/10000 train_time:205444ms step_avg:92.50ms +[2025-08-22 22:30:14] [Rank 0] step:2241/10000 train_time:207325ms step_avg:92.51ms +[2025-08-22 
22:30:14] [Rank 0] step:2241/10000 train_time:207325ms step_avg:92.51ms +[2025-08-22 22:30:16] [Rank 0] step:2261/10000 train_time:209214ms step_avg:92.53ms +[2025-08-22 22:30:16] [Rank 0] step:2261/10000 train_time:209214ms step_avg:92.53ms +[2025-08-22 22:30:18] [Rank 0] step:2281/10000 train_time:211107ms step_avg:92.55ms +[2025-08-22 22:30:18] [Rank 0] step:2281/10000 train_time:211107ms step_avg:92.55ms +[2025-08-22 22:30:20] [Rank 0] step:2301/10000 train_time:212999ms step_avg:92.57ms +[2025-08-22 22:30:20] [Rank 0] step:2301/10000 train_time:212999ms step_avg:92.57ms +[2025-08-22 22:30:22] [Rank 0] step:2321/10000 train_time:214891ms step_avg:92.59ms +[2025-08-22 22:30:22] [Rank 0] step:2321/10000 train_time:214891ms step_avg:92.59ms +[2025-08-22 22:30:24] [Rank 0] step:2341/10000 train_time:216784ms step_avg:92.60ms +[2025-08-22 22:30:24] [Rank 0] step:2341/10000 train_time:216784ms step_avg:92.60ms +[2025-08-22 22:30:25] [Rank 0] step:2361/10000 train_time:218677ms step_avg:92.62ms +[2025-08-22 22:30:25] [Rank 0] step:2361/10000 train_time:218677ms step_avg:92.62ms +[2025-08-22 22:30:27] [Rank 0] step:2381/10000 train_time:220571ms step_avg:92.64ms +[2025-08-22 22:30:27] [Rank 0] step:2381/10000 train_time:220571ms step_avg:92.64ms +[2025-08-22 22:30:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:30:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:30:43] [Rank 0] PRINT: step:2400/10000 val_loss:3.9537 svd_entropy: attn_qk:H=0.7514,top10E=0.28,eRank=179.5,q75/q25=50.80 attn_vo:H=0.8340,top10E=0.05,eRank=403.6,q75/q25=inf mlp_w1:H=0.9711,top10E=0.04,eRank=634.1,q75/q25=2.87 mlp_w2:H=0.9671,top10E=0.05,eRank=617.2,q75/q25=2.96 vo_prod:H=0.6897,top10E=0.10,eRank=222.9,q75/q25=inf train_time:222466ms step_avg:92.69ms +[2025-08-22 22:30:43] [Rank 0] PRINT: step:2400/10000 val_loss:3.9537 svd_entropy: attn_qk:H=0.7514,top10E=0.28,eRank=179.5,q75/q25=50.80 attn_vo:H=0.8340,top10E=0.05,eRank=403.6,q75/q25=inf mlp_w1:H=0.9711,top10E=0.04,eRank=634.1,q75/q25=2.87 mlp_w2:H=0.9671,top10E=0.05,eRank=617.2,q75/q25=2.96 vo_prod:H=0.6897,top10E=0.10,eRank=222.9,q75/q25=inf train_time:222466ms step_avg:92.69ms +[2025-08-22 22:30:43] [Rank 0] step:2401/10000 train_time:222489ms step_avg:92.67ms +[2025-08-22 22:30:43] [Rank 0] step:2401/10000 train_time:222489ms step_avg:92.67ms +[2025-08-22 22:30:45] [Rank 0] step:2421/10000 train_time:224372ms step_avg:92.68ms +[2025-08-22 22:30:45] [Rank 0] step:2421/10000 train_time:224372ms step_avg:92.68ms +[2025-08-22 22:30:47] [Rank 0] step:2441/10000 train_time:226260ms step_avg:92.69ms +[2025-08-22 22:30:47] [Rank 0] step:2441/10000 train_time:226260ms step_avg:92.69ms +[2025-08-22 22:30:49] [Rank 0] step:2461/10000 train_time:228149ms step_avg:92.71ms +[2025-08-22 22:30:49] [Rank 0] step:2461/10000 train_time:228149ms step_avg:92.71ms +[2025-08-22 22:30:51] [Rank 0] step:2481/10000 train_time:230037ms step_avg:92.72ms +[2025-08-22 22:30:51] [Rank 0] step:2481/10000 train_time:230037ms step_avg:92.72ms +[2025-08-22 22:30:53] [Rank 0] step:2501/10000 train_time:231927ms step_avg:92.73ms +[2025-08-22 22:30:53] [Rank 0] step:2501/10000 train_time:231927ms step_avg:92.73ms +[2025-08-22 22:30:55] [Rank 0] step:2521/10000 train_time:233817ms step_avg:92.75ms +[2025-08-22 22:30:55] [Rank 0] step:2521/10000 train_time:233817ms step_avg:92.75ms +[2025-08-22 22:30:57] [Rank 0] 
step:2541/10000 train_time:235710ms step_avg:92.76ms +[2025-08-22 22:30:57] [Rank 0] step:2541/10000 train_time:235710ms step_avg:92.76ms +[2025-08-22 22:30:58] [Rank 0] step:2561/10000 train_time:237602ms step_avg:92.78ms +[2025-08-22 22:30:58] [Rank 0] step:2561/10000 train_time:237602ms step_avg:92.78ms +[2025-08-22 22:31:00] [Rank 0] step:2581/10000 train_time:239494ms step_avg:92.79ms +[2025-08-22 22:31:00] [Rank 0] step:2581/10000 train_time:239494ms step_avg:92.79ms +[2025-08-22 22:31:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:31:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:31:16] [Rank 0] PRINT: step:2600/10000 val_loss:3.9302 svd_entropy: attn_qk:H=0.7527,top10E=0.27,eRank=180.5,q75/q25=51.17 attn_vo:H=0.8340,top10E=0.05,eRank=403.8,q75/q25=inf mlp_w1:H=0.9712,top10E=0.04,eRank=634.5,q75/q25=2.86 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.95 vo_prod:H=0.6900,top10E=0.10,eRank=223.5,q75/q25=inf train_time:241389ms step_avg:92.84ms +[2025-08-22 22:31:16] [Rank 0] PRINT: step:2600/10000 val_loss:3.9302 svd_entropy: attn_qk:H=0.7527,top10E=0.27,eRank=180.5,q75/q25=51.17 attn_vo:H=0.8340,top10E=0.05,eRank=403.8,q75/q25=inf mlp_w1:H=0.9712,top10E=0.04,eRank=634.5,q75/q25=2.86 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.95 vo_prod:H=0.6900,top10E=0.10,eRank=223.5,q75/q25=inf train_time:241389ms step_avg:92.84ms +[2025-08-22 22:31:16] [Rank 0] step:2601/10000 train_time:241412ms step_avg:92.81ms +[2025-08-22 22:31:16] [Rank 0] step:2601/10000 train_time:241412ms step_avg:92.81ms +[2025-08-22 22:31:18] [Rank 0] step:2621/10000 train_time:243292ms step_avg:92.82ms +[2025-08-22 22:31:18] [Rank 0] step:2621/10000 train_time:243292ms step_avg:92.82ms +[2025-08-22 22:31:20] [Rank 0] step:2641/10000 train_time:245179ms step_avg:92.84ms +[2025-08-22 
22:31:20] [Rank 0] step:2641/10000 train_time:245179ms step_avg:92.84ms +[2025-08-22 22:31:22] [Rank 0] step:2661/10000 train_time:247067ms step_avg:92.85ms +[2025-08-22 22:31:22] [Rank 0] step:2661/10000 train_time:247067ms step_avg:92.85ms +[2025-08-22 22:31:24] [Rank 0] step:2681/10000 train_time:248957ms step_avg:92.86ms +[2025-08-22 22:31:24] [Rank 0] step:2681/10000 train_time:248957ms step_avg:92.86ms +[2025-08-22 22:31:26] [Rank 0] step:2701/10000 train_time:250847ms step_avg:92.87ms +[2025-08-22 22:31:26] [Rank 0] step:2701/10000 train_time:250847ms step_avg:92.87ms +[2025-08-22 22:31:28] [Rank 0] step:2721/10000 train_time:252740ms step_avg:92.88ms +[2025-08-22 22:31:28] [Rank 0] step:2721/10000 train_time:252740ms step_avg:92.88ms +[2025-08-22 22:31:29] [Rank 0] step:2741/10000 train_time:254632ms step_avg:92.90ms +[2025-08-22 22:31:29] [Rank 0] step:2741/10000 train_time:254632ms step_avg:92.90ms +[2025-08-22 22:31:31] [Rank 0] step:2761/10000 train_time:256525ms step_avg:92.91ms +[2025-08-22 22:31:31] [Rank 0] step:2761/10000 train_time:256525ms step_avg:92.91ms +[2025-08-22 22:31:33] [Rank 0] step:2781/10000 train_time:258418ms step_avg:92.92ms +[2025-08-22 22:31:33] [Rank 0] step:2781/10000 train_time:258418ms step_avg:92.92ms +[2025-08-22 22:31:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:31:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:31:49] [Rank 0] PRINT: step:2800/10000 val_loss:3.9074 svd_entropy: attn_qk:H=0.7540,top10E=0.27,eRank=181.6,q75/q25=51.09 attn_vo:H=0.8341,top10E=0.05,eRank=403.9,q75/q25=inf mlp_w1:H=0.9713,top10E=0.04,eRank=634.9,q75/q25=2.85 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.95 vo_prod:H=0.6903,top10E=0.10,eRank=224.0,q75/q25=inf train_time:260316ms step_avg:92.97ms +[2025-08-22 22:31:49] [Rank 0] PRINT: step:2800/10000 val_loss:3.9074 svd_entropy: attn_qk:H=0.7540,top10E=0.27,eRank=181.6,q75/q25=51.09 attn_vo:H=0.8341,top10E=0.05,eRank=403.9,q75/q25=inf mlp_w1:H=0.9713,top10E=0.04,eRank=634.9,q75/q25=2.85 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.95 vo_prod:H=0.6903,top10E=0.10,eRank=224.0,q75/q25=inf train_time:260316ms step_avg:92.97ms +[2025-08-22 22:31:49] [Rank 0] step:2801/10000 train_time:260338ms step_avg:92.94ms +[2025-08-22 22:31:49] [Rank 0] step:2801/10000 train_time:260338ms step_avg:92.94ms +[2025-08-22 22:31:51] [Rank 0] step:2821/10000 train_time:262217ms step_avg:92.95ms +[2025-08-22 22:31:51] [Rank 0] step:2821/10000 train_time:262217ms step_avg:92.95ms +[2025-08-22 22:31:53] [Rank 0] step:2841/10000 train_time:264108ms step_avg:92.96ms +[2025-08-22 22:31:53] [Rank 0] step:2841/10000 train_time:264108ms step_avg:92.96ms +[2025-08-22 22:31:55] [Rank 0] step:2861/10000 train_time:266002ms step_avg:92.98ms +[2025-08-22 22:31:55] [Rank 0] step:2861/10000 train_time:266002ms step_avg:92.98ms +[2025-08-22 22:31:57] [Rank 0] step:2881/10000 train_time:267895ms step_avg:92.99ms +[2025-08-22 22:31:57] [Rank 0] step:2881/10000 train_time:267895ms step_avg:92.99ms +[2025-08-22 22:31:59] [Rank 0] step:2901/10000 train_time:269791ms step_avg:93.00ms +[2025-08-22 22:31:59] [Rank 0] step:2901/10000 train_time:269791ms step_avg:93.00ms +[2025-08-22 22:32:00] [Rank 0] step:2921/10000 train_time:271686ms step_avg:93.01ms +[2025-08-22 22:32:00] [Rank 0] step:2921/10000 train_time:271686ms step_avg:93.01ms +[2025-08-22 22:32:02] [Rank 0] 
step:2941/10000 train_time:273586ms step_avg:93.02ms +[2025-08-22 22:32:02] [Rank 0] step:2941/10000 train_time:273586ms step_avg:93.02ms +[2025-08-22 22:32:04] [Rank 0] step:2961/10000 train_time:275483ms step_avg:93.04ms +[2025-08-22 22:32:04] [Rank 0] step:2961/10000 train_time:275483ms step_avg:93.04ms +[2025-08-22 22:32:06] [Rank 0] step:2981/10000 train_time:277387ms step_avg:93.05ms +[2025-08-22 22:32:06] [Rank 0] step:2981/10000 train_time:277387ms step_avg:93.05ms +[2025-08-22 22:32:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:32:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:32:22] [Rank 0] PRINT: step:3000/10000 val_loss:3.8769 svd_entropy: attn_qk:H=0.7552,top10E=0.27,eRank=182.6,q75/q25=51.28 attn_vo:H=0.8341,top10E=0.05,eRank=404.1,q75/q25=inf mlp_w1:H=0.9714,top10E=0.04,eRank=635.2,q75/q25=2.85 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.94 vo_prod:H=0.6905,top10E=0.10,eRank=224.5,q75/q25=inf train_time:279295ms step_avg:93.10ms +[2025-08-22 22:32:22] [Rank 0] PRINT: step:3000/10000 val_loss:3.8769 svd_entropy: attn_qk:H=0.7552,top10E=0.27,eRank=182.6,q75/q25=51.28 attn_vo:H=0.8341,top10E=0.05,eRank=404.1,q75/q25=inf mlp_w1:H=0.9714,top10E=0.04,eRank=635.2,q75/q25=2.85 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.94 vo_prod:H=0.6905,top10E=0.10,eRank=224.5,q75/q25=inf train_time:279295ms step_avg:93.10ms +[2025-08-22 22:32:22] [Rank 0] step:3001/10000 train_time:279317ms step_avg:93.07ms +[2025-08-22 22:32:22] [Rank 0] step:3001/10000 train_time:279317ms step_avg:93.07ms +[2025-08-22 22:32:24] [Rank 0] step:3021/10000 train_time:281224ms step_avg:93.09ms +[2025-08-22 22:32:24] [Rank 0] step:3021/10000 train_time:281224ms step_avg:93.09ms +[2025-08-22 22:32:26] [Rank 0] step:3041/10000 train_time:283120ms step_avg:93.10ms +[2025-08-22 
22:32:26] [Rank 0] step:3041/10000 train_time:283120ms step_avg:93.10ms +[2025-08-22 22:32:28] [Rank 0] step:3061/10000 train_time:285016ms step_avg:93.11ms +[2025-08-22 22:32:28] [Rank 0] step:3061/10000 train_time:285016ms step_avg:93.11ms +[2025-08-22 22:32:30] [Rank 0] step:3081/10000 train_time:286916ms step_avg:93.12ms +[2025-08-22 22:32:30] [Rank 0] step:3081/10000 train_time:286916ms step_avg:93.12ms +[2025-08-22 22:32:32] [Rank 0] step:3101/10000 train_time:288816ms step_avg:93.14ms +[2025-08-22 22:32:32] [Rank 0] step:3101/10000 train_time:288816ms step_avg:93.14ms +[2025-08-22 22:32:34] [Rank 0] step:3121/10000 train_time:290715ms step_avg:93.15ms +[2025-08-22 22:32:34] [Rank 0] step:3121/10000 train_time:290715ms step_avg:93.15ms +[2025-08-22 22:32:35] [Rank 0] step:3141/10000 train_time:292615ms step_avg:93.16ms +[2025-08-22 22:32:35] [Rank 0] step:3141/10000 train_time:292615ms step_avg:93.16ms +[2025-08-22 22:32:37] [Rank 0] step:3161/10000 train_time:294516ms step_avg:93.17ms +[2025-08-22 22:32:37] [Rank 0] step:3161/10000 train_time:294516ms step_avg:93.17ms +[2025-08-22 22:32:39] [Rank 0] step:3181/10000 train_time:296416ms step_avg:93.18ms +[2025-08-22 22:32:39] [Rank 0] step:3181/10000 train_time:296416ms step_avg:93.18ms +[2025-08-22 22:32:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:32:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:32:55] [Rank 0] PRINT: step:3200/10000 val_loss:3.8511 svd_entropy: attn_qk:H=0.7562,top10E=0.27,eRank=183.4,q75/q25=51.38 attn_vo:H=0.8341,top10E=0.05,eRank=404.2,q75/q25=inf mlp_w1:H=0.9715,top10E=0.04,eRank=635.6,q75/q25=2.83 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.94 vo_prod:H=0.6906,top10E=0.10,eRank=224.7,q75/q25=inf train_time:298321ms step_avg:93.23ms +[2025-08-22 22:32:55] [Rank 0] PRINT: step:3200/10000 val_loss:3.8511 svd_entropy: attn_qk:H=0.7562,top10E=0.27,eRank=183.4,q75/q25=51.38 attn_vo:H=0.8341,top10E=0.05,eRank=404.2,q75/q25=inf mlp_w1:H=0.9715,top10E=0.04,eRank=635.6,q75/q25=2.83 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.94 vo_prod:H=0.6906,top10E=0.10,eRank=224.7,q75/q25=inf train_time:298321ms step_avg:93.23ms +[2025-08-22 22:32:55] [Rank 0] step:3201/10000 train_time:298344ms step_avg:93.20ms +[2025-08-22 22:32:55] [Rank 0] step:3201/10000 train_time:298344ms step_avg:93.20ms +[2025-08-22 22:32:57] [Rank 0] step:3221/10000 train_time:300244ms step_avg:93.21ms +[2025-08-22 22:32:57] [Rank 0] step:3221/10000 train_time:300244ms step_avg:93.21ms +[2025-08-22 22:32:59] [Rank 0] step:3241/10000 train_time:302139ms step_avg:93.22ms +[2025-08-22 22:32:59] [Rank 0] step:3241/10000 train_time:302139ms step_avg:93.22ms +[2025-08-22 22:33:01] [Rank 0] step:3261/10000 train_time:304035ms step_avg:93.23ms +[2025-08-22 22:33:01] [Rank 0] step:3261/10000 train_time:304035ms step_avg:93.23ms +[2025-08-22 22:33:03] [Rank 0] step:3281/10000 train_time:305932ms step_avg:93.24ms +[2025-08-22 22:33:03] [Rank 0] step:3281/10000 train_time:305932ms step_avg:93.24ms +[2025-08-22 22:33:05] [Rank 0] step:3301/10000 train_time:307831ms step_avg:93.25ms +[2025-08-22 22:33:05] [Rank 0] step:3301/10000 train_time:307831ms step_avg:93.25ms +[2025-08-22 22:33:07] [Rank 0] step:3321/10000 train_time:309731ms step_avg:93.26ms +[2025-08-22 22:33:07] [Rank 0] step:3321/10000 train_time:309731ms step_avg:93.26ms +[2025-08-22 22:33:08] [Rank 0] 
step:3341/10000 train_time:311632ms step_avg:93.28ms +[2025-08-22 22:33:08] [Rank 0] step:3341/10000 train_time:311632ms step_avg:93.28ms +[2025-08-22 22:33:10] [Rank 0] step:3361/10000 train_time:313531ms step_avg:93.28ms +[2025-08-22 22:33:10] [Rank 0] step:3361/10000 train_time:313531ms step_avg:93.28ms +[2025-08-22 22:33:12] [Rank 0] step:3381/10000 train_time:315433ms step_avg:93.30ms +[2025-08-22 22:33:12] [Rank 0] step:3381/10000 train_time:315433ms step_avg:93.30ms +[2025-08-22 22:33:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:33:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:33:28] [Rank 0] PRINT: step:3400/10000 val_loss:3.8278 svd_entropy: attn_qk:H=0.7574,top10E=0.27,eRank=184.4,q75/q25=51.25 attn_vo:H=0.8342,top10E=0.05,eRank=404.3,q75/q25=inf mlp_w1:H=0.9715,top10E=0.04,eRank=635.8,q75/q25=2.83 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.93 vo_prod:H=0.6908,top10E=0.10,eRank=225.1,q75/q25=inf train_time:317332ms step_avg:93.33ms +[2025-08-22 22:33:28] [Rank 0] PRINT: step:3400/10000 val_loss:3.8278 svd_entropy: attn_qk:H=0.7574,top10E=0.27,eRank=184.4,q75/q25=51.25 attn_vo:H=0.8342,top10E=0.05,eRank=404.3,q75/q25=inf mlp_w1:H=0.9715,top10E=0.04,eRank=635.8,q75/q25=2.83 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.93 vo_prod:H=0.6908,top10E=0.10,eRank=225.1,q75/q25=inf train_time:317332ms step_avg:93.33ms +[2025-08-22 22:33:28] [Rank 0] step:3401/10000 train_time:317355ms step_avg:93.31ms +[2025-08-22 22:33:28] [Rank 0] step:3401/10000 train_time:317355ms step_avg:93.31ms +[2025-08-22 22:33:30] [Rank 0] step:3421/10000 train_time:319250ms step_avg:93.32ms +[2025-08-22 22:33:30] [Rank 0] step:3421/10000 train_time:319250ms step_avg:93.32ms +[2025-08-22 22:33:32] [Rank 0] step:3441/10000 train_time:321144ms step_avg:93.33ms +[2025-08-22 
22:33:32] [Rank 0] step:3441/10000 train_time:321144ms step_avg:93.33ms +[2025-08-22 22:33:34] [Rank 0] step:3461/10000 train_time:323042ms step_avg:93.34ms +[2025-08-22 22:33:34] [Rank 0] step:3461/10000 train_time:323042ms step_avg:93.34ms +[2025-08-22 22:33:36] [Rank 0] step:3481/10000 train_time:324937ms step_avg:93.35ms +[2025-08-22 22:33:36] [Rank 0] step:3481/10000 train_time:324937ms step_avg:93.35ms +[2025-08-22 22:33:38] [Rank 0] step:3501/10000 train_time:326838ms step_avg:93.36ms +[2025-08-22 22:33:38] [Rank 0] step:3501/10000 train_time:326838ms step_avg:93.36ms +[2025-08-22 22:33:40] [Rank 0] step:3521/10000 train_time:328739ms step_avg:93.37ms +[2025-08-22 22:33:40] [Rank 0] step:3521/10000 train_time:328739ms step_avg:93.37ms +[2025-08-22 22:33:41] [Rank 0] step:3541/10000 train_time:330639ms step_avg:93.37ms +[2025-08-22 22:33:41] [Rank 0] step:3541/10000 train_time:330639ms step_avg:93.37ms +[2025-08-22 22:33:43] [Rank 0] step:3561/10000 train_time:332540ms step_avg:93.38ms +[2025-08-22 22:33:43] [Rank 0] step:3561/10000 train_time:332540ms step_avg:93.38ms +[2025-08-22 22:33:45] [Rank 0] step:3581/10000 train_time:334442ms step_avg:93.39ms +[2025-08-22 22:33:45] [Rank 0] step:3581/10000 train_time:334442ms step_avg:93.39ms +[2025-08-22 22:33:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:33:47] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:34:01] [Rank 0] PRINT: step:3600/10000 val_loss:3.8205 svd_entropy: attn_qk:H=0.7584,top10E=0.27,eRank=185.3,q75/q25=51.49 attn_vo:H=0.8342,top10E=0.05,eRank=404.5,q75/q25=inf mlp_w1:H=0.9716,top10E=0.04,eRank=636.1,q75/q25=2.83 mlp_w2:H=0.9670,top10E=0.05,eRank=617.2,q75/q25=2.93 vo_prod:H=0.6910,top10E=0.10,eRank=225.5,q75/q25=inf train_time:336348ms step_avg:93.43ms +[2025-08-22 22:34:01] [Rank 0] PRINT: step:3600/10000 val_loss:3.8205 svd_entropy: attn_qk:H=0.7584,top10E=0.27,eRank=185.3,q75/q25=51.49 attn_vo:H=0.8342,top10E=0.05,eRank=404.5,q75/q25=inf mlp_w1:H=0.9716,top10E=0.04,eRank=636.1,q75/q25=2.83 mlp_w2:H=0.9670,top10E=0.05,eRank=617.2,q75/q25=2.93 vo_prod:H=0.6910,top10E=0.10,eRank=225.5,q75/q25=inf train_time:336348ms step_avg:93.43ms +[2025-08-22 22:34:01] [Rank 0] step:3601/10000 train_time:336371ms step_avg:93.41ms +[2025-08-22 22:34:01] [Rank 0] step:3601/10000 train_time:336371ms step_avg:93.41ms +[2025-08-22 22:34:03] [Rank 0] step:3621/10000 train_time:338283ms step_avg:93.42ms +[2025-08-22 22:34:03] [Rank 0] step:3621/10000 train_time:338283ms step_avg:93.42ms +[2025-08-22 22:34:05] [Rank 0] step:3641/10000 train_time:340184ms step_avg:93.43ms +[2025-08-22 22:34:05] [Rank 0] step:3641/10000 train_time:340184ms step_avg:93.43ms +[2025-08-22 22:34:07] [Rank 0] step:3661/10000 train_time:342085ms step_avg:93.44ms +[2025-08-22 22:34:07] [Rank 0] step:3661/10000 train_time:342085ms step_avg:93.44ms +[2025-08-22 22:34:09] [Rank 0] step:3681/10000 train_time:343986ms step_avg:93.45ms +[2025-08-22 22:34:09] [Rank 0] step:3681/10000 train_time:343986ms step_avg:93.45ms +[2025-08-22 22:34:11] [Rank 0] step:3701/10000 train_time:345890ms step_avg:93.46ms +[2025-08-22 22:34:11] [Rank 0] step:3701/10000 train_time:345890ms step_avg:93.46ms +[2025-08-22 22:34:13] [Rank 0] step:3721/10000 train_time:347822ms step_avg:93.48ms +[2025-08-22 22:34:13] [Rank 0] step:3721/10000 train_time:347822ms step_avg:93.48ms +[2025-08-22 22:34:15] [Rank 0] 
step:3741/10000 train_time:349765ms step_avg:93.50ms +[2025-08-22 22:34:15] [Rank 0] step:3741/10000 train_time:349765ms step_avg:93.50ms +[2025-08-22 22:34:17] [Rank 0] step:3761/10000 train_time:351706ms step_avg:93.51ms +[2025-08-22 22:34:17] [Rank 0] step:3761/10000 train_time:351706ms step_avg:93.51ms +[2025-08-22 22:34:18] [Rank 0] step:3781/10000 train_time:353648ms step_avg:93.53ms +[2025-08-22 22:34:18] [Rank 0] step:3781/10000 train_time:353648ms step_avg:93.53ms +[2025-08-22 22:34:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:34:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:34:34] [Rank 0] PRINT: step:3800/10000 val_loss:3.7909 svd_entropy: attn_qk:H=0.7594,top10E=0.26,eRank=186.1,q75/q25=51.92 attn_vo:H=0.8342,top10E=0.05,eRank=404.5,q75/q25=inf mlp_w1:H=0.9716,top10E=0.04,eRank=636.3,q75/q25=2.82 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.92 vo_prod:H=0.6911,top10E=0.10,eRank=225.7,q75/q25=inf train_time:355593ms step_avg:93.58ms +[2025-08-22 22:34:34] [Rank 0] PRINT: step:3800/10000 val_loss:3.7909 svd_entropy: attn_qk:H=0.7594,top10E=0.26,eRank=186.1,q75/q25=51.92 attn_vo:H=0.8342,top10E=0.05,eRank=404.5,q75/q25=inf mlp_w1:H=0.9716,top10E=0.04,eRank=636.3,q75/q25=2.82 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.92 vo_prod:H=0.6911,top10E=0.10,eRank=225.7,q75/q25=inf train_time:355593ms step_avg:93.58ms +[2025-08-22 22:34:34] [Rank 0] step:3801/10000 train_time:355616ms step_avg:93.56ms +[2025-08-22 22:34:34] [Rank 0] step:3801/10000 train_time:355616ms step_avg:93.56ms +[2025-08-22 22:34:36] [Rank 0] step:3821/10000 train_time:357541ms step_avg:93.57ms +[2025-08-22 22:34:36] [Rank 0] step:3821/10000 train_time:357541ms step_avg:93.57ms +[2025-08-22 22:34:38] [Rank 0] step:3841/10000 train_time:359478ms step_avg:93.59ms +[2025-08-22 
22:34:38] [Rank 0] step:3841/10000 train_time:359478ms step_avg:93.59ms +[2025-08-22 22:34:40] [Rank 0] step:3861/10000 train_time:361415ms step_avg:93.61ms +[2025-08-22 22:34:40] [Rank 0] step:3861/10000 train_time:361415ms step_avg:93.61ms +[2025-08-22 22:34:42] [Rank 0] step:3881/10000 train_time:363351ms step_avg:93.62ms +[2025-08-22 22:34:42] [Rank 0] step:3881/10000 train_time:363351ms step_avg:93.62ms +[2025-08-22 22:34:44] [Rank 0] step:3901/10000 train_time:365285ms step_avg:93.64ms +[2025-08-22 22:34:44] [Rank 0] step:3901/10000 train_time:365285ms step_avg:93.64ms +[2025-08-22 22:34:46] [Rank 0] step:3921/10000 train_time:367221ms step_avg:93.65ms +[2025-08-22 22:34:46] [Rank 0] step:3921/10000 train_time:367221ms step_avg:93.65ms +[2025-08-22 22:34:48] [Rank 0] step:3941/10000 train_time:369157ms step_avg:93.67ms +[2025-08-22 22:34:48] [Rank 0] step:3941/10000 train_time:369157ms step_avg:93.67ms +[2025-08-22 22:34:50] [Rank 0] step:3961/10000 train_time:371092ms step_avg:93.69ms +[2025-08-22 22:34:50] [Rank 0] step:3961/10000 train_time:371092ms step_avg:93.69ms +[2025-08-22 22:34:52] [Rank 0] step:3981/10000 train_time:373029ms step_avg:93.70ms +[2025-08-22 22:34:52] [Rank 0] step:3981/10000 train_time:373029ms step_avg:93.70ms +[2025-08-22 22:34:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:34:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:35:08] [Rank 0] PRINT: step:4000/10000 val_loss:3.7741 svd_entropy: attn_qk:H=0.7604,top10E=0.26,eRank=187.0,q75/q25=51.84 attn_vo:H=0.8343,top10E=0.05,eRank=404.6,q75/q25=inf mlp_w1:H=0.9717,top10E=0.04,eRank=636.5,q75/q25=2.82 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.92 vo_prod:H=0.6912,top10E=0.10,eRank=226.0,q75/q25=inf train_time:374969ms step_avg:93.74ms +[2025-08-22 22:35:08] [Rank 0] PRINT: step:4000/10000 val_loss:3.7741 svd_entropy: attn_qk:H=0.7604,top10E=0.26,eRank=187.0,q75/q25=51.84 attn_vo:H=0.8343,top10E=0.05,eRank=404.6,q75/q25=inf mlp_w1:H=0.9717,top10E=0.04,eRank=636.5,q75/q25=2.82 mlp_w2:H=0.9670,top10E=0.05,eRank=617.1,q75/q25=2.92 vo_prod:H=0.6912,top10E=0.10,eRank=226.0,q75/q25=inf train_time:374969ms step_avg:93.74ms +[2025-08-22 22:35:08] [Rank 0] step:4001/10000 train_time:374992ms step_avg:93.72ms +[2025-08-22 22:35:08] [Rank 0] step:4001/10000 train_time:374992ms step_avg:93.72ms +[2025-08-22 22:35:10] [Rank 0] step:4021/10000 train_time:376930ms step_avg:93.74ms +[2025-08-22 22:35:10] [Rank 0] step:4021/10000 train_time:376930ms step_avg:93.74ms +[2025-08-22 22:35:12] [Rank 0] step:4041/10000 train_time:378861ms step_avg:93.75ms +[2025-08-22 22:35:12] [Rank 0] step:4041/10000 train_time:378861ms step_avg:93.75ms +[2025-08-22 22:35:14] [Rank 0] step:4061/10000 train_time:380796ms step_avg:93.77ms +[2025-08-22 22:35:14] [Rank 0] step:4061/10000 train_time:380796ms step_avg:93.77ms +[2025-08-22 22:35:16] [Rank 0] step:4081/10000 train_time:383376ms step_avg:93.94ms +[2025-08-22 22:35:16] [Rank 0] step:4081/10000 train_time:383376ms step_avg:93.94ms +[2025-08-22 22:35:18] [Rank 0] step:4101/10000 train_time:385309ms step_avg:93.95ms +[2025-08-22 22:35:18] [Rank 0] step:4101/10000 train_time:385309ms step_avg:93.95ms +[2025-08-22 22:35:20] [Rank 0] step:4121/10000 train_time:387241ms step_avg:93.97ms +[2025-08-22 22:35:20] [Rank 0] step:4121/10000 train_time:387241ms step_avg:93.97ms +[2025-08-22 22:35:22] [Rank 0] 
step:4141/10000 train_time:389178ms step_avg:93.98ms +[2025-08-22 22:35:22] [Rank 0] step:4141/10000 train_time:389178ms step_avg:93.98ms +[2025-08-22 22:35:24] [Rank 0] step:4161/10000 train_time:391193ms step_avg:94.01ms +[2025-08-22 22:35:24] [Rank 0] step:4161/10000 train_time:391193ms step_avg:94.01ms +[2025-08-22 22:35:26] [Rank 0] step:4181/10000 train_time:393125ms step_avg:94.03ms +[2025-08-22 22:35:26] [Rank 0] step:4181/10000 train_time:393125ms step_avg:94.03ms +[2025-08-22 22:35:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:35:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:35:42] [Rank 0] PRINT: step:4200/10000 val_loss:3.7633 svd_entropy: attn_qk:H=0.7614,top10E=0.26,eRank=187.8,q75/q25=51.81 attn_vo:H=0.8343,top10E=0.05,eRank=404.7,q75/q25=inf mlp_w1:H=0.9718,top10E=0.04,eRank=636.8,q75/q25=2.81 mlp_w2:H=0.9671,top10E=0.05,eRank=617.2,q75/q25=2.91 vo_prod:H=0.6914,top10E=0.10,eRank=226.4,q75/q25=inf train_time:395119ms step_avg:94.08ms +[2025-08-22 22:35:42] [Rank 0] PRINT: step:4200/10000 val_loss:3.7633 svd_entropy: attn_qk:H=0.7614,top10E=0.26,eRank=187.8,q75/q25=51.81 attn_vo:H=0.8343,top10E=0.05,eRank=404.7,q75/q25=inf mlp_w1:H=0.9718,top10E=0.04,eRank=636.8,q75/q25=2.81 mlp_w2:H=0.9671,top10E=0.05,eRank=617.2,q75/q25=2.91 vo_prod:H=0.6914,top10E=0.10,eRank=226.4,q75/q25=inf train_time:395119ms step_avg:94.08ms +[2025-08-22 22:35:42] [Rank 0] step:4201/10000 train_time:395142ms step_avg:94.06ms +[2025-08-22 22:35:42] [Rank 0] step:4201/10000 train_time:395142ms step_avg:94.06ms +[2025-08-22 22:35:44] [Rank 0] step:4221/10000 train_time:397083ms step_avg:94.07ms +[2025-08-22 22:35:44] [Rank 0] step:4221/10000 train_time:397083ms step_avg:94.07ms +[2025-08-22 22:35:46] [Rank 0] step:4241/10000 train_time:399017ms step_avg:94.09ms +[2025-08-22 
22:35:46] [Rank 0] step:4241/10000 train_time:399017ms step_avg:94.09ms +[2025-08-22 22:35:48] [Rank 0] step:4261/10000 train_time:400949ms step_avg:94.10ms +[2025-08-22 22:35:48] [Rank 0] step:4261/10000 train_time:400949ms step_avg:94.10ms +[2025-08-22 22:35:50] [Rank 0] step:4281/10000 train_time:402883ms step_avg:94.11ms +[2025-08-22 22:35:50] [Rank 0] step:4281/10000 train_time:402883ms step_avg:94.11ms +[2025-08-22 22:35:52] [Rank 0] step:4301/10000 train_time:404819ms step_avg:94.12ms +[2025-08-22 22:35:52] [Rank 0] step:4301/10000 train_time:404819ms step_avg:94.12ms +[2025-08-22 22:35:54] [Rank 0] step:4321/10000 train_time:406755ms step_avg:94.13ms +[2025-08-22 22:35:54] [Rank 0] step:4321/10000 train_time:406755ms step_avg:94.13ms +[2025-08-22 22:35:56] [Rank 0] step:4341/10000 train_time:408692ms step_avg:94.15ms +[2025-08-22 22:35:56] [Rank 0] step:4341/10000 train_time:408692ms step_avg:94.15ms +[2025-08-22 22:35:57] [Rank 0] step:4361/10000 train_time:410630ms step_avg:94.16ms +[2025-08-22 22:35:57] [Rank 0] step:4361/10000 train_time:410630ms step_avg:94.16ms +[2025-08-22 22:35:59] [Rank 0] step:4381/10000 train_time:412569ms step_avg:94.17ms +[2025-08-22 22:35:59] [Rank 0] step:4381/10000 train_time:412569ms step_avg:94.17ms +[2025-08-22 22:36:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:36:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:36:15] [Rank 0] PRINT: step:4400/10000 val_loss:3.7487 svd_entropy: attn_qk:H=0.7622,top10E=0.26,eRank=188.5,q75/q25=52.09 attn_vo:H=0.8343,top10E=0.05,eRank=404.8,q75/q25=inf mlp_w1:H=0.9718,top10E=0.04,eRank=637.0,q75/q25=2.81 mlp_w2:H=0.9670,top10E=0.05,eRank=617.2,q75/q25=2.91 vo_prod:H=0.6916,top10E=0.10,eRank=226.6,q75/q25=inf train_time:414509ms step_avg:94.21ms +[2025-08-22 22:36:15] [Rank 0] PRINT: step:4400/10000 val_loss:3.7487 svd_entropy: attn_qk:H=0.7622,top10E=0.26,eRank=188.5,q75/q25=52.09 attn_vo:H=0.8343,top10E=0.05,eRank=404.8,q75/q25=inf mlp_w1:H=0.9718,top10E=0.04,eRank=637.0,q75/q25=2.81 mlp_w2:H=0.9670,top10E=0.05,eRank=617.2,q75/q25=2.91 vo_prod:H=0.6916,top10E=0.10,eRank=226.6,q75/q25=inf train_time:414509ms step_avg:94.21ms +[2025-08-22 22:36:15] [Rank 0] step:4401/10000 train_time:414533ms step_avg:94.19ms +[2025-08-22 22:36:15] [Rank 0] step:4401/10000 train_time:414533ms step_avg:94.19ms +[2025-08-22 22:36:17] [Rank 0] step:4421/10000 train_time:416456ms step_avg:94.20ms +[2025-08-22 22:36:17] [Rank 0] step:4421/10000 train_time:416456ms step_avg:94.20ms +[2025-08-22 22:36:19] [Rank 0] step:4441/10000 train_time:418391ms step_avg:94.21ms +[2025-08-22 22:36:19] [Rank 0] step:4441/10000 train_time:418391ms step_avg:94.21ms +[2025-08-22 22:36:21] [Rank 0] step:4461/10000 train_time:420336ms step_avg:94.22ms +[2025-08-22 22:36:21] [Rank 0] step:4461/10000 train_time:420336ms step_avg:94.22ms +[2025-08-22 22:36:23] [Rank 0] step:4481/10000 train_time:422279ms step_avg:94.24ms +[2025-08-22 22:36:23] [Rank 0] step:4481/10000 train_time:422279ms step_avg:94.24ms +[2025-08-22 22:36:25] [Rank 0] step:4501/10000 train_time:424221ms step_avg:94.25ms +[2025-08-22 22:36:25] [Rank 0] step:4501/10000 train_time:424221ms step_avg:94.25ms +[2025-08-22 22:36:27] [Rank 0] step:4521/10000 train_time:426166ms step_avg:94.26ms +[2025-08-22 22:36:27] [Rank 0] step:4521/10000 train_time:426166ms step_avg:94.26ms +[2025-08-22 22:36:29] [Rank 0] 
step:4541/10000 train_time:428174ms step_avg:94.29ms +[2025-08-22 22:36:29] [Rank 0] step:4541/10000 train_time:428174ms step_avg:94.29ms +[2025-08-22 22:36:31] [Rank 0] step:4561/10000 train_time:430237ms step_avg:94.33ms +[2025-08-22 22:36:31] [Rank 0] step:4561/10000 train_time:430237ms step_avg:94.33ms +[2025-08-22 22:36:33] [Rank 0] step:4581/10000 train_time:432186ms step_avg:94.34ms +[2025-08-22 22:36:33] [Rank 0] step:4581/10000 train_time:432186ms step_avg:94.34ms +[2025-08-22 22:36:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:36:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:36:49] [Rank 0] PRINT: step:4600/10000 val_loss:3.7309 svd_entropy: attn_qk:H=0.7632,top10E=0.26,eRank=189.4,q75/q25=52.04 attn_vo:H=0.8344,top10E=0.05,eRank=405.0,q75/q25=inf mlp_w1:H=0.9718,top10E=0.04,eRank=637.1,q75/q25=2.80 mlp_w2:H=0.9671,top10E=0.05,eRank=617.3,q75/q25=2.91 vo_prod:H=0.6917,top10E=0.10,eRank=226.8,q75/q25=inf train_time:434138ms step_avg:94.38ms +[2025-08-22 22:36:49] [Rank 0] PRINT: step:4600/10000 val_loss:3.7309 svd_entropy: attn_qk:H=0.7632,top10E=0.26,eRank=189.4,q75/q25=52.04 attn_vo:H=0.8344,top10E=0.05,eRank=405.0,q75/q25=inf mlp_w1:H=0.9718,top10E=0.04,eRank=637.1,q75/q25=2.80 mlp_w2:H=0.9671,top10E=0.05,eRank=617.3,q75/q25=2.91 vo_prod:H=0.6917,top10E=0.10,eRank=226.8,q75/q25=inf train_time:434138ms step_avg:94.38ms +[2025-08-22 22:36:49] [Rank 0] step:4601/10000 train_time:434160ms step_avg:94.36ms +[2025-08-22 22:36:49] [Rank 0] step:4601/10000 train_time:434160ms step_avg:94.36ms +[2025-08-22 22:36:51] [Rank 0] step:4621/10000 train_time:436096ms step_avg:94.37ms +[2025-08-22 22:36:51] [Rank 0] step:4621/10000 train_time:436096ms step_avg:94.37ms +[2025-08-22 22:36:53] [Rank 0] step:4641/10000 train_time:438039ms step_avg:94.38ms +[2025-08-22 
22:36:53] [Rank 0] step:4641/10000 train_time:438039ms step_avg:94.38ms +[2025-08-22 22:36:55] [Rank 0] step:4661/10000 train_time:439981ms step_avg:94.40ms +[2025-08-22 22:36:55] [Rank 0] step:4661/10000 train_time:439981ms step_avg:94.40ms +[2025-08-22 22:36:57] [Rank 0] step:4681/10000 train_time:441925ms step_avg:94.41ms +[2025-08-22 22:36:57] [Rank 0] step:4681/10000 train_time:441925ms step_avg:94.41ms +[2025-08-22 22:36:59] [Rank 0] step:4701/10000 train_time:443870ms step_avg:94.42ms +[2025-08-22 22:36:59] [Rank 0] step:4701/10000 train_time:443870ms step_avg:94.42ms +[2025-08-22 22:37:00] [Rank 0] step:4721/10000 train_time:445814ms step_avg:94.43ms +[2025-08-22 22:37:00] [Rank 0] step:4721/10000 train_time:445814ms step_avg:94.43ms +[2025-08-22 22:37:02] [Rank 0] step:4741/10000 train_time:447759ms step_avg:94.44ms +[2025-08-22 22:37:02] [Rank 0] step:4741/10000 train_time:447759ms step_avg:94.44ms +[2025-08-22 22:37:04] [Rank 0] step:4761/10000 train_time:449707ms step_avg:94.46ms +[2025-08-22 22:37:04] [Rank 0] step:4761/10000 train_time:449707ms step_avg:94.46ms +[2025-08-22 22:37:06] [Rank 0] step:4781/10000 train_time:451653ms step_avg:94.47ms +[2025-08-22 22:37:06] [Rank 0] step:4781/10000 train_time:451653ms step_avg:94.47ms +[2025-08-22 22:37:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:37:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:37:22] [Rank 0] PRINT: step:4800/10000 val_loss:3.7238 svd_entropy: attn_qk:H=0.7640,top10E=0.26,eRank=190.1,q75/q25=52.39 attn_vo:H=0.8344,top10E=0.05,eRank=405.1,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.80 mlp_w2:H=0.9671,top10E=0.05,eRank=617.3,q75/q25=2.90 vo_prod:H=0.6919,top10E=0.10,eRank=227.2,q75/q25=inf train_time:453606ms step_avg:94.50ms +[2025-08-22 22:37:22] [Rank 0] PRINT: step:4800/10000 val_loss:3.7238 svd_entropy: attn_qk:H=0.7640,top10E=0.26,eRank=190.1,q75/q25=52.39 attn_vo:H=0.8344,top10E=0.05,eRank=405.1,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.3,q75/q25=2.80 mlp_w2:H=0.9671,top10E=0.05,eRank=617.3,q75/q25=2.90 vo_prod:H=0.6919,top10E=0.10,eRank=227.2,q75/q25=inf train_time:453606ms step_avg:94.50ms +[2025-08-22 22:37:22] [Rank 0] step:4801/10000 train_time:453629ms step_avg:94.49ms +[2025-08-22 22:37:22] [Rank 0] step:4801/10000 train_time:453629ms step_avg:94.49ms +[2025-08-22 22:37:24] [Rank 0] step:4821/10000 train_time:455568ms step_avg:94.50ms +[2025-08-22 22:37:24] [Rank 0] step:4821/10000 train_time:455568ms step_avg:94.50ms +[2025-08-22 22:37:26] [Rank 0] step:4841/10000 train_time:457508ms step_avg:94.51ms +[2025-08-22 22:37:26] [Rank 0] step:4841/10000 train_time:457508ms step_avg:94.51ms +[2025-08-22 22:37:28] [Rank 0] step:4861/10000 train_time:459450ms step_avg:94.52ms +[2025-08-22 22:37:28] [Rank 0] step:4861/10000 train_time:459450ms step_avg:94.52ms +[2025-08-22 22:37:30] [Rank 0] step:4881/10000 train_time:461494ms step_avg:94.55ms +[2025-08-22 22:37:30] [Rank 0] step:4881/10000 train_time:461494ms step_avg:94.55ms +[2025-08-22 22:37:32] [Rank 0] step:4901/10000 train_time:463385ms step_avg:94.55ms +[2025-08-22 22:37:32] [Rank 0] step:4901/10000 train_time:463385ms step_avg:94.55ms +[2025-08-22 22:37:34] [Rank 0] step:4921/10000 train_time:465412ms step_avg:94.58ms +[2025-08-22 22:37:34] [Rank 0] step:4921/10000 train_time:465412ms step_avg:94.58ms +[2025-08-22 22:37:36] [Rank 0] 
step:4941/10000 train_time:467357ms step_avg:94.59ms +[2025-08-22 22:37:36] [Rank 0] step:4941/10000 train_time:467357ms step_avg:94.59ms +[2025-08-22 22:37:38] [Rank 0] step:4961/10000 train_time:469298ms step_avg:94.60ms +[2025-08-22 22:37:38] [Rank 0] step:4961/10000 train_time:469298ms step_avg:94.60ms +[2025-08-22 22:37:40] [Rank 0] step:4981/10000 train_time:471243ms step_avg:94.61ms +[2025-08-22 22:37:40] [Rank 0] step:4981/10000 train_time:471243ms step_avg:94.61ms +[2025-08-22 22:37:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:37:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:37:56] [Rank 0] PRINT: step:5000/10000 val_loss:3.7119 svd_entropy: attn_qk:H=0.7648,top10E=0.26,eRank=190.8,q75/q25=52.76 attn_vo:H=0.8344,top10E=0.05,eRank=405.1,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.4,q75/q25=2.80 mlp_w2:H=0.9671,top10E=0.05,eRank=617.4,q75/q25=2.90 vo_prod:H=0.6918,top10E=0.10,eRank=227.1,q75/q25=inf train_time:473192ms step_avg:94.64ms +[2025-08-22 22:37:56] [Rank 0] PRINT: step:5000/10000 val_loss:3.7119 svd_entropy: attn_qk:H=0.7648,top10E=0.26,eRank=190.8,q75/q25=52.76 attn_vo:H=0.8344,top10E=0.05,eRank=405.1,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.4,q75/q25=2.80 mlp_w2:H=0.9671,top10E=0.05,eRank=617.4,q75/q25=2.90 vo_prod:H=0.6918,top10E=0.10,eRank=227.1,q75/q25=inf train_time:473192ms step_avg:94.64ms +[2025-08-22 22:37:56] [Rank 0] step:5001/10000 train_time:473215ms step_avg:94.62ms +[2025-08-22 22:37:56] [Rank 0] step:5001/10000 train_time:473215ms step_avg:94.62ms +[2025-08-22 22:37:58] [Rank 0] step:5021/10000 train_time:475145ms step_avg:94.63ms +[2025-08-22 22:37:58] [Rank 0] step:5021/10000 train_time:475145ms step_avg:94.63ms +[2025-08-22 22:38:00] [Rank 0] step:5041/10000 train_time:477086ms step_avg:94.64ms +[2025-08-22 
22:38:00] [Rank 0] step:5041/10000 train_time:477086ms step_avg:94.64ms +[2025-08-22 22:38:01] [Rank 0] step:5061/10000 train_time:479023ms step_avg:94.65ms +[2025-08-22 22:38:01] [Rank 0] step:5061/10000 train_time:479023ms step_avg:94.65ms +[2025-08-22 22:38:03] [Rank 0] step:5081/10000 train_time:480966ms step_avg:94.66ms +[2025-08-22 22:38:03] [Rank 0] step:5081/10000 train_time:480966ms step_avg:94.66ms +[2025-08-22 22:38:05] [Rank 0] step:5101/10000 train_time:482905ms step_avg:94.67ms +[2025-08-22 22:38:05] [Rank 0] step:5101/10000 train_time:482905ms step_avg:94.67ms +[2025-08-22 22:38:07] [Rank 0] step:5121/10000 train_time:484850ms step_avg:94.68ms +[2025-08-22 22:38:07] [Rank 0] step:5121/10000 train_time:484850ms step_avg:94.68ms +[2025-08-22 22:38:09] [Rank 0] step:5141/10000 train_time:486796ms step_avg:94.69ms +[2025-08-22 22:38:09] [Rank 0] step:5141/10000 train_time:486796ms step_avg:94.69ms +[2025-08-22 22:38:11] [Rank 0] step:5161/10000 train_time:488740ms step_avg:94.70ms +[2025-08-22 22:38:11] [Rank 0] step:5161/10000 train_time:488740ms step_avg:94.70ms +[2025-08-22 22:38:13] [Rank 0] step:5181/10000 train_time:490686ms step_avg:94.71ms +[2025-08-22 22:38:13] [Rank 0] step:5181/10000 train_time:490686ms step_avg:94.71ms +[2025-08-22 22:38:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:38:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:38:29] [Rank 0] PRINT: step:5200/10000 val_loss:3.7008 svd_entropy: attn_qk:H=0.7656,top10E=0.26,eRank=191.5,q75/q25=52.42 attn_vo:H=0.8345,top10E=0.05,eRank=405.2,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.80 mlp_w2:H=0.9671,top10E=0.05,eRank=617.4,q75/q25=2.90 vo_prod:H=0.6919,top10E=0.10,eRank=227.3,q75/q25=inf train_time:492659ms step_avg:94.74ms +[2025-08-22 22:38:29] [Rank 0] PRINT: step:5200/10000 val_loss:3.7008 svd_entropy: attn_qk:H=0.7656,top10E=0.26,eRank=191.5,q75/q25=52.42 attn_vo:H=0.8345,top10E=0.05,eRank=405.2,q75/q25=inf mlp_w1:H=0.9719,top10E=0.04,eRank=637.5,q75/q25=2.80 mlp_w2:H=0.9671,top10E=0.05,eRank=617.4,q75/q25=2.90 vo_prod:H=0.6919,top10E=0.10,eRank=227.3,q75/q25=inf train_time:492659ms step_avg:94.74ms +[2025-08-22 22:38:29] [Rank 0] step:5201/10000 train_time:492683ms step_avg:94.73ms +[2025-08-22 22:38:29] [Rank 0] step:5201/10000 train_time:492683ms step_avg:94.73ms +[2025-08-22 22:38:31] [Rank 0] step:5221/10000 train_time:494652ms step_avg:94.74ms +[2025-08-22 22:38:31] [Rank 0] step:5221/10000 train_time:494652ms step_avg:94.74ms +[2025-08-22 22:38:33] [Rank 0] step:5241/10000 train_time:496625ms step_avg:94.76ms +[2025-08-22 22:38:33] [Rank 0] step:5241/10000 train_time:496625ms step_avg:94.76ms +[2025-08-22 22:38:35] [Rank 0] step:5261/10000 train_time:498688ms step_avg:94.79ms +[2025-08-22 22:38:35] [Rank 0] step:5261/10000 train_time:498688ms step_avg:94.79ms +[2025-08-22 22:38:37] [Rank 0] step:5281/10000 train_time:500737ms step_avg:94.82ms +[2025-08-22 22:38:37] [Rank 0] step:5281/10000 train_time:500737ms step_avg:94.82ms +[2025-08-22 22:38:39] [Rank 0] step:5301/10000 train_time:502721ms step_avg:94.84ms +[2025-08-22 22:38:39] [Rank 0] step:5301/10000 train_time:502721ms step_avg:94.84ms +[2025-08-22 22:38:41] [Rank 0] step:5321/10000 train_time:504696ms step_avg:94.85ms +[2025-08-22 22:38:41] [Rank 0] step:5321/10000 train_time:504696ms step_avg:94.85ms +[2025-08-22 22:38:43] [Rank 0] 
step:5341/10000 train_time:506672ms step_avg:94.86ms +[2025-08-22 22:38:43] [Rank 0] step:5341/10000 train_time:506672ms step_avg:94.86ms +[2025-08-22 22:38:45] [Rank 0] step:5361/10000 train_time:508648ms step_avg:94.88ms +[2025-08-22 22:38:45] [Rank 0] step:5361/10000 train_time:508648ms step_avg:94.88ms +[2025-08-22 22:38:47] [Rank 0] step:5381/10000 train_time:510625ms step_avg:94.89ms +[2025-08-22 22:38:47] [Rank 0] step:5381/10000 train_time:510625ms step_avg:94.89ms +[2025-08-22 22:38:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:38:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:39:02] [Rank 0] PRINT: step:5400/10000 val_loss:3.6904 svd_entropy: attn_qk:H=0.7662,top10E=0.25,eRank=192.1,q75/q25=52.40 attn_vo:H=0.8345,top10E=0.05,eRank=405.2,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.79 mlp_w2:H=0.9671,top10E=0.05,eRank=617.4,q75/q25=2.90 vo_prod:H=0.6920,top10E=0.10,eRank=227.5,q75/q25=inf train_time:512604ms step_avg:94.93ms +[2025-08-22 22:39:02] [Rank 0] PRINT: step:5400/10000 val_loss:3.6904 svd_entropy: attn_qk:H=0.7662,top10E=0.25,eRank=192.1,q75/q25=52.40 attn_vo:H=0.8345,top10E=0.05,eRank=405.2,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=637.7,q75/q25=2.79 mlp_w2:H=0.9671,top10E=0.05,eRank=617.4,q75/q25=2.90 vo_prod:H=0.6920,top10E=0.10,eRank=227.5,q75/q25=inf train_time:512604ms step_avg:94.93ms +[2025-08-22 22:39:03] [Rank 0] step:5401/10000 train_time:512626ms step_avg:94.91ms +[2025-08-22 22:39:03] [Rank 0] step:5401/10000 train_time:512626ms step_avg:94.91ms +[2025-08-22 22:39:05] [Rank 0] step:5421/10000 train_time:514619ms step_avg:94.93ms +[2025-08-22 22:39:05] [Rank 0] step:5421/10000 train_time:514619ms step_avg:94.93ms +[2025-08-22 22:39:07] [Rank 0] step:5441/10000 train_time:516591ms step_avg:94.94ms +[2025-08-22 
22:39:07] [Rank 0] step:5441/10000 train_time:516591ms step_avg:94.94ms +[2025-08-22 22:39:09] [Rank 0] step:5461/10000 train_time:518573ms step_avg:94.96ms +[2025-08-22 22:39:09] [Rank 0] step:5461/10000 train_time:518573ms step_avg:94.96ms +[2025-08-22 22:39:10] [Rank 0] step:5481/10000 train_time:520550ms step_avg:94.97ms +[2025-08-22 22:39:10] [Rank 0] step:5481/10000 train_time:520550ms step_avg:94.97ms +[2025-08-22 22:39:12] [Rank 0] step:5501/10000 train_time:522535ms step_avg:94.99ms +[2025-08-22 22:39:12] [Rank 0] step:5501/10000 train_time:522535ms step_avg:94.99ms +[2025-08-22 22:39:14] [Rank 0] step:5521/10000 train_time:524518ms step_avg:95.00ms +[2025-08-22 22:39:14] [Rank 0] step:5521/10000 train_time:524518ms step_avg:95.00ms +[2025-08-22 22:39:16] [Rank 0] step:5541/10000 train_time:526497ms step_avg:95.02ms +[2025-08-22 22:39:16] [Rank 0] step:5541/10000 train_time:526497ms step_avg:95.02ms +[2025-08-22 22:39:18] [Rank 0] step:5561/10000 train_time:528476ms step_avg:95.03ms +[2025-08-22 22:39:18] [Rank 0] step:5561/10000 train_time:528476ms step_avg:95.03ms +[2025-08-22 22:39:20] [Rank 0] step:5581/10000 train_time:530456ms step_avg:95.05ms +[2025-08-22 22:39:20] [Rank 0] step:5581/10000 train_time:530456ms step_avg:95.05ms +[2025-08-22 22:39:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:39:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:39:36] [Rank 0] PRINT: step:5600/10000 val_loss:3.6820 svd_entropy: attn_qk:H=0.7670,top10E=0.25,eRank=192.7,q75/q25=52.61 attn_vo:H=0.8345,top10E=0.05,eRank=405.3,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=637.8,q75/q25=2.79 mlp_w2:H=0.9671,top10E=0.05,eRank=617.5,q75/q25=2.89 vo_prod:H=0.6921,top10E=0.10,eRank=227.8,q75/q25=inf train_time:532443ms step_avg:95.08ms +[2025-08-22 22:39:36] [Rank 0] PRINT: step:5600/10000 val_loss:3.6820 svd_entropy: attn_qk:H=0.7670,top10E=0.25,eRank=192.7,q75/q25=52.61 attn_vo:H=0.8345,top10E=0.05,eRank=405.3,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=637.8,q75/q25=2.79 mlp_w2:H=0.9671,top10E=0.05,eRank=617.5,q75/q25=2.89 vo_prod:H=0.6921,top10E=0.10,eRank=227.8,q75/q25=inf train_time:532443ms step_avg:95.08ms +[2025-08-22 22:39:36] [Rank 0] step:5601/10000 train_time:532465ms step_avg:95.07ms +[2025-08-22 22:39:36] [Rank 0] step:5601/10000 train_time:532465ms step_avg:95.07ms +[2025-08-22 22:39:38] [Rank 0] step:5621/10000 train_time:534512ms step_avg:95.09ms +[2025-08-22 22:39:38] [Rank 0] step:5621/10000 train_time:534512ms step_avg:95.09ms +[2025-08-22 22:39:40] [Rank 0] step:5641/10000 train_time:536567ms step_avg:95.12ms +[2025-08-22 22:39:40] [Rank 0] step:5641/10000 train_time:536567ms step_avg:95.12ms +[2025-08-22 22:39:42] [Rank 0] step:5661/10000 train_time:538540ms step_avg:95.13ms +[2025-08-22 22:39:42] [Rank 0] step:5661/10000 train_time:538540ms step_avg:95.13ms +[2025-08-22 22:39:44] [Rank 0] step:5681/10000 train_time:540515ms step_avg:95.14ms +[2025-08-22 22:39:44] [Rank 0] step:5681/10000 train_time:540515ms step_avg:95.14ms +[2025-08-22 22:39:46] [Rank 0] step:5701/10000 train_time:542489ms step_avg:95.16ms +[2025-08-22 22:39:46] [Rank 0] step:5701/10000 train_time:542489ms step_avg:95.16ms +[2025-08-22 22:39:48] [Rank 0] step:5721/10000 train_time:544470ms step_avg:95.17ms +[2025-08-22 22:39:48] [Rank 0] step:5721/10000 train_time:544470ms step_avg:95.17ms +[2025-08-22 22:39:50] [Rank 0] 
step:5741/10000 train_time:546442ms step_avg:95.18ms +[2025-08-22 22:39:50] [Rank 0] step:5741/10000 train_time:546442ms step_avg:95.18ms +[2025-08-22 22:39:52] [Rank 0] step:5761/10000 train_time:548420ms step_avg:95.20ms +[2025-08-22 22:39:52] [Rank 0] step:5761/10000 train_time:548420ms step_avg:95.20ms +[2025-08-22 22:39:54] [Rank 0] step:5781/10000 train_time:550397ms step_avg:95.21ms +[2025-08-22 22:39:54] [Rank 0] step:5781/10000 train_time:550397ms step_avg:95.21ms +[2025-08-22 22:39:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:39:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:40:10] [Rank 0] PRINT: step:5800/10000 val_loss:3.6791 svd_entropy: attn_qk:H=0.7676,top10E=0.25,eRank=193.3,q75/q25=52.79 attn_vo:H=0.8345,top10E=0.05,eRank=405.4,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.79 mlp_w2:H=0.9671,top10E=0.05,eRank=617.5,q75/q25=2.89 vo_prod:H=0.6922,top10E=0.10,eRank=227.9,q75/q25=inf train_time:552377ms step_avg:95.24ms +[2025-08-22 22:40:10] [Rank 0] PRINT: step:5800/10000 val_loss:3.6791 svd_entropy: attn_qk:H=0.7676,top10E=0.25,eRank=193.3,q75/q25=52.79 attn_vo:H=0.8345,top10E=0.05,eRank=405.4,q75/q25=inf mlp_w1:H=0.9720,top10E=0.04,eRank=637.9,q75/q25=2.79 mlp_w2:H=0.9671,top10E=0.05,eRank=617.5,q75/q25=2.89 vo_prod:H=0.6922,top10E=0.10,eRank=227.9,q75/q25=inf train_time:552377ms step_avg:95.24ms +[2025-08-22 22:40:10] [Rank 0] step:5801/10000 train_time:552400ms step_avg:95.22ms +[2025-08-22 22:40:10] [Rank 0] step:5801/10000 train_time:552400ms step_avg:95.22ms +[2025-08-22 22:40:12] [Rank 0] step:5821/10000 train_time:554358ms step_avg:95.23ms +[2025-08-22 22:40:12] [Rank 0] step:5821/10000 train_time:554358ms step_avg:95.23ms +[2025-08-22 22:40:14] [Rank 0] step:5841/10000 train_time:556329ms step_avg:95.25ms +[2025-08-22 
22:40:14] [Rank 0] step:5841/10000 train_time:556329ms step_avg:95.25ms +[2025-08-22 22:40:16] [Rank 0] step:5861/10000 train_time:558306ms step_avg:95.26ms +[2025-08-22 22:40:16] [Rank 0] step:5861/10000 train_time:558306ms step_avg:95.26ms +[2025-08-22 22:40:18] [Rank 0] step:5881/10000 train_time:560279ms step_avg:95.27ms +[2025-08-22 22:40:18] [Rank 0] step:5881/10000 train_time:560279ms step_avg:95.27ms +[2025-08-22 22:40:20] [Rank 0] step:5901/10000 train_time:562256ms step_avg:95.28ms +[2025-08-22 22:40:20] [Rank 0] step:5901/10000 train_time:562256ms step_avg:95.28ms +[2025-08-22 22:40:22] [Rank 0] step:5921/10000 train_time:564232ms step_avg:95.29ms +[2025-08-22 22:40:22] [Rank 0] step:5921/10000 train_time:564232ms step_avg:95.29ms +[2025-08-22 22:40:24] [Rank 0] step:5941/10000 train_time:566215ms step_avg:95.31ms +[2025-08-22 22:40:24] [Rank 0] step:5941/10000 train_time:566215ms step_avg:95.31ms +[2025-08-22 22:40:26] [Rank 0] step:5961/10000 train_time:568193ms step_avg:95.32ms +[2025-08-22 22:40:26] [Rank 0] step:5961/10000 train_time:568193ms step_avg:95.32ms +[2025-08-22 22:40:28] [Rank 0] step:5981/10000 train_time:570172ms step_avg:95.33ms +[2025-08-22 22:40:28] [Rank 0] step:5981/10000 train_time:570172ms step_avg:95.33ms +[2025-08-22 22:40:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:40:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:40:43] [Rank 0] PRINT: step:6000/10000 val_loss:3.6608 svd_entropy: attn_qk:H=0.7684,top10E=0.25,eRank=194.0,q75/q25=52.87 attn_vo:H=0.8345,top10E=0.05,eRank=405.5,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.0,q75/q25=2.79 mlp_w2:H=0.9671,top10E=0.05,eRank=617.6,q75/q25=2.89 vo_prod:H=0.6923,top10E=0.10,eRank=228.1,q75/q25=inf train_time:572154ms step_avg:95.36ms +[2025-08-22 22:40:43] [Rank 0] PRINT: step:6000/10000 val_loss:3.6608 svd_entropy: attn_qk:H=0.7684,top10E=0.25,eRank=194.0,q75/q25=52.87 attn_vo:H=0.8345,top10E=0.05,eRank=405.5,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.0,q75/q25=2.79 mlp_w2:H=0.9671,top10E=0.05,eRank=617.6,q75/q25=2.89 vo_prod:H=0.6923,top10E=0.10,eRank=228.1,q75/q25=inf train_time:572154ms step_avg:95.36ms +[2025-08-22 22:40:43] [Rank 0] step:6001/10000 train_time:572178ms step_avg:95.35ms +[2025-08-22 22:40:43] [Rank 0] step:6001/10000 train_time:572178ms step_avg:95.35ms +[2025-08-22 22:40:45] [Rank 0] step:6021/10000 train_time:574142ms step_avg:95.36ms +[2025-08-22 22:40:45] [Rank 0] step:6021/10000 train_time:574142ms step_avg:95.36ms +[2025-08-22 22:40:47] [Rank 0] step:6041/10000 train_time:576120ms step_avg:95.37ms +[2025-08-22 22:40:47] [Rank 0] step:6041/10000 train_time:576120ms step_avg:95.37ms +[2025-08-22 22:40:49] [Rank 0] step:6061/10000 train_time:578103ms step_avg:95.38ms +[2025-08-22 22:40:49] [Rank 0] step:6061/10000 train_time:578103ms step_avg:95.38ms +[2025-08-22 22:40:51] [Rank 0] step:6081/10000 train_time:580081ms step_avg:95.39ms +[2025-08-22 22:40:51] [Rank 0] step:6081/10000 train_time:580081ms step_avg:95.39ms +[2025-08-22 22:40:53] [Rank 0] step:6101/10000 train_time:582066ms step_avg:95.41ms +[2025-08-22 22:40:53] [Rank 0] step:6101/10000 train_time:582066ms step_avg:95.41ms +[2025-08-22 22:40:56] [Rank 0] step:6121/10000 train_time:584306ms step_avg:95.46ms +[2025-08-22 22:40:56] [Rank 0] step:6121/10000 train_time:584306ms step_avg:95.46ms +[2025-08-22 22:40:58] [Rank 0] 
step:6141/10000 train_time:586300ms step_avg:95.47ms +[2025-08-22 22:40:58] [Rank 0] step:6141/10000 train_time:586300ms step_avg:95.47ms +[2025-08-22 22:41:00] [Rank 0] step:6161/10000 train_time:588282ms step_avg:95.48ms +[2025-08-22 22:41:00] [Rank 0] step:6161/10000 train_time:588282ms step_avg:95.48ms +[2025-08-22 22:41:02] [Rank 0] step:6181/10000 train_time:590264ms step_avg:95.50ms +[2025-08-22 22:41:02] [Rank 0] step:6181/10000 train_time:590264ms step_avg:95.50ms +[2025-08-22 22:41:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:41:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:41:17] [Rank 0] PRINT: step:6200/10000 val_loss:3.6471 svd_entropy: attn_qk:H=0.7691,top10E=0.25,eRank=194.6,q75/q25=52.80 attn_vo:H=0.8346,top10E=0.05,eRank=405.6,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.1,q75/q25=2.79 mlp_w2:H=0.9671,top10E=0.05,eRank=617.6,q75/q25=2.88 vo_prod:H=0.6923,top10E=0.10,eRank=228.2,q75/q25=inf train_time:592254ms step_avg:95.52ms +[2025-08-22 22:41:17] [Rank 0] PRINT: step:6200/10000 val_loss:3.6471 svd_entropy: attn_qk:H=0.7691,top10E=0.25,eRank=194.6,q75/q25=52.80 attn_vo:H=0.8346,top10E=0.05,eRank=405.6,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.1,q75/q25=2.79 mlp_w2:H=0.9671,top10E=0.05,eRank=617.6,q75/q25=2.88 vo_prod:H=0.6923,top10E=0.10,eRank=228.2,q75/q25=inf train_time:592254ms step_avg:95.52ms +[2025-08-22 22:41:18] [Rank 0] step:6201/10000 train_time:592276ms step_avg:95.51ms +[2025-08-22 22:41:18] [Rank 0] step:6201/10000 train_time:592276ms step_avg:95.51ms +[2025-08-22 22:41:20] [Rank 0] step:6221/10000 train_time:594245ms step_avg:95.52ms +[2025-08-22 22:41:20] [Rank 0] step:6221/10000 train_time:594245ms step_avg:95.52ms +[2025-08-22 22:41:22] [Rank 0] step:6241/10000 train_time:596223ms step_avg:95.53ms +[2025-08-22 
22:41:22] [Rank 0] step:6241/10000 train_time:596223ms step_avg:95.53ms +[2025-08-22 22:41:23] [Rank 0] step:6261/10000 train_time:598204ms step_avg:95.54ms +[2025-08-22 22:41:23] [Rank 0] step:6261/10000 train_time:598204ms step_avg:95.54ms +[2025-08-22 22:41:25] [Rank 0] step:6281/10000 train_time:600190ms step_avg:95.56ms +[2025-08-22 22:41:25] [Rank 0] step:6281/10000 train_time:600190ms step_avg:95.56ms +[2025-08-22 22:41:27] [Rank 0] step:6301/10000 train_time:602174ms step_avg:95.57ms +[2025-08-22 22:41:27] [Rank 0] step:6301/10000 train_time:602174ms step_avg:95.57ms +[2025-08-22 22:41:29] [Rank 0] step:6321/10000 train_time:604163ms step_avg:95.58ms +[2025-08-22 22:41:29] [Rank 0] step:6321/10000 train_time:604163ms step_avg:95.58ms +[2025-08-22 22:41:31] [Rank 0] step:6341/10000 train_time:606147ms step_avg:95.59ms +[2025-08-22 22:41:31] [Rank 0] step:6341/10000 train_time:606147ms step_avg:95.59ms +[2025-08-22 22:41:33] [Rank 0] step:6361/10000 train_time:608142ms step_avg:95.60ms +[2025-08-22 22:41:33] [Rank 0] step:6361/10000 train_time:608142ms step_avg:95.60ms +[2025-08-22 22:41:35] [Rank 0] step:6381/10000 train_time:610128ms step_avg:95.62ms +[2025-08-22 22:41:35] [Rank 0] step:6381/10000 train_time:610128ms step_avg:95.62ms +[2025-08-22 22:41:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:41:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:41:51] [Rank 0] PRINT: step:6400/10000 val_loss:3.6373 svd_entropy: attn_qk:H=0.7696,top10E=0.25,eRank=195.1,q75/q25=53.09 attn_vo:H=0.8346,top10E=0.05,eRank=405.6,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.2,q75/q25=2.78 mlp_w2:H=0.9671,top10E=0.05,eRank=617.7,q75/q25=2.88 vo_prod:H=0.6924,top10E=0.10,eRank=228.3,q75/q25=inf train_time:612114ms step_avg:95.64ms +[2025-08-22 22:41:51] [Rank 0] PRINT: step:6400/10000 val_loss:3.6373 svd_entropy: attn_qk:H=0.7696,top10E=0.25,eRank=195.1,q75/q25=53.09 attn_vo:H=0.8346,top10E=0.05,eRank=405.6,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.2,q75/q25=2.78 mlp_w2:H=0.9671,top10E=0.05,eRank=617.7,q75/q25=2.88 vo_prod:H=0.6924,top10E=0.10,eRank=228.3,q75/q25=inf train_time:612114ms step_avg:95.64ms +[2025-08-22 22:41:51] [Rank 0] step:6401/10000 train_time:612137ms step_avg:95.63ms +[2025-08-22 22:41:51] [Rank 0] step:6401/10000 train_time:612137ms step_avg:95.63ms +[2025-08-22 22:41:53] [Rank 0] step:6421/10000 train_time:614122ms step_avg:95.64ms +[2025-08-22 22:41:53] [Rank 0] step:6421/10000 train_time:614122ms step_avg:95.64ms +[2025-08-22 22:41:55] [Rank 0] step:6441/10000 train_time:616102ms step_avg:95.65ms +[2025-08-22 22:41:55] [Rank 0] step:6441/10000 train_time:616102ms step_avg:95.65ms +[2025-08-22 22:41:57] [Rank 0] step:6461/10000 train_time:618081ms step_avg:95.66ms +[2025-08-22 22:41:57] [Rank 0] step:6461/10000 train_time:618081ms step_avg:95.66ms +[2025-08-22 22:41:59] [Rank 0] step:6481/10000 train_time:620069ms step_avg:95.67ms +[2025-08-22 22:41:59] [Rank 0] step:6481/10000 train_time:620069ms step_avg:95.67ms +[2025-08-22 22:42:01] [Rank 0] step:6501/10000 train_time:622046ms step_avg:95.68ms +[2025-08-22 22:42:01] [Rank 0] step:6501/10000 train_time:622046ms step_avg:95.68ms +[2025-08-22 22:42:03] [Rank 0] step:6521/10000 train_time:624022ms step_avg:95.69ms +[2025-08-22 22:42:03] [Rank 0] step:6521/10000 train_time:624022ms step_avg:95.69ms +[2025-08-22 22:42:05] [Rank 0] 
step:6541/10000 train_time:626004ms step_avg:95.70ms +[2025-08-22 22:42:05] [Rank 0] step:6541/10000 train_time:626004ms step_avg:95.70ms +[2025-08-22 22:42:07] [Rank 0] step:6561/10000 train_time:627986ms step_avg:95.71ms +[2025-08-22 22:42:07] [Rank 0] step:6561/10000 train_time:627986ms step_avg:95.71ms +[2025-08-22 22:42:09] [Rank 0] step:6581/10000 train_time:629964ms step_avg:95.72ms +[2025-08-22 22:42:09] [Rank 0] step:6581/10000 train_time:629964ms step_avg:95.72ms +[2025-08-22 22:42:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:42:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:42:25] [Rank 0] PRINT: step:6600/10000 val_loss:3.6223 svd_entropy: attn_qk:H=0.7702,top10E=0.25,eRank=195.6,q75/q25=53.00 attn_vo:H=0.8347,top10E=0.05,eRank=405.8,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.3,q75/q25=2.78 mlp_w2:H=0.9671,top10E=0.05,eRank=617.7,q75/q25=2.88 vo_prod:H=0.6927,top10E=0.10,eRank=228.8,q75/q25=inf train_time:631950ms step_avg:95.75ms +[2025-08-22 22:42:25] [Rank 0] PRINT: step:6600/10000 val_loss:3.6223 svd_entropy: attn_qk:H=0.7702,top10E=0.25,eRank=195.6,q75/q25=53.00 attn_vo:H=0.8347,top10E=0.05,eRank=405.8,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.3,q75/q25=2.78 mlp_w2:H=0.9671,top10E=0.05,eRank=617.7,q75/q25=2.88 vo_prod:H=0.6927,top10E=0.10,eRank=228.8,q75/q25=inf train_time:631950ms step_avg:95.75ms +[2025-08-22 22:42:25] [Rank 0] step:6601/10000 train_time:631972ms step_avg:95.74ms +[2025-08-22 22:42:25] [Rank 0] step:6601/10000 train_time:631972ms step_avg:95.74ms +[2025-08-22 22:42:27] [Rank 0] step:6621/10000 train_time:633938ms step_avg:95.75ms +[2025-08-22 22:42:27] [Rank 0] step:6621/10000 train_time:633938ms step_avg:95.75ms +[2025-08-22 22:42:29] [Rank 0] step:6641/10000 train_time:635948ms step_avg:95.76ms +[2025-08-22 
22:42:29] [Rank 0] step:6641/10000 train_time:635948ms step_avg:95.76ms +[2025-08-22 22:42:31] [Rank 0] step:6661/10000 train_time:637925ms step_avg:95.77ms +[2025-08-22 22:42:31] [Rank 0] step:6661/10000 train_time:637925ms step_avg:95.77ms +[2025-08-22 22:42:33] [Rank 0] step:6681/10000 train_time:639922ms step_avg:95.78ms +[2025-08-22 22:42:33] [Rank 0] step:6681/10000 train_time:639922ms step_avg:95.78ms +[2025-08-22 22:42:35] [Rank 0] step:6701/10000 train_time:641937ms step_avg:95.80ms +[2025-08-22 22:42:35] [Rank 0] step:6701/10000 train_time:641937ms step_avg:95.80ms +[2025-08-22 22:42:37] [Rank 0] step:6721/10000 train_time:643944ms step_avg:95.81ms +[2025-08-22 22:42:37] [Rank 0] step:6721/10000 train_time:643944ms step_avg:95.81ms +[2025-08-22 22:42:39] [Rank 0] step:6741/10000 train_time:645948ms step_avg:95.82ms +[2025-08-22 22:42:39] [Rank 0] step:6741/10000 train_time:645948ms step_avg:95.82ms +[2025-08-22 22:42:41] [Rank 0] step:6761/10000 train_time:647954ms step_avg:95.84ms +[2025-08-22 22:42:41] [Rank 0] step:6761/10000 train_time:647954ms step_avg:95.84ms +[2025-08-22 22:42:43] [Rank 0] step:6781/10000 train_time:649968ms step_avg:95.85ms +[2025-08-22 22:42:43] [Rank 0] step:6781/10000 train_time:649968ms step_avg:95.85ms +[2025-08-22 22:42:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:42:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:42:58] [Rank 0] PRINT: step:6800/10000 val_loss:3.6065 svd_entropy: attn_qk:H=0.7707,top10E=0.25,eRank=196.1,q75/q25=53.00 attn_vo:H=0.8347,top10E=0.05,eRank=405.9,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.4,q75/q25=2.78 mlp_w2:H=0.9672,top10E=0.05,eRank=617.7,q75/q25=2.88 vo_prod:H=0.6928,top10E=0.10,eRank=228.9,q75/q25=inf train_time:651984ms step_avg:95.88ms +[2025-08-22 22:42:58] [Rank 0] PRINT: step:6800/10000 val_loss:3.6065 svd_entropy: attn_qk:H=0.7707,top10E=0.25,eRank=196.1,q75/q25=53.00 attn_vo:H=0.8347,top10E=0.05,eRank=405.9,q75/q25=inf mlp_w1:H=0.9721,top10E=0.04,eRank=638.4,q75/q25=2.78 mlp_w2:H=0.9672,top10E=0.05,eRank=617.7,q75/q25=2.88 vo_prod:H=0.6928,top10E=0.10,eRank=228.9,q75/q25=inf train_time:651984ms step_avg:95.88ms +[2025-08-22 22:42:59] [Rank 0] step:6801/10000 train_time:652007ms step_avg:95.87ms +[2025-08-22 22:42:59] [Rank 0] step:6801/10000 train_time:652007ms step_avg:95.87ms +[2025-08-22 22:43:01] [Rank 0] step:6821/10000 train_time:654014ms step_avg:95.88ms +[2025-08-22 22:43:01] [Rank 0] step:6821/10000 train_time:654014ms step_avg:95.88ms +[2025-08-22 22:43:03] [Rank 0] step:6841/10000 train_time:656020ms step_avg:95.90ms +[2025-08-22 22:43:03] [Rank 0] step:6841/10000 train_time:656020ms step_avg:95.90ms +[2025-08-22 22:43:05] [Rank 0] step:6861/10000 train_time:658023ms step_avg:95.91ms +[2025-08-22 22:43:05] [Rank 0] step:6861/10000 train_time:658023ms step_avg:95.91ms +[2025-08-22 22:43:07] [Rank 0] step:6881/10000 train_time:660034ms step_avg:95.92ms +[2025-08-22 22:43:07] [Rank 0] step:6881/10000 train_time:660034ms step_avg:95.92ms +[2025-08-22 22:43:09] [Rank 0] step:6901/10000 train_time:662041ms step_avg:95.93ms +[2025-08-22 22:43:09] [Rank 0] step:6901/10000 train_time:662041ms step_avg:95.93ms +[2025-08-22 22:43:11] [Rank 0] step:6921/10000 train_time:664044ms step_avg:95.95ms +[2025-08-22 22:43:11] [Rank 0] step:6921/10000 train_time:664044ms step_avg:95.95ms +[2025-08-22 22:43:13] [Rank 0] 
step:6941/10000 train_time:666061ms step_avg:95.96ms +[2025-08-22 22:43:13] [Rank 0] step:6941/10000 train_time:666061ms step_avg:95.96ms +[2025-08-22 22:43:15] [Rank 0] step:6961/10000 train_time:668086ms step_avg:95.98ms +[2025-08-22 22:43:15] [Rank 0] step:6961/10000 train_time:668086ms step_avg:95.98ms +[2025-08-22 22:43:17] [Rank 0] step:6981/10000 train_time:670099ms step_avg:95.99ms +[2025-08-22 22:43:17] [Rank 0] step:6981/10000 train_time:670099ms step_avg:95.99ms +[2025-08-22 22:43:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:43:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:43:32] [Rank 0] PRINT: step:7000/10000 val_loss:3.5917 svd_entropy: attn_qk:H=0.7711,top10E=0.25,eRank=196.5,q75/q25=52.98 attn_vo:H=0.8347,top10E=0.06,eRank=406.0,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.4,q75/q25=2.78 mlp_w2:H=0.9672,top10E=0.05,eRank=617.8,q75/q25=2.88 vo_prod:H=0.6928,top10E=0.10,eRank=229.1,q75/q25=inf train_time:672119ms step_avg:96.02ms +[2025-08-22 22:43:32] [Rank 0] PRINT: step:7000/10000 val_loss:3.5917 svd_entropy: attn_qk:H=0.7711,top10E=0.25,eRank=196.5,q75/q25=52.98 attn_vo:H=0.8347,top10E=0.06,eRank=406.0,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.4,q75/q25=2.78 mlp_w2:H=0.9672,top10E=0.05,eRank=617.8,q75/q25=2.88 vo_prod:H=0.6928,top10E=0.10,eRank=229.1,q75/q25=inf train_time:672119ms step_avg:96.02ms +[2025-08-22 22:43:32] [Rank 0] step:7001/10000 train_time:672142ms step_avg:96.01ms +[2025-08-22 22:43:32] [Rank 0] step:7001/10000 train_time:672142ms step_avg:96.01ms +[2025-08-22 22:43:34] [Rank 0] step:7021/10000 train_time:674161ms step_avg:96.02ms +[2025-08-22 22:43:34] [Rank 0] step:7021/10000 train_time:674161ms step_avg:96.02ms +[2025-08-22 22:43:36] [Rank 0] step:7041/10000 train_time:676172ms step_avg:96.03ms +[2025-08-22 
22:43:36] [Rank 0] step:7041/10000 train_time:676172ms step_avg:96.03ms +[2025-08-22 22:43:38] [Rank 0] step:7061/10000 train_time:678184ms step_avg:96.05ms +[2025-08-22 22:43:38] [Rank 0] step:7061/10000 train_time:678184ms step_avg:96.05ms +[2025-08-22 22:43:40] [Rank 0] step:7081/10000 train_time:680198ms step_avg:96.06ms +[2025-08-22 22:43:40] [Rank 0] step:7081/10000 train_time:680198ms step_avg:96.06ms +[2025-08-22 22:43:42] [Rank 0] step:7101/10000 train_time:682218ms step_avg:96.07ms +[2025-08-22 22:43:42] [Rank 0] step:7101/10000 train_time:682218ms step_avg:96.07ms +[2025-08-22 22:43:45] [Rank 0] step:7121/10000 train_time:684231ms step_avg:96.09ms +[2025-08-22 22:43:45] [Rank 0] step:7121/10000 train_time:684231ms step_avg:96.09ms +[2025-08-22 22:43:47] [Rank 0] step:7141/10000 train_time:686244ms step_avg:96.10ms +[2025-08-22 22:43:47] [Rank 0] step:7141/10000 train_time:686244ms step_avg:96.10ms +[2025-08-22 22:43:49] [Rank 0] step:7161/10000 train_time:688261ms step_avg:96.11ms +[2025-08-22 22:43:49] [Rank 0] step:7161/10000 train_time:688261ms step_avg:96.11ms +[2025-08-22 22:43:51] [Rank 0] step:7181/10000 train_time:690351ms step_avg:96.14ms +[2025-08-22 22:43:51] [Rank 0] step:7181/10000 train_time:690351ms step_avg:96.14ms +[2025-08-22 22:43:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:43:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:44:06] [Rank 0] PRINT: step:7200/10000 val_loss:3.5802 svd_entropy: attn_qk:H=0.7716,top10E=0.25,eRank=196.9,q75/q25=53.16 attn_vo:H=0.8347,top10E=0.06,eRank=406.1,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.5,q75/q25=2.78 mlp_w2:H=0.9672,top10E=0.05,eRank=617.9,q75/q25=2.88 vo_prod:H=0.6930,top10E=0.10,eRank=229.4,q75/q25=inf train_time:692454ms step_avg:96.17ms +[2025-08-22 22:44:06] [Rank 0] PRINT: step:7200/10000 val_loss:3.5802 svd_entropy: attn_qk:H=0.7716,top10E=0.25,eRank=196.9,q75/q25=53.16 attn_vo:H=0.8347,top10E=0.06,eRank=406.1,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.5,q75/q25=2.78 mlp_w2:H=0.9672,top10E=0.05,eRank=617.9,q75/q25=2.88 vo_prod:H=0.6930,top10E=0.10,eRank=229.4,q75/q25=inf train_time:692454ms step_avg:96.17ms +[2025-08-22 22:44:07] [Rank 0] step:7201/10000 train_time:692477ms step_avg:96.16ms +[2025-08-22 22:44:07] [Rank 0] step:7201/10000 train_time:692477ms step_avg:96.16ms +[2025-08-22 22:44:09] [Rank 0] step:7221/10000 train_time:694500ms step_avg:96.18ms +[2025-08-22 22:44:09] [Rank 0] step:7221/10000 train_time:694500ms step_avg:96.18ms +[2025-08-22 22:44:11] [Rank 0] step:7241/10000 train_time:696507ms step_avg:96.19ms +[2025-08-22 22:44:11] [Rank 0] step:7241/10000 train_time:696507ms step_avg:96.19ms +[2025-08-22 22:44:13] [Rank 0] step:7261/10000 train_time:698509ms step_avg:96.20ms +[2025-08-22 22:44:13] [Rank 0] step:7261/10000 train_time:698509ms step_avg:96.20ms +[2025-08-22 22:44:15] [Rank 0] step:7281/10000 train_time:700526ms step_avg:96.21ms +[2025-08-22 22:44:15] [Rank 0] step:7281/10000 train_time:700526ms step_avg:96.21ms +[2025-08-22 22:44:17] [Rank 0] step:7301/10000 train_time:702532ms step_avg:96.22ms +[2025-08-22 22:44:17] [Rank 0] step:7301/10000 train_time:702532ms step_avg:96.22ms +[2025-08-22 22:44:19] [Rank 0] step:7321/10000 train_time:704554ms step_avg:96.24ms +[2025-08-22 22:44:19] [Rank 0] step:7321/10000 train_time:704554ms step_avg:96.24ms +[2025-08-22 22:44:21] [Rank 0] 
step:7341/10000 train_time:706562ms step_avg:96.25ms +[2025-08-22 22:44:21] [Rank 0] step:7341/10000 train_time:706562ms step_avg:96.25ms +[2025-08-22 22:44:23] [Rank 0] step:7361/10000 train_time:708580ms step_avg:96.26ms +[2025-08-22 22:44:23] [Rank 0] step:7361/10000 train_time:708580ms step_avg:96.26ms +[2025-08-22 22:44:25] [Rank 0] step:7381/10000 train_time:710600ms step_avg:96.27ms +[2025-08-22 22:44:25] [Rank 0] step:7381/10000 train_time:710600ms step_avg:96.27ms +[2025-08-22 22:44:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:44:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:44:40] [Rank 0] PRINT: step:7400/10000 val_loss:3.5654 svd_entropy: attn_qk:H=0.7719,top10E=0.25,eRank=197.2,q75/q25=53.00 attn_vo:H=0.8348,top10E=0.06,eRank=406.1,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.6,q75/q25=2.78 mlp_w2:H=0.9672,top10E=0.05,eRank=617.9,q75/q25=2.88 vo_prod:H=0.6930,top10E=0.10,eRank=229.5,q75/q25=inf train_time:712599ms step_avg:96.30ms +[2025-08-22 22:44:40] [Rank 0] PRINT: step:7400/10000 val_loss:3.5654 svd_entropy: attn_qk:H=0.7719,top10E=0.25,eRank=197.2,q75/q25=53.00 attn_vo:H=0.8348,top10E=0.06,eRank=406.1,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.6,q75/q25=2.78 mlp_w2:H=0.9672,top10E=0.05,eRank=617.9,q75/q25=2.88 vo_prod:H=0.6930,top10E=0.10,eRank=229.5,q75/q25=inf train_time:712599ms step_avg:96.30ms +[2025-08-22 22:44:40] [Rank 0] step:7401/10000 train_time:712622ms step_avg:96.29ms +[2025-08-22 22:44:40] [Rank 0] step:7401/10000 train_time:712622ms step_avg:96.29ms +[2025-08-22 22:44:42] [Rank 0] step:7421/10000 train_time:714640ms step_avg:96.30ms +[2025-08-22 22:44:42] [Rank 0] step:7421/10000 train_time:714640ms step_avg:96.30ms +[2025-08-22 22:44:44] [Rank 0] step:7441/10000 train_time:716644ms step_avg:96.31ms +[2025-08-22 
22:44:44] [Rank 0] step:7441/10000 train_time:716644ms step_avg:96.31ms +[2025-08-22 22:44:46] [Rank 0] step:7461/10000 train_time:718651ms step_avg:96.32ms +[2025-08-22 22:44:46] [Rank 0] step:7461/10000 train_time:718651ms step_avg:96.32ms +[2025-08-22 22:44:48] [Rank 0] step:7481/10000 train_time:720673ms step_avg:96.33ms +[2025-08-22 22:44:48] [Rank 0] step:7481/10000 train_time:720673ms step_avg:96.33ms +[2025-08-22 22:44:51] [Rank 0] step:7501/10000 train_time:722688ms step_avg:96.35ms +[2025-08-22 22:44:51] [Rank 0] step:7501/10000 train_time:722688ms step_avg:96.35ms +[2025-08-22 22:44:53] [Rank 0] step:7521/10000 train_time:724703ms step_avg:96.36ms +[2025-08-22 22:44:53] [Rank 0] step:7521/10000 train_time:724703ms step_avg:96.36ms +[2025-08-22 22:44:55] [Rank 0] step:7541/10000 train_time:726796ms step_avg:96.38ms +[2025-08-22 22:44:55] [Rank 0] step:7541/10000 train_time:726796ms step_avg:96.38ms +[2025-08-22 22:44:57] [Rank 0] step:7561/10000 train_time:728872ms step_avg:96.40ms +[2025-08-22 22:44:57] [Rank 0] step:7561/10000 train_time:728872ms step_avg:96.40ms +[2025-08-22 22:44:59] [Rank 0] step:7581/10000 train_time:730891ms step_avg:96.41ms +[2025-08-22 22:44:59] [Rank 0] step:7581/10000 train_time:730891ms step_avg:96.41ms +[2025-08-22 22:45:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:45:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:45:14] [Rank 0] PRINT: step:7600/10000 val_loss:3.5549 svd_entropy: attn_qk:H=0.7724,top10E=0.25,eRank=197.6,q75/q25=52.76 attn_vo:H=0.8348,top10E=0.06,eRank=406.2,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.7,q75/q25=2.77 mlp_w2:H=0.9672,top10E=0.05,eRank=618.1,q75/q25=2.88 vo_prod:H=0.6931,top10E=0.10,eRank=229.6,q75/q25=inf train_time:732910ms step_avg:96.44ms +[2025-08-22 22:45:14] [Rank 0] PRINT: step:7600/10000 val_loss:3.5549 svd_entropy: attn_qk:H=0.7724,top10E=0.25,eRank=197.6,q75/q25=52.76 attn_vo:H=0.8348,top10E=0.06,eRank=406.2,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.7,q75/q25=2.77 mlp_w2:H=0.9672,top10E=0.05,eRank=618.1,q75/q25=2.88 vo_prod:H=0.6931,top10E=0.10,eRank=229.6,q75/q25=inf train_time:732910ms step_avg:96.44ms +[2025-08-22 22:45:14] [Rank 0] step:7601/10000 train_time:732933ms step_avg:96.43ms +[2025-08-22 22:45:14] [Rank 0] step:7601/10000 train_time:732933ms step_avg:96.43ms +[2025-08-22 22:45:16] [Rank 0] step:7621/10000 train_time:734943ms step_avg:96.44ms +[2025-08-22 22:45:16] [Rank 0] step:7621/10000 train_time:734943ms step_avg:96.44ms +[2025-08-22 22:45:18] [Rank 0] step:7641/10000 train_time:736947ms step_avg:96.45ms +[2025-08-22 22:45:18] [Rank 0] step:7641/10000 train_time:736947ms step_avg:96.45ms +[2025-08-22 22:45:20] [Rank 0] step:7661/10000 train_time:738960ms step_avg:96.46ms +[2025-08-22 22:45:20] [Rank 0] step:7661/10000 train_time:738960ms step_avg:96.46ms +[2025-08-22 22:45:22] [Rank 0] step:7681/10000 train_time:740969ms step_avg:96.47ms +[2025-08-22 22:45:22] [Rank 0] step:7681/10000 train_time:740969ms step_avg:96.47ms +[2025-08-22 22:45:24] [Rank 0] step:7701/10000 train_time:742976ms step_avg:96.48ms +[2025-08-22 22:45:24] [Rank 0] step:7701/10000 train_time:742976ms step_avg:96.48ms +[2025-08-22 22:45:26] [Rank 0] step:7721/10000 train_time:744996ms step_avg:96.49ms +[2025-08-22 22:45:26] [Rank 0] step:7721/10000 train_time:744996ms step_avg:96.49ms +[2025-08-22 22:45:28] [Rank 0] 
step:7741/10000 train_time:747030ms step_avg:96.50ms +[2025-08-22 22:45:28] [Rank 0] step:7741/10000 train_time:747030ms step_avg:96.50ms +[2025-08-22 22:45:30] [Rank 0] step:7761/10000 train_time:749051ms step_avg:96.51ms +[2025-08-22 22:45:30] [Rank 0] step:7761/10000 train_time:749051ms step_avg:96.51ms +[2025-08-22 22:45:33] [Rank 0] step:7781/10000 train_time:751067ms step_avg:96.53ms +[2025-08-22 22:45:33] [Rank 0] step:7781/10000 train_time:751067ms step_avg:96.53ms +[2025-08-22 22:45:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:45:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:45:48] [Rank 0] PRINT: step:7800/10000 val_loss:3.5424 svd_entropy: attn_qk:H=0.7727,top10E=0.25,eRank=197.9,q75/q25=52.60 attn_vo:H=0.8348,top10E=0.06,eRank=406.2,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.8,q75/q25=2.77 mlp_w2:H=0.9673,top10E=0.05,eRank=618.1,q75/q25=2.88 vo_prod:H=0.6932,top10E=0.10,eRank=229.8,q75/q25=inf train_time:753096ms step_avg:96.55ms +[2025-08-22 22:45:48] [Rank 0] PRINT: step:7800/10000 val_loss:3.5424 svd_entropy: attn_qk:H=0.7727,top10E=0.25,eRank=197.9,q75/q25=52.60 attn_vo:H=0.8348,top10E=0.06,eRank=406.2,q75/q25=inf mlp_w1:H=0.9722,top10E=0.04,eRank=638.8,q75/q25=2.77 mlp_w2:H=0.9673,top10E=0.05,eRank=618.1,q75/q25=2.88 vo_prod:H=0.6932,top10E=0.10,eRank=229.8,q75/q25=inf train_time:753096ms step_avg:96.55ms +[2025-08-22 22:45:48] [Rank 0] step:7801/10000 train_time:753120ms step_avg:96.54ms +[2025-08-22 22:45:48] [Rank 0] step:7801/10000 train_time:753120ms step_avg:96.54ms +[2025-08-22 22:45:50] [Rank 0] step:7821/10000 train_time:755123ms step_avg:96.55ms +[2025-08-22 22:45:50] [Rank 0] step:7821/10000 train_time:755123ms step_avg:96.55ms +[2025-08-22 22:45:52] [Rank 0] step:7841/10000 train_time:757122ms step_avg:96.56ms +[2025-08-22 
22:45:52] [Rank 0] step:7841/10000 train_time:757122ms step_avg:96.56ms +[2025-08-22 22:45:54] [Rank 0] step:7861/10000 train_time:759138ms step_avg:96.57ms +[2025-08-22 22:45:54] [Rank 0] step:7861/10000 train_time:759138ms step_avg:96.57ms +[2025-08-22 22:45:56] [Rank 0] step:7881/10000 train_time:761154ms step_avg:96.58ms +[2025-08-22 22:45:56] [Rank 0] step:7881/10000 train_time:761154ms step_avg:96.58ms +[2025-08-22 22:45:58] [Rank 0] step:7901/10000 train_time:763242ms step_avg:96.60ms +[2025-08-22 22:45:58] [Rank 0] step:7901/10000 train_time:763242ms step_avg:96.60ms +[2025-08-22 22:46:01] [Rank 0] step:7921/10000 train_time:765336ms step_avg:96.62ms +[2025-08-22 22:46:01] [Rank 0] step:7921/10000 train_time:765336ms step_avg:96.62ms +[2025-08-22 22:46:03] [Rank 0] step:7941/10000 train_time:767360ms step_avg:96.63ms +[2025-08-22 22:46:03] [Rank 0] step:7941/10000 train_time:767360ms step_avg:96.63ms +[2025-08-22 22:46:05] [Rank 0] step:7961/10000 train_time:769381ms step_avg:96.64ms +[2025-08-22 22:46:05] [Rank 0] step:7961/10000 train_time:769381ms step_avg:96.64ms +[2025-08-22 22:46:07] [Rank 0] step:7981/10000 train_time:771390ms step_avg:96.65ms +[2025-08-22 22:46:07] [Rank 0] step:7981/10000 train_time:771390ms step_avg:96.65ms +[2025-08-22 22:46:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:46:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:46:22] [Rank 0] PRINT: step:8000/10000 val_loss:3.5258 svd_entropy: attn_qk:H=0.7730,top10E=0.25,eRank=198.2,q75/q25=52.71 attn_vo:H=0.8348,top10E=0.05,eRank=406.3,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=638.9,q75/q25=2.77 mlp_w2:H=0.9673,top10E=0.05,eRank=618.2,q75/q25=2.87 vo_prod:H=0.6933,top10E=0.10,eRank=230.0,q75/q25=inf train_time:773417ms step_avg:96.68ms +[2025-08-22 22:46:22] [Rank 0] PRINT: step:8000/10000 val_loss:3.5258 svd_entropy: attn_qk:H=0.7730,top10E=0.25,eRank=198.2,q75/q25=52.71 attn_vo:H=0.8348,top10E=0.05,eRank=406.3,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=638.9,q75/q25=2.77 mlp_w2:H=0.9673,top10E=0.05,eRank=618.2,q75/q25=2.87 vo_prod:H=0.6933,top10E=0.10,eRank=230.0,q75/q25=inf train_time:773417ms step_avg:96.68ms +[2025-08-22 22:46:22] [Rank 0] step:8001/10000 train_time:773440ms step_avg:96.67ms +[2025-08-22 22:46:22] [Rank 0] step:8001/10000 train_time:773440ms step_avg:96.67ms +[2025-08-22 22:46:24] [Rank 0] step:8021/10000 train_time:775449ms step_avg:96.68ms +[2025-08-22 22:46:24] [Rank 0] step:8021/10000 train_time:775449ms step_avg:96.68ms +[2025-08-22 22:46:26] [Rank 0] step:8041/10000 train_time:777473ms step_avg:96.69ms +[2025-08-22 22:46:26] [Rank 0] step:8041/10000 train_time:777473ms step_avg:96.69ms +[2025-08-22 22:46:28] [Rank 0] step:8061/10000 train_time:779491ms step_avg:96.70ms +[2025-08-22 22:46:28] [Rank 0] step:8061/10000 train_time:779491ms step_avg:96.70ms +[2025-08-22 22:46:30] [Rank 0] step:8081/10000 train_time:781494ms step_avg:96.71ms +[2025-08-22 22:46:30] [Rank 0] step:8081/10000 train_time:781494ms step_avg:96.71ms +[2025-08-22 22:46:32] [Rank 0] step:8101/10000 train_time:783519ms step_avg:96.72ms +[2025-08-22 22:46:32] [Rank 0] step:8101/10000 train_time:783519ms step_avg:96.72ms +[2025-08-22 22:46:35] [Rank 0] step:8121/10000 train_time:785536ms step_avg:96.73ms +[2025-08-22 22:46:35] [Rank 0] step:8121/10000 train_time:785536ms step_avg:96.73ms +[2025-08-22 22:46:37] [Rank 0] 
step:8141/10000 train_time:788191ms step_avg:96.82ms +[2025-08-22 22:46:37] [Rank 0] step:8141/10000 train_time:788191ms step_avg:96.82ms +[2025-08-22 22:46:39] [Rank 0] step:8161/10000 train_time:790226ms step_avg:96.83ms +[2025-08-22 22:46:39] [Rank 0] step:8161/10000 train_time:790226ms step_avg:96.83ms +[2025-08-22 22:46:41] [Rank 0] step:8181/10000 train_time:792275ms step_avg:96.84ms +[2025-08-22 22:46:41] [Rank 0] step:8181/10000 train_time:792275ms step_avg:96.84ms +[2025-08-22 22:46:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:46:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:46:57] [Rank 0] PRINT: step:8200/10000 val_loss:3.5142 svd_entropy: attn_qk:H=0.7733,top10E=0.25,eRank=198.5,q75/q25=52.70 attn_vo:H=0.8349,top10E=0.06,eRank=406.4,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.0,q75/q25=2.77 mlp_w2:H=0.9673,top10E=0.05,eRank=618.4,q75/q25=2.87 vo_prod:H=0.6934,top10E=0.10,eRank=230.2,q75/q25=inf train_time:794348ms step_avg:96.87ms +[2025-08-22 22:46:57] [Rank 0] PRINT: step:8200/10000 val_loss:3.5142 svd_entropy: attn_qk:H=0.7733,top10E=0.25,eRank=198.5,q75/q25=52.70 attn_vo:H=0.8349,top10E=0.06,eRank=406.4,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.0,q75/q25=2.77 mlp_w2:H=0.9673,top10E=0.05,eRank=618.4,q75/q25=2.87 vo_prod:H=0.6934,top10E=0.10,eRank=230.2,q75/q25=inf train_time:794348ms step_avg:96.87ms +[2025-08-22 22:46:57] [Rank 0] step:8201/10000 train_time:794370ms step_avg:96.86ms +[2025-08-22 22:46:57] [Rank 0] step:8201/10000 train_time:794370ms step_avg:96.86ms +[2025-08-22 22:46:59] [Rank 0] step:8221/10000 train_time:796425ms step_avg:96.88ms +[2025-08-22 22:46:59] [Rank 0] step:8221/10000 train_time:796425ms step_avg:96.88ms +[2025-08-22 22:47:01] [Rank 0] step:8241/10000 train_time:798528ms step_avg:96.90ms +[2025-08-22 
22:47:01] [Rank 0] step:8241/10000 train_time:798528ms step_avg:96.90ms +[2025-08-22 22:47:03] [Rank 0] step:8261/10000 train_time:800653ms step_avg:96.92ms +[2025-08-22 22:47:03] [Rank 0] step:8261/10000 train_time:800653ms step_avg:96.92ms +[2025-08-22 22:47:05] [Rank 0] step:8281/10000 train_time:802692ms step_avg:96.93ms +[2025-08-22 22:47:05] [Rank 0] step:8281/10000 train_time:802692ms step_avg:96.93ms +[2025-08-22 22:47:07] [Rank 0] step:8301/10000 train_time:804734ms step_avg:96.94ms +[2025-08-22 22:47:07] [Rank 0] step:8301/10000 train_time:804734ms step_avg:96.94ms +[2025-08-22 22:47:10] [Rank 0] step:8321/10000 train_time:806770ms step_avg:96.96ms +[2025-08-22 22:47:10] [Rank 0] step:8321/10000 train_time:806770ms step_avg:96.96ms +[2025-08-22 22:47:12] [Rank 0] step:8341/10000 train_time:808821ms step_avg:96.97ms +[2025-08-22 22:47:12] [Rank 0] step:8341/10000 train_time:808821ms step_avg:96.97ms +[2025-08-22 22:47:14] [Rank 0] step:8361/10000 train_time:810861ms step_avg:96.98ms +[2025-08-22 22:47:14] [Rank 0] step:8361/10000 train_time:810861ms step_avg:96.98ms +[2025-08-22 22:47:16] [Rank 0] step:8381/10000 train_time:812899ms step_avg:96.99ms +[2025-08-22 22:47:16] [Rank 0] step:8381/10000 train_time:812899ms step_avg:96.99ms +[2025-08-22 22:47:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:47:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:47:31] [Rank 0] PRINT: step:8400/10000 val_loss:3.5019 svd_entropy: attn_qk:H=0.7735,top10E=0.24,eRank=198.7,q75/q25=52.69 attn_vo:H=0.8349,top10E=0.06,eRank=406.5,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.0,q75/q25=2.77 mlp_w2:H=0.9673,top10E=0.05,eRank=618.5,q75/q25=2.87 vo_prod:H=0.6935,top10E=0.10,eRank=230.4,q75/q25=inf train_time:814940ms step_avg:97.02ms +[2025-08-22 22:47:31] [Rank 0] PRINT: step:8400/10000 val_loss:3.5019 svd_entropy: attn_qk:H=0.7735,top10E=0.24,eRank=198.7,q75/q25=52.69 attn_vo:H=0.8349,top10E=0.06,eRank=406.5,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.0,q75/q25=2.77 mlp_w2:H=0.9673,top10E=0.05,eRank=618.5,q75/q25=2.87 vo_prod:H=0.6935,top10E=0.10,eRank=230.4,q75/q25=inf train_time:814940ms step_avg:97.02ms +[2025-08-22 22:47:31] [Rank 0] step:8401/10000 train_time:814964ms step_avg:97.01ms +[2025-08-22 22:47:31] [Rank 0] step:8401/10000 train_time:814964ms step_avg:97.01ms +[2025-08-22 22:47:33] [Rank 0] step:8421/10000 train_time:816983ms step_avg:97.02ms +[2025-08-22 22:47:33] [Rank 0] step:8421/10000 train_time:816983ms step_avg:97.02ms +[2025-08-22 22:47:36] [Rank 0] step:8441/10000 train_time:819015ms step_avg:97.03ms +[2025-08-22 22:47:36] [Rank 0] step:8441/10000 train_time:819015ms step_avg:97.03ms +[2025-08-22 22:47:38] [Rank 0] step:8461/10000 train_time:821044ms step_avg:97.04ms +[2025-08-22 22:47:38] [Rank 0] step:8461/10000 train_time:821044ms step_avg:97.04ms +[2025-08-22 22:47:40] [Rank 0] step:8481/10000 train_time:823086ms step_avg:97.05ms +[2025-08-22 22:47:40] [Rank 0] step:8481/10000 train_time:823086ms step_avg:97.05ms +[2025-08-22 22:47:42] [Rank 0] step:8501/10000 train_time:825147ms step_avg:97.06ms +[2025-08-22 22:47:42] [Rank 0] step:8501/10000 train_time:825147ms step_avg:97.06ms +[2025-08-22 22:47:44] [Rank 0] step:8521/10000 train_time:827189ms step_avg:97.08ms +[2025-08-22 22:47:44] [Rank 0] step:8521/10000 train_time:827189ms step_avg:97.08ms +[2025-08-22 22:47:46] [Rank 0] 
step:8541/10000 train_time:829245ms step_avg:97.09ms +[2025-08-22 22:47:46] [Rank 0] step:8541/10000 train_time:829245ms step_avg:97.09ms +[2025-08-22 22:47:48] [Rank 0] step:8561/10000 train_time:831294ms step_avg:97.10ms +[2025-08-22 22:47:48] [Rank 0] step:8561/10000 train_time:831294ms step_avg:97.10ms +[2025-08-22 22:47:50] [Rank 0] step:8581/10000 train_time:833337ms step_avg:97.11ms +[2025-08-22 22:47:50] [Rank 0] step:8581/10000 train_time:833337ms step_avg:97.11ms +[2025-08-22 22:47:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:47:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:48:06] [Rank 0] PRINT: step:8600/10000 val_loss:3.4921 svd_entropy: attn_qk:H=0.7738,top10E=0.24,eRank=199.0,q75/q25=52.82 attn_vo:H=0.8349,top10E=0.06,eRank=406.5,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.1,q75/q25=2.77 mlp_w2:H=0.9674,top10E=0.05,eRank=618.6,q75/q25=2.87 vo_prod:H=0.6936,top10E=0.10,eRank=230.5,q75/q25=inf train_time:835376ms step_avg:97.14ms +[2025-08-22 22:48:06] [Rank 0] PRINT: step:8600/10000 val_loss:3.4921 svd_entropy: attn_qk:H=0.7738,top10E=0.24,eRank=199.0,q75/q25=52.82 attn_vo:H=0.8349,top10E=0.06,eRank=406.5,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.1,q75/q25=2.77 mlp_w2:H=0.9674,top10E=0.05,eRank=618.6,q75/q25=2.87 vo_prod:H=0.6936,top10E=0.10,eRank=230.5,q75/q25=inf train_time:835376ms step_avg:97.14ms +[2025-08-22 22:48:06] [Rank 0] step:8601/10000 train_time:835399ms step_avg:97.13ms +[2025-08-22 22:48:06] [Rank 0] step:8601/10000 train_time:835399ms step_avg:97.13ms +[2025-08-22 22:48:08] [Rank 0] step:8621/10000 train_time:837450ms step_avg:97.14ms +[2025-08-22 22:48:08] [Rank 0] step:8621/10000 train_time:837450ms step_avg:97.14ms +[2025-08-22 22:48:10] [Rank 0] step:8641/10000 train_time:839488ms step_avg:97.15ms +[2025-08-22 
22:48:10] [Rank 0] step:8641/10000 train_time:839488ms step_avg:97.15ms +[2025-08-22 22:48:12] [Rank 0] step:8661/10000 train_time:841528ms step_avg:97.16ms +[2025-08-22 22:48:12] [Rank 0] step:8661/10000 train_time:841528ms step_avg:97.16ms +[2025-08-22 22:48:14] [Rank 0] step:8681/10000 train_time:843574ms step_avg:97.17ms +[2025-08-22 22:48:14] [Rank 0] step:8681/10000 train_time:843574ms step_avg:97.17ms +[2025-08-22 22:48:16] [Rank 0] step:8701/10000 train_time:845609ms step_avg:97.19ms +[2025-08-22 22:48:16] [Rank 0] step:8701/10000 train_time:845609ms step_avg:97.19ms +[2025-08-22 22:48:18] [Rank 0] step:8721/10000 train_time:847655ms step_avg:97.20ms +[2025-08-22 22:48:18] [Rank 0] step:8721/10000 train_time:847655ms step_avg:97.20ms +[2025-08-22 22:48:20] [Rank 0] step:8741/10000 train_time:849689ms step_avg:97.21ms +[2025-08-22 22:48:20] [Rank 0] step:8741/10000 train_time:849689ms step_avg:97.21ms +[2025-08-22 22:48:22] [Rank 0] step:8761/10000 train_time:851731ms step_avg:97.22ms +[2025-08-22 22:48:22] [Rank 0] step:8761/10000 train_time:851731ms step_avg:97.22ms +[2025-08-22 22:48:24] [Rank 0] step:8781/10000 train_time:853784ms step_avg:97.23ms +[2025-08-22 22:48:24] [Rank 0] step:8781/10000 train_time:853784ms step_avg:97.23ms +[2025-08-22 22:48:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:48:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:48:40] [Rank 0] PRINT: step:8800/10000 val_loss:3.4807 svd_entropy: attn_qk:H=0.7740,top10E=0.24,eRank=199.1,q75/q25=52.70 attn_vo:H=0.8349,top10E=0.06,eRank=406.6,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.1,q75/q25=2.77 mlp_w2:H=0.9674,top10E=0.05,eRank=618.7,q75/q25=2.86 vo_prod:H=0.6937,top10E=0.10,eRank=230.7,q75/q25=inf train_time:855833ms step_avg:97.25ms +[2025-08-22 22:48:40] [Rank 0] PRINT: step:8800/10000 val_loss:3.4807 svd_entropy: attn_qk:H=0.7740,top10E=0.24,eRank=199.1,q75/q25=52.70 attn_vo:H=0.8349,top10E=0.06,eRank=406.6,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.1,q75/q25=2.77 mlp_w2:H=0.9674,top10E=0.05,eRank=618.7,q75/q25=2.86 vo_prod:H=0.6937,top10E=0.10,eRank=230.7,q75/q25=inf train_time:855833ms step_avg:97.25ms +[2025-08-22 22:48:40] [Rank 0] step:8801/10000 train_time:855856ms step_avg:97.25ms +[2025-08-22 22:48:40] [Rank 0] step:8801/10000 train_time:855856ms step_avg:97.25ms +[2025-08-22 22:48:42] [Rank 0] step:8821/10000 train_time:857884ms step_avg:97.25ms +[2025-08-22 22:48:42] [Rank 0] step:8821/10000 train_time:857884ms step_avg:97.25ms +[2025-08-22 22:48:44] [Rank 0] step:8841/10000 train_time:859947ms step_avg:97.27ms +[2025-08-22 22:48:44] [Rank 0] step:8841/10000 train_time:859947ms step_avg:97.27ms +[2025-08-22 22:48:46] [Rank 0] step:8861/10000 train_time:861984ms step_avg:97.28ms +[2025-08-22 22:48:46] [Rank 0] step:8861/10000 train_time:861984ms step_avg:97.28ms +[2025-08-22 22:48:48] [Rank 0] step:8881/10000 train_time:864027ms step_avg:97.29ms +[2025-08-22 22:48:48] [Rank 0] step:8881/10000 train_time:864027ms step_avg:97.29ms +[2025-08-22 22:48:50] [Rank 0] step:8901/10000 train_time:866071ms step_avg:97.30ms +[2025-08-22 22:48:50] [Rank 0] step:8901/10000 train_time:866071ms step_avg:97.30ms +[2025-08-22 22:48:52] [Rank 0] step:8921/10000 train_time:868133ms step_avg:97.31ms +[2025-08-22 22:48:52] [Rank 0] step:8921/10000 train_time:868133ms step_avg:97.31ms +[2025-08-22 22:48:54] [Rank 0] 
step:8941/10000 train_time:870182ms step_avg:97.32ms +[2025-08-22 22:48:54] [Rank 0] step:8941/10000 train_time:870182ms step_avg:97.32ms +[2025-08-22 22:48:56] [Rank 0] step:8961/10000 train_time:872230ms step_avg:97.34ms +[2025-08-22 22:48:56] [Rank 0] step:8961/10000 train_time:872230ms step_avg:97.34ms +[2025-08-22 22:48:58] [Rank 0] step:8981/10000 train_time:874275ms step_avg:97.35ms +[2025-08-22 22:48:58] [Rank 0] step:8981/10000 train_time:874275ms step_avg:97.35ms +[2025-08-22 22:49:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:49:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:49:14] [Rank 0] PRINT: step:9000/10000 val_loss:3.4701 svd_entropy: attn_qk:H=0.7742,top10E=0.24,eRank=199.3,q75/q25=52.50 attn_vo:H=0.8350,top10E=0.06,eRank=406.7,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.1,q75/q25=2.77 mlp_w2:H=0.9674,top10E=0.05,eRank=618.8,q75/q25=2.87 vo_prod:H=0.6938,top10E=0.10,eRank=230.8,q75/q25=inf train_time:876323ms step_avg:97.37ms +[2025-08-22 22:49:14] [Rank 0] PRINT: step:9000/10000 val_loss:3.4701 svd_entropy: attn_qk:H=0.7742,top10E=0.24,eRank=199.3,q75/q25=52.50 attn_vo:H=0.8350,top10E=0.06,eRank=406.7,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.1,q75/q25=2.77 mlp_w2:H=0.9674,top10E=0.05,eRank=618.8,q75/q25=2.87 vo_prod:H=0.6938,top10E=0.10,eRank=230.8,q75/q25=inf train_time:876323ms step_avg:97.37ms +[2025-08-22 22:49:14] [Rank 0] step:9001/10000 train_time:876345ms step_avg:97.36ms +[2025-08-22 22:49:14] [Rank 0] step:9001/10000 train_time:876345ms step_avg:97.36ms +[2025-08-22 22:49:16] [Rank 0] step:9021/10000 train_time:878387ms step_avg:97.37ms +[2025-08-22 22:49:16] [Rank 0] step:9021/10000 train_time:878387ms step_avg:97.37ms +[2025-08-22 22:49:18] [Rank 0] step:9041/10000 train_time:880428ms step_avg:97.38ms +[2025-08-22 
22:49:18] [Rank 0] step:9041/10000 train_time:880428ms step_avg:97.38ms +[2025-08-22 22:49:20] [Rank 0] step:9061/10000 train_time:882476ms step_avg:97.39ms +[2025-08-22 22:49:20] [Rank 0] step:9061/10000 train_time:882476ms step_avg:97.39ms +[2025-08-22 22:49:22] [Rank 0] step:9081/10000 train_time:884528ms step_avg:97.40ms +[2025-08-22 22:49:22] [Rank 0] step:9081/10000 train_time:884528ms step_avg:97.40ms +[2025-08-22 22:49:24] [Rank 0] step:9101/10000 train_time:886591ms step_avg:97.42ms +[2025-08-22 22:49:24] [Rank 0] step:9101/10000 train_time:886591ms step_avg:97.42ms +[2025-08-22 22:49:26] [Rank 0] step:9121/10000 train_time:888637ms step_avg:97.43ms +[2025-08-22 22:49:26] [Rank 0] step:9121/10000 train_time:888637ms step_avg:97.43ms +[2025-08-22 22:49:29] [Rank 0] step:9141/10000 train_time:890675ms step_avg:97.44ms +[2025-08-22 22:49:29] [Rank 0] step:9141/10000 train_time:890675ms step_avg:97.44ms +[2025-08-22 22:49:31] [Rank 0] step:9161/10000 train_time:892715ms step_avg:97.45ms +[2025-08-22 22:49:31] [Rank 0] step:9161/10000 train_time:892715ms step_avg:97.45ms +[2025-08-22 22:49:33] [Rank 0] step:9181/10000 train_time:894797ms step_avg:97.46ms +[2025-08-22 22:49:33] [Rank 0] step:9181/10000 train_time:894797ms step_avg:97.46ms +[2025-08-22 22:49:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:49:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:49:48] [Rank 0] PRINT: step:9200/10000 val_loss:3.4607 svd_entropy: attn_qk:H=0.7744,top10E=0.24,eRank=199.5,q75/q25=52.67 attn_vo:H=0.8350,top10E=0.06,eRank=406.8,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.2,q75/q25=2.77 mlp_w2:H=0.9674,top10E=0.05,eRank=618.9,q75/q25=2.86 vo_prod:H=0.6938,top10E=0.10,eRank=231.0,q75/q25=inf train_time:896842ms step_avg:97.48ms +[2025-08-22 22:49:48] [Rank 0] PRINT: step:9200/10000 val_loss:3.4607 svd_entropy: attn_qk:H=0.7744,top10E=0.24,eRank=199.5,q75/q25=52.67 attn_vo:H=0.8350,top10E=0.06,eRank=406.8,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.2,q75/q25=2.77 mlp_w2:H=0.9674,top10E=0.05,eRank=618.9,q75/q25=2.86 vo_prod:H=0.6938,top10E=0.10,eRank=231.0,q75/q25=inf train_time:896842ms step_avg:97.48ms +[2025-08-22 22:49:48] [Rank 0] step:9201/10000 train_time:896865ms step_avg:97.47ms +[2025-08-22 22:49:48] [Rank 0] step:9201/10000 train_time:896865ms step_avg:97.47ms +[2025-08-22 22:49:51] [Rank 0] step:9221/10000 train_time:898923ms step_avg:97.49ms +[2025-08-22 22:49:51] [Rank 0] step:9221/10000 train_time:898923ms step_avg:97.49ms +[2025-08-22 22:49:53] [Rank 0] step:9241/10000 train_time:900973ms step_avg:97.50ms +[2025-08-22 22:49:53] [Rank 0] step:9241/10000 train_time:900973ms step_avg:97.50ms +[2025-08-22 22:49:55] [Rank 0] step:9261/10000 train_time:903023ms step_avg:97.51ms +[2025-08-22 22:49:55] [Rank 0] step:9261/10000 train_time:903023ms step_avg:97.51ms +[2025-08-22 22:49:57] [Rank 0] step:9281/10000 train_time:905061ms step_avg:97.52ms +[2025-08-22 22:49:57] [Rank 0] step:9281/10000 train_time:905061ms step_avg:97.52ms +[2025-08-22 22:49:59] [Rank 0] step:9301/10000 train_time:907098ms step_avg:97.53ms +[2025-08-22 22:49:59] [Rank 0] step:9301/10000 train_time:907098ms step_avg:97.53ms +[2025-08-22 22:50:01] [Rank 0] step:9321/10000 train_time:909145ms step_avg:97.54ms +[2025-08-22 22:50:01] [Rank 0] step:9321/10000 train_time:909145ms step_avg:97.54ms +[2025-08-22 22:50:03] [Rank 0] 
step:9341/10000 train_time:911192ms step_avg:97.55ms +[2025-08-22 22:50:03] [Rank 0] step:9341/10000 train_time:911192ms step_avg:97.55ms +[2025-08-22 22:50:05] [Rank 0] step:9361/10000 train_time:913240ms step_avg:97.56ms +[2025-08-22 22:50:05] [Rank 0] step:9361/10000 train_time:913240ms step_avg:97.56ms +[2025-08-22 22:50:07] [Rank 0] step:9381/10000 train_time:915299ms step_avg:97.57ms +[2025-08-22 22:50:07] [Rank 0] step:9381/10000 train_time:915299ms step_avg:97.57ms +[2025-08-22 22:50:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:50:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:50:23] [Rank 0] PRINT: step:9400/10000 val_loss:3.4516 svd_entropy: attn_qk:H=0.7745,top10E=0.24,eRank=199.6,q75/q25=52.55 attn_vo:H=0.8350,top10E=0.06,eRank=406.8,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.2,q75/q25=2.76 mlp_w2:H=0.9675,top10E=0.05,eRank=618.9,q75/q25=2.86 vo_prod:H=0.6939,top10E=0.10,eRank=231.1,q75/q25=inf train_time:917354ms step_avg:97.59ms +[2025-08-22 22:50:23] [Rank 0] PRINT: step:9400/10000 val_loss:3.4516 svd_entropy: attn_qk:H=0.7745,top10E=0.24,eRank=199.6,q75/q25=52.55 attn_vo:H=0.8350,top10E=0.06,eRank=406.8,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.2,q75/q25=2.76 mlp_w2:H=0.9675,top10E=0.05,eRank=618.9,q75/q25=2.86 vo_prod:H=0.6939,top10E=0.10,eRank=231.1,q75/q25=inf train_time:917354ms step_avg:97.59ms +[2025-08-22 22:50:23] [Rank 0] step:9401/10000 train_time:917378ms step_avg:97.58ms +[2025-08-22 22:50:23] [Rank 0] step:9401/10000 train_time:917378ms step_avg:97.58ms +[2025-08-22 22:50:25] [Rank 0] step:9421/10000 train_time:919423ms step_avg:97.59ms +[2025-08-22 22:50:25] [Rank 0] step:9421/10000 train_time:919423ms step_avg:97.59ms +[2025-08-22 22:50:27] [Rank 0] step:9441/10000 train_time:921462ms step_avg:97.60ms +[2025-08-22 
22:50:27] [Rank 0] step:9441/10000 train_time:921462ms step_avg:97.60ms +[2025-08-22 22:50:29] [Rank 0] step:9461/10000 train_time:923510ms step_avg:97.61ms +[2025-08-22 22:50:29] [Rank 0] step:9461/10000 train_time:923510ms step_avg:97.61ms +[2025-08-22 22:50:31] [Rank 0] step:9481/10000 train_time:925558ms step_avg:97.62ms +[2025-08-22 22:50:31] [Rank 0] step:9481/10000 train_time:925558ms step_avg:97.62ms +[2025-08-22 22:50:33] [Rank 0] step:9501/10000 train_time:927611ms step_avg:97.63ms +[2025-08-22 22:50:33] [Rank 0] step:9501/10000 train_time:927611ms step_avg:97.63ms +[2025-08-22 22:50:35] [Rank 0] step:9521/10000 train_time:929648ms step_avg:97.64ms +[2025-08-22 22:50:35] [Rank 0] step:9521/10000 train_time:929648ms step_avg:97.64ms +[2025-08-22 22:50:37] [Rank 0] step:9541/10000 train_time:931692ms step_avg:97.65ms +[2025-08-22 22:50:37] [Rank 0] step:9541/10000 train_time:931692ms step_avg:97.65ms +[2025-08-22 22:50:39] [Rank 0] step:9561/10000 train_time:933729ms step_avg:97.66ms +[2025-08-22 22:50:39] [Rank 0] step:9561/10000 train_time:933729ms step_avg:97.66ms +[2025-08-22 22:50:41] [Rank 0] step:9581/10000 train_time:935776ms step_avg:97.67ms +[2025-08-22 22:50:41] [Rank 0] step:9581/10000 train_time:935776ms step_avg:97.67ms +[2025-08-22 22:50:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:50:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:50:57] [Rank 0] PRINT: step:9600/10000 val_loss:3.4432 svd_entropy: attn_qk:H=0.7746,top10E=0.24,eRank=199.7,q75/q25=52.64 attn_vo:H=0.8350,top10E=0.06,eRank=406.8,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.2,q75/q25=2.76 mlp_w2:H=0.9675,top10E=0.05,eRank=619.0,q75/q25=2.86 vo_prod:H=0.6940,top10E=0.10,eRank=231.2,q75/q25=inf train_time:937840ms step_avg:97.69ms +[2025-08-22 22:50:57] [Rank 0] PRINT: step:9600/10000 val_loss:3.4432 svd_entropy: attn_qk:H=0.7746,top10E=0.24,eRank=199.7,q75/q25=52.64 attn_vo:H=0.8350,top10E=0.06,eRank=406.8,q75/q25=inf mlp_w1:H=0.9723,top10E=0.04,eRank=639.2,q75/q25=2.76 mlp_w2:H=0.9675,top10E=0.05,eRank=619.0,q75/q25=2.86 vo_prod:H=0.6940,top10E=0.10,eRank=231.2,q75/q25=inf train_time:937840ms step_avg:97.69ms +[2025-08-22 22:50:57] [Rank 0] step:9601/10000 train_time:937862ms step_avg:97.68ms +[2025-08-22 22:50:57] [Rank 0] step:9601/10000 train_time:937862ms step_avg:97.68ms +[2025-08-22 22:50:59] [Rank 0] step:9621/10000 train_time:939906ms step_avg:97.69ms +[2025-08-22 22:50:59] [Rank 0] step:9621/10000 train_time:939906ms step_avg:97.69ms +[2025-08-22 22:51:01] [Rank 0] step:9641/10000 train_time:941955ms step_avg:97.70ms +[2025-08-22 22:51:01] [Rank 0] step:9641/10000 train_time:941955ms step_avg:97.70ms +[2025-08-22 22:51:03] [Rank 0] step:9661/10000 train_time:944028ms step_avg:97.72ms +[2025-08-22 22:51:03] [Rank 0] step:9661/10000 train_time:944028ms step_avg:97.72ms +[2025-08-22 22:51:06] [Rank 0] step:9681/10000 train_time:946093ms step_avg:97.73ms +[2025-08-22 22:51:06] [Rank 0] step:9681/10000 train_time:946093ms step_avg:97.73ms +[2025-08-22 22:51:08] [Rank 0] step:9701/10000 train_time:948175ms step_avg:97.74ms +[2025-08-22 22:51:08] [Rank 0] step:9701/10000 train_time:948175ms step_avg:97.74ms +[2025-08-22 22:51:10] [Rank 0] step:9721/10000 train_time:950248ms step_avg:97.75ms +[2025-08-22 22:51:10] [Rank 0] step:9721/10000 train_time:950248ms step_avg:97.75ms +[2025-08-22 22:51:12] [Rank 0] 
step:9741/10000 train_time:952336ms step_avg:97.77ms +[2025-08-22 22:51:12] [Rank 0] step:9741/10000 train_time:952336ms step_avg:97.77ms +[2025-08-22 22:51:14] [Rank 0] step:9761/10000 train_time:954486ms step_avg:97.79ms +[2025-08-22 22:51:14] [Rank 0] step:9761/10000 train_time:954486ms step_avg:97.79ms +[2025-08-22 22:51:16] [Rank 0] step:9781/10000 train_time:956636ms step_avg:97.81ms +[2025-08-22 22:51:16] [Rank 0] step:9781/10000 train_time:956636ms step_avg:97.81ms +[2025-08-22 22:51:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:51:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:51:32] [Rank 0] PRINT: step:9800/10000 val_loss:3.4351 svd_entropy: attn_qk:H=0.7747,top10E=0.24,eRank=199.7,q75/q25=52.64 attn_vo:H=0.8350,top10E=0.06,eRank=406.9,q75/q25=inf mlp_w1:H=0.9724,top10E=0.04,eRank=639.3,q75/q25=2.76 mlp_w2:H=0.9675,top10E=0.05,eRank=619.1,q75/q25=2.86 vo_prod:H=0.6940,top10E=0.10,eRank=231.3,q75/q25=inf train_time:958730ms step_avg:97.83ms +[2025-08-22 22:51:32] [Rank 0] PRINT: step:9800/10000 val_loss:3.4351 svd_entropy: attn_qk:H=0.7747,top10E=0.24,eRank=199.7,q75/q25=52.64 attn_vo:H=0.8350,top10E=0.06,eRank=406.9,q75/q25=inf mlp_w1:H=0.9724,top10E=0.04,eRank=639.3,q75/q25=2.76 mlp_w2:H=0.9675,top10E=0.05,eRank=619.1,q75/q25=2.86 vo_prod:H=0.6940,top10E=0.10,eRank=231.3,q75/q25=inf train_time:958730ms step_avg:97.83ms +[2025-08-22 22:51:32] [Rank 0] step:9801/10000 train_time:958752ms step_avg:97.82ms +[2025-08-22 22:51:32] [Rank 0] step:9801/10000 train_time:958752ms step_avg:97.82ms +[2025-08-22 22:51:34] [Rank 0] step:9821/10000 train_time:960826ms step_avg:97.83ms +[2025-08-22 22:51:34] [Rank 0] step:9821/10000 train_time:960826ms step_avg:97.83ms +[2025-08-22 22:51:36] [Rank 0] step:9841/10000 train_time:962905ms step_avg:97.85ms +[2025-08-22 
22:51:36] [Rank 0] step:9841/10000 train_time:962905ms step_avg:97.85ms +[2025-08-22 22:51:38] [Rank 0] step:9861/10000 train_time:964966ms step_avg:97.86ms +[2025-08-22 22:51:38] [Rank 0] step:9861/10000 train_time:964966ms step_avg:97.86ms +[2025-08-22 22:51:41] [Rank 0] step:9881/10000 train_time:967030ms step_avg:97.87ms +[2025-08-22 22:51:41] [Rank 0] step:9881/10000 train_time:967030ms step_avg:97.87ms +[2025-08-22 22:51:43] [Rank 0] step:9901/10000 train_time:969113ms step_avg:97.88ms +[2025-08-22 22:51:43] [Rank 0] step:9901/10000 train_time:969113ms step_avg:97.88ms +[2025-08-22 22:51:45] [Rank 0] step:9921/10000 train_time:971181ms step_avg:97.89ms +[2025-08-22 22:51:45] [Rank 0] step:9921/10000 train_time:971181ms step_avg:97.89ms +[2025-08-22 22:51:47] [Rank 0] step:9941/10000 train_time:973262ms step_avg:97.90ms +[2025-08-22 22:51:47] [Rank 0] step:9941/10000 train_time:973262ms step_avg:97.90ms +[2025-08-22 22:51:49] [Rank 0] step:9961/10000 train_time:975327ms step_avg:97.91ms +[2025-08-22 22:51:49] [Rank 0] step:9961/10000 train_time:975327ms step_avg:97.91ms +[2025-08-22 22:51:51] [Rank 0] step:9981/10000 train_time:977408ms step_avg:97.93ms +[2025-08-22 22:51:51] [Rank 0] step:9981/10000 train_time:977408ms step_avg:97.93ms +[2025-08-22 22:51:53] [Rank 0] step:10000/10000 train_time:979383ms step_avg:97.94ms +[2025-08-22 22:51:53] [Rank 0] step:10000/10000 train_time:979383ms step_avg:97.94ms +[2025-08-22 22:51:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:51:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:52:07] [Rank 0] PRINT: step:10000/10000 val_loss:3.4280 svd_entropy: attn_qk:H=0.7747,top10E=0.24,eRank=199.8,q75/q25=52.62 attn_vo:H=0.8350,top10E=0.06,eRank=406.9,q75/q25=inf mlp_w1:H=0.9724,top10E=0.04,eRank=639.3,q75/q25=2.76 mlp_w2:H=0.9675,top10E=0.05,eRank=619.1,q75/q25=2.86 vo_prod:H=0.6940,top10E=0.10,eRank=231.3,q75/q25=inf train_time:979494ms step_avg:97.95ms +[2025-08-22 22:52:07] [Rank 0] PRINT: step:10000/10000 val_loss:3.4280 svd_entropy: attn_qk:H=0.7747,top10E=0.24,eRank=199.8,q75/q25=52.62 attn_vo:H=0.8350,top10E=0.06,eRank=406.9,q75/q25=inf mlp_w1:H=0.9724,top10E=0.04,eRank=639.3,q75/q25=2.76 mlp_w2:H=0.9675,top10E=0.05,eRank=619.1,q75/q25=2.86 vo_prod:H=0.6940,top10E=0.10,eRank=231.3,q75/q25=inf train_time:979494ms step_avg:97.95ms +[2025-08-22 22:52:07] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 22:52:07 2025 --- +[2025-08-22 22:52:07] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 22:52:07 2025 --- +[2025-08-22 22:52:07] [Rank 0] PRINT: Peak memory allocated: 11123 MiB reserved: 16956 MiB +[2025-08-22 22:52:07] [Rank 0] PRINT: Peak memory allocated: 11123 MiB reserved: 16956 MiB diff --git a/logs_svd_gated/mode_8_param_gated_seed_41/config.json b/logs_svd_gated/mode_8_param_gated_seed_41/config.json new file mode 100644 index 0000000000000000000000000000000000000000..4a25424f11f7b4f4d63cf9cac2dd92e5bbc80735 --- /dev/null +++ b/logs_svd_gated/mode_8_param_gated_seed_41/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 41, + "optimizer_mode": 8, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + "cooldown_frac": 
0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "bed1992c-c1c4-4415-888e-dfc621b2308d", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_8_param_gated_seed_41/training_log_bed1992c-c1c4-4415-888e-dfc621b2308d.txt b/logs_svd_gated/mode_8_param_gated_seed_41/training_log_bed1992c-c1c4-4415-888e-dfc621b2308d.txt new file mode 100644 index 0000000000000000000000000000000000000000..fe560cccd00b4c292e003a6c86213e6921b56a92 --- /dev/null +++ b/logs_svd_gated/mode_8_param_gated_seed_41/training_log_bed1992c-c1c4-4415-888e-dfc621b2308d.txt @@ -0,0 +1,2926 @@ +[2025-08-22 12:25:01] [Rank 0] PRINT: --- Script Start: Fri Aug 22 12:25:01 2025 --- +[2025-08-22 12:25:01] [Rank 0] PRINT: --- Script Start: Fri Aug 22 12:25:01 2025 --- +[2025-08-22 12:25:01] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=8, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 12:25:01] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=8, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 12:25:01] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 12:25:01] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 12:25:01] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 12:25:01] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 12:25:01] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_8_param_gated_seed_41 +[2025-08-22 12:25:01] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_8_param_gated_seed_41 +[2025-08-22 12:25:01] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import 
argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k 
= min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 12:25:01] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 12:25:01] [Rank 0] PRINT: Constructing model... +[2025-08-22 12:25:01] [Rank 0] PRINT: Constructing model... +[2025-08-22 12:25:03] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 12:25:03] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 12:25:03] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 12:25:03] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 12:25:03] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 12:25:03] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 12:25:03] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 8 +[2025-08-22 12:25:03] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 8 +[2025-08-22 12:25:03] [Rank 0] PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: 0.05). +[2025-08-22 12:25:03] [Rank 0] PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: 0.05). +[2025-08-22 12:25:03] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 12:25:03] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 12:25:03] [Rank 0] PRINT: Muon optimizer is active with 34 parameters. +[2025-08-22 12:25:03] [Rank 0] PRINT: Muon optimizer is active with 34 parameters. +[2025-08-22 12:25:03] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 12:25:03] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 12:25:03] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 12:25:03] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 12:25:03] [Rank 0] PRINT: Starting warmup... +[2025-08-22 12:25:03] [Rank 0] PRINT: Starting warmup... +[2025-08-22 12:25:48] [Rank 0] PRINT: Warmup complete. +[2025-08-22 12:25:48] [Rank 0] PRINT: Warmup complete. +[2025-08-22 12:25:49] [Rank 0] PRINT: Starting training... +[2025-08-22 12:25:49] [Rank 0] PRINT: Starting training... 
+[2025-08-22 12:25:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:25:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:26:06] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 12:26:06] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 12:26:08] [Rank 0] step:21/10000 train_time:1768ms step_avg:84.20ms +[2025-08-22 12:26:08] [Rank 0] step:21/10000 train_time:1768ms step_avg:84.20ms +[2025-08-22 12:26:10] [Rank 0] step:41/10000 train_time:3553ms step_avg:86.65ms +[2025-08-22 12:26:10] [Rank 0] step:41/10000 train_time:3553ms step_avg:86.65ms +[2025-08-22 12:26:12] [Rank 0] step:61/10000 train_time:5394ms step_avg:88.42ms +[2025-08-22 12:26:12] [Rank 0] step:61/10000 train_time:5394ms step_avg:88.42ms +[2025-08-22 12:26:13] [Rank 0] step:81/10000 train_time:7131ms step_avg:88.04ms +[2025-08-22 12:26:13] [Rank 0] step:81/10000 train_time:7131ms step_avg:88.04ms +[2025-08-22 12:26:15] [Rank 0] step:101/10000 train_time:8870ms step_avg:87.82ms +[2025-08-22 12:26:15] [Rank 0] step:101/10000 train_time:8870ms step_avg:87.82ms +[2025-08-22 12:26:17] [Rank 0] step:121/10000 train_time:10608ms step_avg:87.67ms +[2025-08-22 12:26:17] [Rank 0] step:121/10000 
train_time:10608ms step_avg:87.67ms +[2025-08-22 12:26:18] [Rank 0] step:141/10000 train_time:12348ms step_avg:87.58ms +[2025-08-22 12:26:18] [Rank 0] step:141/10000 train_time:12348ms step_avg:87.58ms +[2025-08-22 12:26:20] [Rank 0] step:161/10000 train_time:14090ms step_avg:87.51ms +[2025-08-22 12:26:20] [Rank 0] step:161/10000 train_time:14090ms step_avg:87.51ms +[2025-08-22 12:26:22] [Rank 0] step:181/10000 train_time:15831ms step_avg:87.46ms +[2025-08-22 12:26:22] [Rank 0] step:181/10000 train_time:15831ms step_avg:87.46ms +[2025-08-22 12:26:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:26:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:26:37] [Rank 0] PRINT: step:200/10000 val_loss:5.8464 svd_entropy: attn_qk:H=0.7115,top10E=0.39,eRank=153.3,q75/q25=18.86 attn_vo:H=0.7176,top10E=0.17,eRank=193.1,q75/q25=inf mlp_w1:H=0.6470,top10E=0.50,eRank=83.2,q75/q25=7.74 mlp_w2:H=0.8696,top10E=0.11,eRank=330.5,q75/q25=13.88 vo_prod:H=0.4937,top10E=0.39,eRank=54.8,q75/q25=inf train_time:17585ms step_avg:87.92ms +[2025-08-22 12:26:37] [Rank 0] PRINT: step:200/10000 val_loss:5.8464 svd_entropy: attn_qk:H=0.7115,top10E=0.39,eRank=153.3,q75/q25=18.86 attn_vo:H=0.7176,top10E=0.17,eRank=193.1,q75/q25=inf mlp_w1:H=0.6470,top10E=0.50,eRank=83.2,q75/q25=7.74 mlp_w2:H=0.8696,top10E=0.11,eRank=330.5,q75/q25=13.88 vo_prod:H=0.4937,top10E=0.39,eRank=54.8,q75/q25=inf train_time:17585ms step_avg:87.92ms +[2025-08-22 12:26:37] [Rank 0] step:201/10000 train_time:17607ms step_avg:87.60ms +[2025-08-22 12:26:37] [Rank 0] step:201/10000 train_time:17607ms step_avg:87.60ms +[2025-08-22 12:26:39] [Rank 0] step:221/10000 train_time:19331ms step_avg:87.47ms +[2025-08-22 12:26:39] [Rank 0] step:221/10000 train_time:19331ms step_avg:87.47ms +[2025-08-22 12:26:41] [Rank 0] step:241/10000 
train_time:21066ms step_avg:87.41ms +[2025-08-22 12:26:41] [Rank 0] step:241/10000 train_time:21066ms step_avg:87.41ms +[2025-08-22 12:26:43] [Rank 0] step:261/10000 train_time:22801ms step_avg:87.36ms +[2025-08-22 12:26:43] [Rank 0] step:261/10000 train_time:22801ms step_avg:87.36ms +[2025-08-22 12:26:44] [Rank 0] step:281/10000 train_time:24536ms step_avg:87.32ms +[2025-08-22 12:26:44] [Rank 0] step:281/10000 train_time:24536ms step_avg:87.32ms +[2025-08-22 12:26:46] [Rank 0] step:301/10000 train_time:26272ms step_avg:87.28ms +[2025-08-22 12:26:46] [Rank 0] step:301/10000 train_time:26272ms step_avg:87.28ms +[2025-08-22 12:26:48] [Rank 0] step:321/10000 train_time:28008ms step_avg:87.25ms +[2025-08-22 12:26:48] [Rank 0] step:321/10000 train_time:28008ms step_avg:87.25ms +[2025-08-22 12:26:50] [Rank 0] step:341/10000 train_time:29744ms step_avg:87.23ms +[2025-08-22 12:26:50] [Rank 0] step:341/10000 train_time:29744ms step_avg:87.23ms +[2025-08-22 12:26:51] [Rank 0] step:361/10000 train_time:31483ms step_avg:87.21ms +[2025-08-22 12:26:51] [Rank 0] step:361/10000 train_time:31483ms step_avg:87.21ms +[2025-08-22 12:26:53] [Rank 0] step:381/10000 train_time:33219ms step_avg:87.19ms +[2025-08-22 12:26:53] [Rank 0] step:381/10000 train_time:33219ms step_avg:87.19ms +[2025-08-22 12:26:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:26:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:27:08] [Rank 0] PRINT: step:400/10000 val_loss:5.3363 svd_entropy: attn_qk:H=0.7234,top10E=0.34,eRank=159.2,q75/q25=29.42 attn_vo:H=0.7432,top10E=0.13,eRank=224.1,q75/q25=inf mlp_w1:H=0.8057,top10E=0.27,eRank=214.2,q75/q25=6.93 mlp_w2:H=0.9565,top10E=0.05,eRank=577.2,q75/q25=3.64 vo_prod:H=0.5489,top10E=0.27,eRank=79.6,q75/q25=inf train_time:34969ms step_avg:87.42ms +[2025-08-22 12:27:08] [Rank 0] PRINT: step:400/10000 val_loss:5.3363 svd_entropy: attn_qk:H=0.7234,top10E=0.34,eRank=159.2,q75/q25=29.42 attn_vo:H=0.7432,top10E=0.13,eRank=224.1,q75/q25=inf mlp_w1:H=0.8057,top10E=0.27,eRank=214.2,q75/q25=6.93 mlp_w2:H=0.9565,top10E=0.05,eRank=577.2,q75/q25=3.64 vo_prod:H=0.5489,top10E=0.27,eRank=79.6,q75/q25=inf train_time:34969ms step_avg:87.42ms +[2025-08-22 12:27:08] [Rank 0] step:401/10000 train_time:34989ms step_avg:87.25ms +[2025-08-22 12:27:08] [Rank 0] step:401/10000 train_time:34989ms step_avg:87.25ms +[2025-08-22 12:27:10] [Rank 0] step:421/10000 train_time:36710ms step_avg:87.20ms +[2025-08-22 12:27:10] [Rank 0] step:421/10000 train_time:36710ms step_avg:87.20ms +[2025-08-22 12:27:12] [Rank 0] step:441/10000 train_time:38440ms step_avg:87.16ms +[2025-08-22 12:27:12] [Rank 0] step:441/10000 train_time:38440ms step_avg:87.16ms +[2025-08-22 12:27:14] [Rank 0] step:461/10000 train_time:40225ms step_avg:87.26ms +[2025-08-22 12:27:14] [Rank 0] step:461/10000 train_time:40225ms step_avg:87.26ms +[2025-08-22 12:27:15] [Rank 0] step:481/10000 train_time:41972ms step_avg:87.26ms +[2025-08-22 12:27:15] [Rank 0] step:481/10000 train_time:41972ms step_avg:87.26ms +[2025-08-22 12:27:17] [Rank 0] step:501/10000 train_time:43703ms step_avg:87.23ms +[2025-08-22 12:27:17] [Rank 0] step:501/10000 train_time:43703ms step_avg:87.23ms +[2025-08-22 12:27:19] [Rank 0] step:521/10000 train_time:45436ms step_avg:87.21ms +[2025-08-22 12:27:19] [Rank 0] step:521/10000 train_time:45436ms step_avg:87.21ms +[2025-08-22 12:27:21] [Rank 0] step:541/10000 train_time:47172ms 
step_avg:87.19ms +[2025-08-22 12:27:21] [Rank 0] step:541/10000 train_time:47172ms step_avg:87.19ms +[2025-08-22 12:27:22] [Rank 0] step:561/10000 train_time:48908ms step_avg:87.18ms +[2025-08-22 12:27:22] [Rank 0] step:561/10000 train_time:48908ms step_avg:87.18ms +[2025-08-22 12:27:24] [Rank 0] step:581/10000 train_time:50643ms step_avg:87.17ms +[2025-08-22 12:27:24] [Rank 0] step:581/10000 train_time:50643ms step_avg:87.17ms +[2025-08-22 12:27:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:27:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:27:39] [Rank 0] PRINT: step:600/10000 val_loss:5.0772 svd_entropy: attn_qk:H=0.7360,top10E=0.31,eRank=167.1,q75/q25=35.82 attn_vo:H=0.7773,top10E=0.10,eRank=275.3,q75/q25=inf mlp_w1:H=0.8447,top10E=0.21,eRank=276.4,q75/q25=5.67 mlp_w2:H=0.9670,top10E=0.04,eRank=617.4,q75/q25=3.04 vo_prod:H=0.5950,top10E=0.20,eRank=108.9,q75/q25=inf train_time:52393ms step_avg:87.32ms +[2025-08-22 12:27:39] [Rank 0] PRINT: step:600/10000 val_loss:5.0772 svd_entropy: attn_qk:H=0.7360,top10E=0.31,eRank=167.1,q75/q25=35.82 attn_vo:H=0.7773,top10E=0.10,eRank=275.3,q75/q25=inf mlp_w1:H=0.8447,top10E=0.21,eRank=276.4,q75/q25=5.67 mlp_w2:H=0.9670,top10E=0.04,eRank=617.4,q75/q25=3.04 vo_prod:H=0.5950,top10E=0.20,eRank=108.9,q75/q25=inf train_time:52393ms step_avg:87.32ms +[2025-08-22 12:27:40] [Rank 0] step:601/10000 train_time:52415ms step_avg:87.21ms +[2025-08-22 12:27:40] [Rank 0] step:601/10000 train_time:52415ms step_avg:87.21ms +[2025-08-22 12:27:41] [Rank 0] step:621/10000 train_time:54138ms step_avg:87.18ms +[2025-08-22 12:27:41] [Rank 0] step:621/10000 train_time:54138ms step_avg:87.18ms +[2025-08-22 12:27:43] [Rank 0] step:641/10000 train_time:55872ms step_avg:87.16ms +[2025-08-22 12:27:43] [Rank 0] step:641/10000 train_time:55872ms 
step_avg:87.16ms +[2025-08-22 12:27:45] [Rank 0] step:661/10000 train_time:57608ms step_avg:87.15ms +[2025-08-22 12:27:45] [Rank 0] step:661/10000 train_time:57608ms step_avg:87.15ms +[2025-08-22 12:27:46] [Rank 0] step:681/10000 train_time:59344ms step_avg:87.14ms +[2025-08-22 12:27:46] [Rank 0] step:681/10000 train_time:59344ms step_avg:87.14ms +[2025-08-22 12:27:48] [Rank 0] step:701/10000 train_time:61081ms step_avg:87.13ms +[2025-08-22 12:27:48] [Rank 0] step:701/10000 train_time:61081ms step_avg:87.13ms +[2025-08-22 12:27:50] [Rank 0] step:721/10000 train_time:62817ms step_avg:87.13ms +[2025-08-22 12:27:50] [Rank 0] step:721/10000 train_time:62817ms step_avg:87.13ms +[2025-08-22 12:27:52] [Rank 0] step:741/10000 train_time:64554ms step_avg:87.12ms +[2025-08-22 12:27:52] [Rank 0] step:741/10000 train_time:64554ms step_avg:87.12ms +[2025-08-22 12:27:53] [Rank 0] step:761/10000 train_time:66306ms step_avg:87.13ms +[2025-08-22 12:27:53] [Rank 0] step:761/10000 train_time:66306ms step_avg:87.13ms +[2025-08-22 12:27:55] [Rank 0] step:781/10000 train_time:68058ms step_avg:87.14ms +[2025-08-22 12:27:55] [Rank 0] step:781/10000 train_time:68058ms step_avg:87.14ms +[2025-08-22 12:27:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:27:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:28:11] [Rank 0] PRINT: step:800/10000 val_loss:4.8423 svd_entropy: attn_qk:H=0.7454,top10E=0.29,eRank=173.7,q75/q25=40.46 attn_vo:H=0.7945,top10E=0.08,eRank=307.5,q75/q25=inf mlp_w1:H=0.8601,top10E=0.19,eRank=306.1,q75/q25=5.29 mlp_w2:H=0.9683,top10E=0.04,eRank=622.6,q75/q25=2.98 vo_prod:H=0.6214,top10E=0.17,eRank=131.8,q75/q25=inf train_time:69822ms step_avg:87.28ms +[2025-08-22 12:28:11] [Rank 0] PRINT: step:800/10000 val_loss:4.8423 svd_entropy: attn_qk:H=0.7454,top10E=0.29,eRank=173.7,q75/q25=40.46 attn_vo:H=0.7945,top10E=0.08,eRank=307.5,q75/q25=inf mlp_w1:H=0.8601,top10E=0.19,eRank=306.1,q75/q25=5.29 mlp_w2:H=0.9683,top10E=0.04,eRank=622.6,q75/q25=2.98 vo_prod:H=0.6214,top10E=0.17,eRank=131.8,q75/q25=inf train_time:69822ms step_avg:87.28ms +[2025-08-22 12:28:11] [Rank 0] step:801/10000 train_time:69842ms step_avg:87.19ms +[2025-08-22 12:28:11] [Rank 0] step:801/10000 train_time:69842ms step_avg:87.19ms +[2025-08-22 12:28:12] [Rank 0] step:821/10000 train_time:71577ms step_avg:87.18ms +[2025-08-22 12:28:12] [Rank 0] step:821/10000 train_time:71577ms step_avg:87.18ms +[2025-08-22 12:28:14] [Rank 0] step:841/10000 train_time:73323ms step_avg:87.19ms +[2025-08-22 12:28:14] [Rank 0] step:841/10000 train_time:73323ms step_avg:87.19ms +[2025-08-22 12:28:16] [Rank 0] step:861/10000 train_time:75147ms step_avg:87.28ms +[2025-08-22 12:28:16] [Rank 0] step:861/10000 train_time:75147ms step_avg:87.28ms +[2025-08-22 12:28:18] [Rank 0] step:881/10000 train_time:76957ms step_avg:87.35ms +[2025-08-22 12:28:18] [Rank 0] step:881/10000 train_time:76957ms step_avg:87.35ms +[2025-08-22 12:28:20] [Rank 0] step:901/10000 train_time:78705ms step_avg:87.35ms +[2025-08-22 12:28:20] [Rank 0] step:901/10000 train_time:78705ms step_avg:87.35ms +[2025-08-22 12:28:21] [Rank 0] step:921/10000 train_time:80455ms step_avg:87.36ms +[2025-08-22 12:28:21] [Rank 0] step:921/10000 train_time:80455ms step_avg:87.36ms +[2025-08-22 12:28:23] [Rank 0] step:941/10000 train_time:82204ms 
step_avg:87.36ms +[2025-08-22 12:28:23] [Rank 0] step:941/10000 train_time:82204ms step_avg:87.36ms +[2025-08-22 12:28:25] [Rank 0] step:961/10000 train_time:83954ms step_avg:87.36ms +[2025-08-22 12:28:25] [Rank 0] step:961/10000 train_time:83954ms step_avg:87.36ms +[2025-08-22 12:28:27] [Rank 0] step:981/10000 train_time:85704ms step_avg:87.36ms +[2025-08-22 12:28:27] [Rank 0] step:981/10000 train_time:85704ms step_avg:87.36ms +[2025-08-22 12:28:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:28:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:28:42] [Rank 0] PRINT: step:1000/10000 val_loss:4.6626 svd_entropy: attn_qk:H=0.7528,top10E=0.28,eRank=179.3,q75/q25=42.28 attn_vo:H=0.8055,top10E=0.08,eRank=330.8,q75/q25=inf mlp_w1:H=0.8704,top10E=0.18,eRank=327.6,q75/q25=5.12 mlp_w2:H=0.9688,top10E=0.04,eRank=624.5,q75/q25=2.96 vo_prod:H=0.6398,top10E=0.15,eRank=151.1,q75/q25=inf train_time:87466ms step_avg:87.47ms +[2025-08-22 12:28:42] [Rank 0] PRINT: step:1000/10000 val_loss:4.6626 svd_entropy: attn_qk:H=0.7528,top10E=0.28,eRank=179.3,q75/q25=42.28 attn_vo:H=0.8055,top10E=0.08,eRank=330.8,q75/q25=inf mlp_w1:H=0.8704,top10E=0.18,eRank=327.6,q75/q25=5.12 mlp_w2:H=0.9688,top10E=0.04,eRank=624.5,q75/q25=2.96 vo_prod:H=0.6398,top10E=0.15,eRank=151.1,q75/q25=inf train_time:87466ms step_avg:87.47ms +[2025-08-22 12:28:42] [Rank 0] step:1001/10000 train_time:87486ms step_avg:87.40ms +[2025-08-22 12:28:42] [Rank 0] step:1001/10000 train_time:87486ms step_avg:87.40ms +[2025-08-22 12:28:44] [Rank 0] step:1021/10000 train_time:89237ms step_avg:87.40ms +[2025-08-22 12:28:44] [Rank 0] step:1021/10000 train_time:89237ms step_avg:87.40ms +[2025-08-22 12:28:45] [Rank 0] step:1041/10000 train_time:90981ms step_avg:87.40ms +[2025-08-22 12:28:45] [Rank 0] step:1041/10000 train_time:90981ms 
step_avg:87.40ms +[2025-08-22 12:28:47] [Rank 0] step:1061/10000 train_time:92726ms step_avg:87.40ms +[2025-08-22 12:28:47] [Rank 0] step:1061/10000 train_time:92726ms step_avg:87.40ms +[2025-08-22 12:28:49] [Rank 0] step:1081/10000 train_time:94474ms step_avg:87.39ms +[2025-08-22 12:28:49] [Rank 0] step:1081/10000 train_time:94474ms step_avg:87.39ms +[2025-08-22 12:28:51] [Rank 0] step:1101/10000 train_time:96221ms step_avg:87.39ms +[2025-08-22 12:28:51] [Rank 0] step:1101/10000 train_time:96221ms step_avg:87.39ms +[2025-08-22 12:28:52] [Rank 0] step:1121/10000 train_time:97968ms step_avg:87.39ms +[2025-08-22 12:28:52] [Rank 0] step:1121/10000 train_time:97968ms step_avg:87.39ms +[2025-08-22 12:28:54] [Rank 0] step:1141/10000 train_time:99715ms step_avg:87.39ms +[2025-08-22 12:28:54] [Rank 0] step:1141/10000 train_time:99715ms step_avg:87.39ms +[2025-08-22 12:28:56] [Rank 0] step:1161/10000 train_time:101464ms step_avg:87.39ms +[2025-08-22 12:28:56] [Rank 0] step:1161/10000 train_time:101464ms step_avg:87.39ms +[2025-08-22 12:28:58] [Rank 0] step:1181/10000 train_time:103213ms step_avg:87.39ms +[2025-08-22 12:28:58] [Rank 0] step:1181/10000 train_time:103213ms step_avg:87.39ms +[2025-08-22 12:28:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:28:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:29:13] [Rank 0] PRINT: step:1200/10000 val_loss:4.5047 svd_entropy: attn_qk:H=0.7580,top10E=0.27,eRank=183.5,q75/q25=44.34 attn_vo:H=0.8120,top10E=0.07,eRank=345.8,q75/q25=inf mlp_w1:H=0.8777,top10E=0.17,eRank=344.0,q75/q25=5.02 mlp_w2:H=0.9690,top10E=0.04,eRank=625.2,q75/q25=2.94 vo_prod:H=0.6503,top10E=0.14,eRank=163.4,q75/q25=inf train_time:104975ms step_avg:87.48ms +[2025-08-22 12:29:13] [Rank 0] PRINT: step:1200/10000 val_loss:4.5047 svd_entropy: attn_qk:H=0.7580,top10E=0.27,eRank=183.5,q75/q25=44.34 attn_vo:H=0.8120,top10E=0.07,eRank=345.8,q75/q25=inf mlp_w1:H=0.8777,top10E=0.17,eRank=344.0,q75/q25=5.02 mlp_w2:H=0.9690,top10E=0.04,eRank=625.2,q75/q25=2.94 vo_prod:H=0.6503,top10E=0.14,eRank=163.4,q75/q25=inf train_time:104975ms step_avg:87.48ms +[2025-08-22 12:29:13] [Rank 0] step:1201/10000 train_time:104996ms step_avg:87.42ms +[2025-08-22 12:29:13] [Rank 0] step:1201/10000 train_time:104996ms step_avg:87.42ms +[2025-08-22 12:29:15] [Rank 0] step:1221/10000 train_time:106744ms step_avg:87.42ms +[2025-08-22 12:29:15] [Rank 0] step:1221/10000 train_time:106744ms step_avg:87.42ms +[2025-08-22 12:29:17] [Rank 0] step:1241/10000 train_time:108489ms step_avg:87.42ms +[2025-08-22 12:29:17] [Rank 0] step:1241/10000 train_time:108489ms step_avg:87.42ms +[2025-08-22 12:29:18] [Rank 0] step:1261/10000 train_time:110235ms step_avg:87.42ms +[2025-08-22 12:29:18] [Rank 0] step:1261/10000 train_time:110235ms step_avg:87.42ms +[2025-08-22 12:29:20] [Rank 0] step:1281/10000 train_time:112025ms step_avg:87.45ms +[2025-08-22 12:29:20] [Rank 0] step:1281/10000 train_time:112025ms step_avg:87.45ms +[2025-08-22 12:29:22] [Rank 0] step:1301/10000 train_time:113879ms step_avg:87.53ms +[2025-08-22 12:29:22] [Rank 0] step:1301/10000 train_time:113879ms step_avg:87.53ms +[2025-08-22 12:29:24] [Rank 0] step:1321/10000 train_time:115624ms step_avg:87.53ms +[2025-08-22 12:29:24] [Rank 0] step:1321/10000 train_time:115624ms step_avg:87.53ms +[2025-08-22 12:29:26] [Rank 0] 
step:1341/10000 train_time:117371ms step_avg:87.52ms +[2025-08-22 12:29:26] [Rank 0] step:1341/10000 train_time:117371ms step_avg:87.52ms +[2025-08-22 12:29:27] [Rank 0] step:1361/10000 train_time:119119ms step_avg:87.52ms +[2025-08-22 12:29:27] [Rank 0] step:1361/10000 train_time:119119ms step_avg:87.52ms +[2025-08-22 12:29:29] [Rank 0] step:1381/10000 train_time:120867ms step_avg:87.52ms +[2025-08-22 12:29:29] [Rank 0] step:1381/10000 train_time:120867ms step_avg:87.52ms +[2025-08-22 12:29:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:29:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:29:44] [Rank 0] PRINT: step:1400/10000 val_loss:4.4225 svd_entropy: attn_qk:H=0.7622,top10E=0.27,eRank=187.0,q75/q25=45.91 attn_vo:H=0.8160,top10E=0.07,eRank=355.5,q75/q25=inf mlp_w1:H=0.8833,top10E=0.16,eRank=357.0,q75/q25=4.94 mlp_w2:H=0.9691,top10E=0.05,eRank=625.8,q75/q25=2.92 vo_prod:H=0.6565,top10E=0.13,eRank=171.4,q75/q25=inf train_time:122629ms step_avg:87.59ms +[2025-08-22 12:29:44] [Rank 0] PRINT: step:1400/10000 val_loss:4.4225 svd_entropy: attn_qk:H=0.7622,top10E=0.27,eRank=187.0,q75/q25=45.91 attn_vo:H=0.8160,top10E=0.07,eRank=355.5,q75/q25=inf mlp_w1:H=0.8833,top10E=0.16,eRank=357.0,q75/q25=4.94 mlp_w2:H=0.9691,top10E=0.05,eRank=625.8,q75/q25=2.92 vo_prod:H=0.6565,top10E=0.13,eRank=171.4,q75/q25=inf train_time:122629ms step_avg:87.59ms +[2025-08-22 12:29:45] [Rank 0] step:1401/10000 train_time:122647ms step_avg:87.54ms +[2025-08-22 12:29:45] [Rank 0] step:1401/10000 train_time:122647ms step_avg:87.54ms +[2025-08-22 12:29:46] [Rank 0] step:1421/10000 train_time:124387ms step_avg:87.53ms +[2025-08-22 12:29:46] [Rank 0] step:1421/10000 train_time:124387ms step_avg:87.53ms +[2025-08-22 12:29:48] [Rank 0] step:1441/10000 train_time:126134ms step_avg:87.53ms +[2025-08-22 
12:29:48] [Rank 0] step:1441/10000 train_time:126134ms step_avg:87.53ms +[2025-08-22 12:29:50] [Rank 0] step:1461/10000 train_time:127883ms step_avg:87.53ms +[2025-08-22 12:29:50] [Rank 0] step:1461/10000 train_time:127883ms step_avg:87.53ms +[2025-08-22 12:29:52] [Rank 0] step:1481/10000 train_time:129629ms step_avg:87.53ms +[2025-08-22 12:29:52] [Rank 0] step:1481/10000 train_time:129629ms step_avg:87.53ms +[2025-08-22 12:29:53] [Rank 0] step:1501/10000 train_time:131387ms step_avg:87.53ms +[2025-08-22 12:29:53] [Rank 0] step:1501/10000 train_time:131387ms step_avg:87.53ms +[2025-08-22 12:29:55] [Rank 0] step:1521/10000 train_time:133144ms step_avg:87.54ms +[2025-08-22 12:29:55] [Rank 0] step:1521/10000 train_time:133144ms step_avg:87.54ms +[2025-08-22 12:29:57] [Rank 0] step:1541/10000 train_time:134904ms step_avg:87.54ms +[2025-08-22 12:29:57] [Rank 0] step:1541/10000 train_time:134904ms step_avg:87.54ms +[2025-08-22 12:29:59] [Rank 0] step:1561/10000 train_time:136664ms step_avg:87.55ms +[2025-08-22 12:29:59] [Rank 0] step:1561/10000 train_time:136664ms step_avg:87.55ms +[2025-08-22 12:30:00] [Rank 0] step:1581/10000 train_time:138426ms step_avg:87.56ms +[2025-08-22 12:30:00] [Rank 0] step:1581/10000 train_time:138426ms step_avg:87.56ms +[2025-08-22 12:30:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:30:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:30:16] [Rank 0] PRINT: step:1600/10000 val_loss:4.3244 svd_entropy: attn_qk:H=0.7656,top10E=0.26,eRank=189.9,q75/q25=46.89 attn_vo:H=0.8188,top10E=0.07,eRank=362.4,q75/q25=inf mlp_w1:H=0.8876,top10E=0.16,eRank=367.6,q75/q25=4.88 mlp_w2:H=0.9692,top10E=0.05,eRank=626.0,q75/q25=2.92 vo_prod:H=0.6611,top10E=0.13,eRank=177.6,q75/q25=inf train_time:140199ms step_avg:87.62ms +[2025-08-22 12:30:16] [Rank 0] PRINT: step:1600/10000 val_loss:4.3244 svd_entropy: attn_qk:H=0.7656,top10E=0.26,eRank=189.9,q75/q25=46.89 attn_vo:H=0.8188,top10E=0.07,eRank=362.4,q75/q25=inf mlp_w1:H=0.8876,top10E=0.16,eRank=367.6,q75/q25=4.88 mlp_w2:H=0.9692,top10E=0.05,eRank=626.0,q75/q25=2.92 vo_prod:H=0.6611,top10E=0.13,eRank=177.6,q75/q25=inf train_time:140199ms step_avg:87.62ms +[2025-08-22 12:30:16] [Rank 0] step:1601/10000 train_time:140220ms step_avg:87.58ms +[2025-08-22 12:30:16] [Rank 0] step:1601/10000 train_time:140220ms step_avg:87.58ms +[2025-08-22 12:30:18] [Rank 0] step:1621/10000 train_time:141969ms step_avg:87.58ms +[2025-08-22 12:30:18] [Rank 0] step:1621/10000 train_time:141969ms step_avg:87.58ms +[2025-08-22 12:30:20] [Rank 0] step:1641/10000 train_time:143726ms step_avg:87.58ms +[2025-08-22 12:30:20] [Rank 0] step:1641/10000 train_time:143726ms step_avg:87.58ms +[2025-08-22 12:30:21] [Rank 0] step:1661/10000 train_time:145484ms step_avg:87.59ms +[2025-08-22 12:30:21] [Rank 0] step:1661/10000 train_time:145484ms step_avg:87.59ms +[2025-08-22 12:30:23] [Rank 0] step:1681/10000 train_time:147325ms step_avg:87.64ms +[2025-08-22 12:30:23] [Rank 0] step:1681/10000 train_time:147325ms step_avg:87.64ms +[2025-08-22 12:30:25] [Rank 0] step:1701/10000 train_time:149128ms step_avg:87.67ms +[2025-08-22 12:30:25] [Rank 0] step:1701/10000 train_time:149128ms step_avg:87.67ms +[2025-08-22 12:30:27] [Rank 0] step:1721/10000 train_time:150887ms step_avg:87.67ms +[2025-08-22 12:30:27] [Rank 0] step:1721/10000 train_time:150887ms step_avg:87.67ms +[2025-08-22 12:30:29] [Rank 0] 
step:1741/10000 train_time:152646ms step_avg:87.68ms +[2025-08-22 12:30:29] [Rank 0] step:1741/10000 train_time:152646ms step_avg:87.68ms +[2025-08-22 12:30:30] [Rank 0] step:1761/10000 train_time:154406ms step_avg:87.68ms +[2025-08-22 12:30:30] [Rank 0] step:1761/10000 train_time:154406ms step_avg:87.68ms +[2025-08-22 12:30:32] [Rank 0] step:1781/10000 train_time:156167ms step_avg:87.68ms +[2025-08-22 12:30:32] [Rank 0] step:1781/10000 train_time:156167ms step_avg:87.68ms +[2025-08-22 12:30:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:30:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:30:48] [Rank 0] PRINT: step:1800/10000 val_loss:4.2510 svd_entropy: attn_qk:H=0.7684,top10E=0.26,eRank=192.4,q75/q25=47.45 attn_vo:H=0.8209,top10E=0.06,eRank=367.7,q75/q25=inf mlp_w1:H=0.8912,top10E=0.15,eRank=376.6,q75/q25=4.81 mlp_w2:H=0.9692,top10E=0.05,eRank=626.1,q75/q25=2.91 vo_prod:H=0.6646,top10E=0.12,eRank=182.5,q75/q25=inf train_time:157939ms step_avg:87.74ms +[2025-08-22 12:30:48] [Rank 0] PRINT: step:1800/10000 val_loss:4.2510 svd_entropy: attn_qk:H=0.7684,top10E=0.26,eRank=192.4,q75/q25=47.45 attn_vo:H=0.8209,top10E=0.06,eRank=367.7,q75/q25=inf mlp_w1:H=0.8912,top10E=0.15,eRank=376.6,q75/q25=4.81 mlp_w2:H=0.9692,top10E=0.05,eRank=626.1,q75/q25=2.91 vo_prod:H=0.6646,top10E=0.12,eRank=182.5,q75/q25=inf train_time:157939ms step_avg:87.74ms +[2025-08-22 12:30:48] [Rank 0] step:1801/10000 train_time:157959ms step_avg:87.71ms +[2025-08-22 12:30:48] [Rank 0] step:1801/10000 train_time:157959ms step_avg:87.71ms +[2025-08-22 12:30:50] [Rank 0] step:1821/10000 train_time:159715ms step_avg:87.71ms +[2025-08-22 12:30:50] [Rank 0] step:1821/10000 train_time:159715ms step_avg:87.71ms +[2025-08-22 12:30:51] [Rank 0] step:1841/10000 train_time:161472ms step_avg:87.71ms +[2025-08-22 
12:30:51] [Rank 0] step:1841/10000 train_time:161472ms step_avg:87.71ms +[2025-08-22 12:30:53] [Rank 0] step:1861/10000 train_time:163231ms step_avg:87.71ms +[2025-08-22 12:30:53] [Rank 0] step:1861/10000 train_time:163231ms step_avg:87.71ms +[2025-08-22 12:30:55] [Rank 0] step:1881/10000 train_time:164991ms step_avg:87.71ms +[2025-08-22 12:30:55] [Rank 0] step:1881/10000 train_time:164991ms step_avg:87.71ms +[2025-08-22 12:30:57] [Rank 0] step:1901/10000 train_time:166750ms step_avg:87.72ms +[2025-08-22 12:30:57] [Rank 0] step:1901/10000 train_time:166750ms step_avg:87.72ms +[2025-08-22 12:30:58] [Rank 0] step:1921/10000 train_time:168511ms step_avg:87.72ms +[2025-08-22 12:30:58] [Rank 0] step:1921/10000 train_time:168511ms step_avg:87.72ms +[2025-08-22 12:31:00] [Rank 0] step:1941/10000 train_time:170270ms step_avg:87.72ms +[2025-08-22 12:31:00] [Rank 0] step:1941/10000 train_time:170270ms step_avg:87.72ms +[2025-08-22 12:31:02] [Rank 0] step:1961/10000 train_time:172032ms step_avg:87.73ms +[2025-08-22 12:31:02] [Rank 0] step:1961/10000 train_time:172032ms step_avg:87.73ms +[2025-08-22 12:31:04] [Rank 0] step:1981/10000 train_time:173793ms step_avg:87.73ms +[2025-08-22 12:31:04] [Rank 0] step:1981/10000 train_time:173793ms step_avg:87.73ms +[2025-08-22 12:31:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:31:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:31:19] [Rank 0] PRINT: step:2000/10000 val_loss:4.2171 svd_entropy: attn_qk:H=0.7712,top10E=0.26,eRank=194.9,q75/q25=48.26 attn_vo:H=0.8225,top10E=0.06,eRank=371.9,q75/q25=inf mlp_w1:H=0.8942,top10E=0.15,eRank=384.3,q75/q25=4.77 mlp_w2:H=0.9692,top10E=0.05,eRank=626.0,q75/q25=2.91 vo_prod:H=0.6676,top10E=0.12,eRank=186.8,q75/q25=inf train_time:175569ms step_avg:87.78ms +[2025-08-22 12:31:19] [Rank 0] PRINT: step:2000/10000 val_loss:4.2171 svd_entropy: attn_qk:H=0.7712,top10E=0.26,eRank=194.9,q75/q25=48.26 attn_vo:H=0.8225,top10E=0.06,eRank=371.9,q75/q25=inf mlp_w1:H=0.8942,top10E=0.15,eRank=384.3,q75/q25=4.77 mlp_w2:H=0.9692,top10E=0.05,eRank=626.0,q75/q25=2.91 vo_prod:H=0.6676,top10E=0.12,eRank=186.8,q75/q25=inf train_time:175569ms step_avg:87.78ms +[2025-08-22 12:31:20] [Rank 0] step:2001/10000 train_time:175589ms step_avg:87.75ms +[2025-08-22 12:31:20] [Rank 0] step:2001/10000 train_time:175589ms step_avg:87.75ms +[2025-08-22 12:31:21] [Rank 0] step:2021/10000 train_time:177325ms step_avg:87.74ms +[2025-08-22 12:31:21] [Rank 0] step:2021/10000 train_time:177325ms step_avg:87.74ms +[2025-08-22 12:31:23] [Rank 0] step:2041/10000 train_time:179117ms step_avg:87.76ms +[2025-08-22 12:31:23] [Rank 0] step:2041/10000 train_time:179117ms step_avg:87.76ms +[2025-08-22 12:31:25] [Rank 0] step:2061/10000 train_time:180879ms step_avg:87.76ms +[2025-08-22 12:31:25] [Rank 0] step:2061/10000 train_time:180879ms step_avg:87.76ms +[2025-08-22 12:31:27] [Rank 0] step:2081/10000 train_time:182735ms step_avg:87.81ms +[2025-08-22 12:31:27] [Rank 0] step:2081/10000 train_time:182735ms step_avg:87.81ms +[2025-08-22 12:31:28] [Rank 0] step:2101/10000 train_time:184525ms step_avg:87.83ms +[2025-08-22 12:31:28] [Rank 0] step:2101/10000 train_time:184525ms step_avg:87.83ms +[2025-08-22 12:31:30] [Rank 0] step:2121/10000 train_time:186284ms step_avg:87.83ms +[2025-08-22 12:31:30] [Rank 0] step:2121/10000 train_time:186284ms step_avg:87.83ms +[2025-08-22 12:31:32] [Rank 0] 
step:2141/10000 train_time:188047ms step_avg:87.83ms +[2025-08-22 12:31:32] [Rank 0] step:2141/10000 train_time:188047ms step_avg:87.83ms +[2025-08-22 12:31:34] [Rank 0] step:2161/10000 train_time:189811ms step_avg:87.83ms +[2025-08-22 12:31:34] [Rank 0] step:2161/10000 train_time:189811ms step_avg:87.83ms +[2025-08-22 12:31:36] [Rank 0] step:2181/10000 train_time:191575ms step_avg:87.84ms +[2025-08-22 12:31:36] [Rank 0] step:2181/10000 train_time:191575ms step_avg:87.84ms +[2025-08-22 12:31:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:31:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:31:51] [Rank 0] PRINT: step:2200/10000 val_loss:4.1653 svd_entropy: attn_qk:H=0.7734,top10E=0.25,eRank=196.9,q75/q25=48.38 attn_vo:H=0.8237,top10E=0.06,eRank=375.1,q75/q25=inf mlp_w1:H=0.8966,top10E=0.14,eRank=390.7,q75/q25=4.71 mlp_w2:H=0.9692,top10E=0.05,eRank=625.8,q75/q25=2.91 vo_prod:H=0.6697,top10E=0.12,eRank=190.0,q75/q25=inf train_time:193352ms step_avg:87.89ms +[2025-08-22 12:31:51] [Rank 0] PRINT: step:2200/10000 val_loss:4.1653 svd_entropy: attn_qk:H=0.7734,top10E=0.25,eRank=196.9,q75/q25=48.38 attn_vo:H=0.8237,top10E=0.06,eRank=375.1,q75/q25=inf mlp_w1:H=0.8966,top10E=0.14,eRank=390.7,q75/q25=4.71 mlp_w2:H=0.9692,top10E=0.05,eRank=625.8,q75/q25=2.91 vo_prod:H=0.6697,top10E=0.12,eRank=190.0,q75/q25=inf train_time:193352ms step_avg:87.89ms +[2025-08-22 12:31:51] [Rank 0] step:2201/10000 train_time:193372ms step_avg:87.86ms +[2025-08-22 12:31:51] [Rank 0] step:2201/10000 train_time:193372ms step_avg:87.86ms +[2025-08-22 12:31:53] [Rank 0] step:2221/10000 train_time:195125ms step_avg:87.85ms +[2025-08-22 12:31:53] [Rank 0] step:2221/10000 train_time:195125ms step_avg:87.85ms +[2025-08-22 12:31:55] [Rank 0] step:2241/10000 train_time:196920ms step_avg:87.87ms +[2025-08-22 
12:31:55] [Rank 0] step:2241/10000 train_time:196920ms step_avg:87.87ms +[2025-08-22 12:31:57] [Rank 0] step:2261/10000 train_time:198722ms step_avg:87.89ms +[2025-08-22 12:31:57] [Rank 0] step:2261/10000 train_time:198722ms step_avg:87.89ms +[2025-08-22 12:31:58] [Rank 0] step:2281/10000 train_time:200525ms step_avg:87.91ms +[2025-08-22 12:31:58] [Rank 0] step:2281/10000 train_time:200525ms step_avg:87.91ms +[2025-08-22 12:32:00] [Rank 0] step:2301/10000 train_time:202328ms step_avg:87.93ms +[2025-08-22 12:32:00] [Rank 0] step:2301/10000 train_time:202328ms step_avg:87.93ms +[2025-08-22 12:32:02] [Rank 0] step:2321/10000 train_time:204131ms step_avg:87.95ms +[2025-08-22 12:32:02] [Rank 0] step:2321/10000 train_time:204131ms step_avg:87.95ms +[2025-08-22 12:32:04] [Rank 0] step:2341/10000 train_time:205933ms step_avg:87.97ms +[2025-08-22 12:32:04] [Rank 0] step:2341/10000 train_time:205933ms step_avg:87.97ms +[2025-08-22 12:32:06] [Rank 0] step:2361/10000 train_time:207738ms step_avg:87.99ms +[2025-08-22 12:32:06] [Rank 0] step:2361/10000 train_time:207738ms step_avg:87.99ms +[2025-08-22 12:32:07] [Rank 0] step:2381/10000 train_time:209544ms step_avg:88.01ms +[2025-08-22 12:32:07] [Rank 0] step:2381/10000 train_time:209544ms step_avg:88.01ms +[2025-08-22 12:32:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:32:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:32:23] [Rank 0] PRINT: step:2400/10000 val_loss:4.1057 svd_entropy: attn_qk:H=0.7748,top10E=0.25,eRank=198.3,q75/q25=48.21 attn_vo:H=0.8247,top10E=0.06,eRank=377.7,q75/q25=inf mlp_w1:H=0.8989,top10E=0.14,eRank=396.7,q75/q25=4.66 mlp_w2:H=0.9691,top10E=0.05,eRank=625.7,q75/q25=2.91 vo_prod:H=0.6717,top10E=0.12,eRank=192.9,q75/q25=inf train_time:211362ms step_avg:88.07ms +[2025-08-22 12:32:23] [Rank 0] PRINT: step:2400/10000 val_loss:4.1057 svd_entropy: attn_qk:H=0.7748,top10E=0.25,eRank=198.3,q75/q25=48.21 attn_vo:H=0.8247,top10E=0.06,eRank=377.7,q75/q25=inf mlp_w1:H=0.8989,top10E=0.14,eRank=396.7,q75/q25=4.66 mlp_w2:H=0.9691,top10E=0.05,eRank=625.7,q75/q25=2.91 vo_prod:H=0.6717,top10E=0.12,eRank=192.9,q75/q25=inf train_time:211362ms step_avg:88.07ms +[2025-08-22 12:32:23] [Rank 0] step:2401/10000 train_time:211381ms step_avg:88.04ms +[2025-08-22 12:32:23] [Rank 0] step:2401/10000 train_time:211381ms step_avg:88.04ms +[2025-08-22 12:32:25] [Rank 0] step:2421/10000 train_time:213164ms step_avg:88.05ms +[2025-08-22 12:32:25] [Rank 0] step:2421/10000 train_time:213164ms step_avg:88.05ms +[2025-08-22 12:32:27] [Rank 0] step:2441/10000 train_time:214964ms step_avg:88.06ms +[2025-08-22 12:32:27] [Rank 0] step:2441/10000 train_time:214964ms step_avg:88.06ms +[2025-08-22 12:32:29] [Rank 0] step:2461/10000 train_time:216834ms step_avg:88.11ms +[2025-08-22 12:32:29] [Rank 0] step:2461/10000 train_time:216834ms step_avg:88.11ms +[2025-08-22 12:32:31] [Rank 0] step:2481/10000 train_time:218660ms step_avg:88.13ms +[2025-08-22 12:32:31] [Rank 0] step:2481/10000 train_time:218660ms step_avg:88.13ms +[2025-08-22 12:32:32] [Rank 0] step:2501/10000 train_time:220463ms step_avg:88.15ms +[2025-08-22 12:32:32] [Rank 0] step:2501/10000 train_time:220463ms step_avg:88.15ms +[2025-08-22 12:32:34] [Rank 0] step:2521/10000 train_time:222267ms step_avg:88.17ms +[2025-08-22 12:32:34] [Rank 0] step:2521/10000 train_time:222267ms step_avg:88.17ms +[2025-08-22 12:32:36] [Rank 0] 
step:2541/10000 train_time:224070ms step_avg:88.18ms +[2025-08-22 12:32:36] [Rank 0] step:2541/10000 train_time:224070ms step_avg:88.18ms +[2025-08-22 12:32:38] [Rank 0] step:2561/10000 train_time:225875ms step_avg:88.20ms +[2025-08-22 12:32:38] [Rank 0] step:2561/10000 train_time:225875ms step_avg:88.20ms +[2025-08-22 12:32:40] [Rank 0] step:2581/10000 train_time:227679ms step_avg:88.21ms +[2025-08-22 12:32:40] [Rank 0] step:2581/10000 train_time:227679ms step_avg:88.21ms +[2025-08-22 12:32:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:32:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:32:55] [Rank 0] PRINT: step:2600/10000 val_loss:4.0740 svd_entropy: attn_qk:H=0.7766,top10E=0.25,eRank=200.0,q75/q25=48.17 attn_vo:H=0.8256,top10E=0.06,eRank=380.0,q75/q25=inf mlp_w1:H=0.9009,top10E=0.14,eRank=402.2,q75/q25=4.62 mlp_w2:H=0.9691,top10E=0.05,eRank=625.4,q75/q25=2.90 vo_prod:H=0.6734,top10E=0.11,eRank=195.5,q75/q25=inf train_time:229498ms step_avg:88.27ms +[2025-08-22 12:32:55] [Rank 0] PRINT: step:2600/10000 val_loss:4.0740 svd_entropy: attn_qk:H=0.7766,top10E=0.25,eRank=200.0,q75/q25=48.17 attn_vo:H=0.8256,top10E=0.06,eRank=380.0,q75/q25=inf mlp_w1:H=0.9009,top10E=0.14,eRank=402.2,q75/q25=4.62 mlp_w2:H=0.9691,top10E=0.05,eRank=625.4,q75/q25=2.90 vo_prod:H=0.6734,top10E=0.11,eRank=195.5,q75/q25=inf train_time:229498ms step_avg:88.27ms +[2025-08-22 12:32:55] [Rank 0] step:2601/10000 train_time:229520ms step_avg:88.24ms +[2025-08-22 12:32:55] [Rank 0] step:2601/10000 train_time:229520ms step_avg:88.24ms +[2025-08-22 12:32:57] [Rank 0] step:2621/10000 train_time:231316ms step_avg:88.25ms +[2025-08-22 12:32:57] [Rank 0] step:2621/10000 train_time:231316ms step_avg:88.25ms +[2025-08-22 12:32:59] [Rank 0] step:2641/10000 train_time:233120ms step_avg:88.27ms +[2025-08-22 
12:32:59] [Rank 0] step:2641/10000 train_time:233120ms step_avg:88.27ms +[2025-08-22 12:33:01] [Rank 0] step:2661/10000 train_time:234925ms step_avg:88.28ms +[2025-08-22 12:33:01] [Rank 0] step:2661/10000 train_time:234925ms step_avg:88.28ms +[2025-08-22 12:33:03] [Rank 0] step:2681/10000 train_time:236728ms step_avg:88.30ms +[2025-08-22 12:33:03] [Rank 0] step:2681/10000 train_time:236728ms step_avg:88.30ms +[2025-08-22 12:33:04] [Rank 0] step:2701/10000 train_time:238534ms step_avg:88.31ms +[2025-08-22 12:33:04] [Rank 0] step:2701/10000 train_time:238534ms step_avg:88.31ms +[2025-08-22 12:33:06] [Rank 0] step:2721/10000 train_time:240339ms step_avg:88.33ms +[2025-08-22 12:33:06] [Rank 0] step:2721/10000 train_time:240339ms step_avg:88.33ms +[2025-08-22 12:33:08] [Rank 0] step:2741/10000 train_time:242144ms step_avg:88.34ms +[2025-08-22 12:33:08] [Rank 0] step:2741/10000 train_time:242144ms step_avg:88.34ms +[2025-08-22 12:33:10] [Rank 0] step:2761/10000 train_time:243950ms step_avg:88.36ms +[2025-08-22 12:33:10] [Rank 0] step:2761/10000 train_time:243950ms step_avg:88.36ms +[2025-08-22 12:33:12] [Rank 0] step:2781/10000 train_time:245756ms step_avg:88.37ms +[2025-08-22 12:33:12] [Rank 0] step:2781/10000 train_time:245756ms step_avg:88.37ms +[2025-08-22 12:33:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:33:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:33:27] [Rank 0] PRINT: step:2800/10000 val_loss:4.0499 svd_entropy: attn_qk:H=0.7783,top10E=0.25,eRank=201.6,q75/q25=48.11 attn_vo:H=0.8263,top10E=0.06,eRank=382.1,q75/q25=inf mlp_w1:H=0.9027,top10E=0.14,eRank=407.1,q75/q25=4.58 mlp_w2:H=0.9690,top10E=0.05,eRank=625.0,q75/q25=2.91 vo_prod:H=0.6751,top10E=0.11,eRank=198.1,q75/q25=inf train_time:247575ms step_avg:88.42ms +[2025-08-22 12:33:27] [Rank 0] PRINT: step:2800/10000 val_loss:4.0499 svd_entropy: attn_qk:H=0.7783,top10E=0.25,eRank=201.6,q75/q25=48.11 attn_vo:H=0.8263,top10E=0.06,eRank=382.1,q75/q25=inf mlp_w1:H=0.9027,top10E=0.14,eRank=407.1,q75/q25=4.58 mlp_w2:H=0.9690,top10E=0.05,eRank=625.0,q75/q25=2.91 vo_prod:H=0.6751,top10E=0.11,eRank=198.1,q75/q25=inf train_time:247575ms step_avg:88.42ms +[2025-08-22 12:33:27] [Rank 0] step:2801/10000 train_time:247596ms step_avg:88.40ms +[2025-08-22 12:33:27] [Rank 0] step:2801/10000 train_time:247596ms step_avg:88.40ms +[2025-08-22 12:33:29] [Rank 0] step:2821/10000 train_time:249400ms step_avg:88.41ms +[2025-08-22 12:33:29] [Rank 0] step:2821/10000 train_time:249400ms step_avg:88.41ms +[2025-08-22 12:33:31] [Rank 0] step:2841/10000 train_time:251202ms step_avg:88.42ms +[2025-08-22 12:33:31] [Rank 0] step:2841/10000 train_time:251202ms step_avg:88.42ms +[2025-08-22 12:33:33] [Rank 0] step:2861/10000 train_time:253085ms step_avg:88.46ms +[2025-08-22 12:33:33] [Rank 0] step:2861/10000 train_time:253085ms step_avg:88.46ms +[2025-08-22 12:33:35] [Rank 0] step:2881/10000 train_time:254931ms step_avg:88.49ms +[2025-08-22 12:33:35] [Rank 0] step:2881/10000 train_time:254931ms step_avg:88.49ms +[2025-08-22 12:33:37] [Rank 0] step:2901/10000 train_time:256733ms step_avg:88.50ms +[2025-08-22 12:33:37] [Rank 0] step:2901/10000 train_time:256733ms step_avg:88.50ms +[2025-08-22 12:33:38] [Rank 0] step:2921/10000 train_time:258537ms step_avg:88.51ms +[2025-08-22 12:33:38] [Rank 0] step:2921/10000 train_time:258537ms step_avg:88.51ms +[2025-08-22 12:33:40] [Rank 0] 
step:2941/10000 train_time:260340ms step_avg:88.52ms +[2025-08-22 12:33:40] [Rank 0] step:2941/10000 train_time:260340ms step_avg:88.52ms +[2025-08-22 12:33:42] [Rank 0] step:2961/10000 train_time:262144ms step_avg:88.53ms +[2025-08-22 12:33:42] [Rank 0] step:2961/10000 train_time:262144ms step_avg:88.53ms +[2025-08-22 12:33:44] [Rank 0] step:2981/10000 train_time:263955ms step_avg:88.55ms +[2025-08-22 12:33:44] [Rank 0] step:2981/10000 train_time:263955ms step_avg:88.55ms +[2025-08-22 12:33:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:33:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:34:00] [Rank 0] PRINT: step:3000/10000 val_loss:4.0151 svd_entropy: attn_qk:H=0.7798,top10E=0.25,eRank=203.1,q75/q25=48.07 attn_vo:H=0.8270,top10E=0.06,eRank=383.9,q75/q25=inf mlp_w1:H=0.9044,top10E=0.14,eRank=411.6,q75/q25=4.55 mlp_w2:H=0.9689,top10E=0.05,eRank=624.8,q75/q25=2.91 vo_prod:H=0.6765,top10E=0.11,eRank=200.3,q75/q25=inf train_time:265782ms step_avg:88.59ms +[2025-08-22 12:34:00] [Rank 0] PRINT: step:3000/10000 val_loss:4.0151 svd_entropy: attn_qk:H=0.7798,top10E=0.25,eRank=203.1,q75/q25=48.07 attn_vo:H=0.8270,top10E=0.06,eRank=383.9,q75/q25=inf mlp_w1:H=0.9044,top10E=0.14,eRank=411.6,q75/q25=4.55 mlp_w2:H=0.9689,top10E=0.05,eRank=624.8,q75/q25=2.91 vo_prod:H=0.6765,top10E=0.11,eRank=200.3,q75/q25=inf train_time:265782ms step_avg:88.59ms +[2025-08-22 12:34:00] [Rank 0] step:3001/10000 train_time:265803ms step_avg:88.57ms +[2025-08-22 12:34:00] [Rank 0] step:3001/10000 train_time:265803ms step_avg:88.57ms +[2025-08-22 12:34:02] [Rank 0] step:3021/10000 train_time:267598ms step_avg:88.58ms +[2025-08-22 12:34:02] [Rank 0] step:3021/10000 train_time:267598ms step_avg:88.58ms +[2025-08-22 12:34:03] [Rank 0] step:3041/10000 train_time:269408ms step_avg:88.59ms +[2025-08-22 
12:34:03] [Rank 0] step:3041/10000 train_time:269408ms step_avg:88.59ms +[2025-08-22 12:34:05] [Rank 0] step:3061/10000 train_time:271218ms step_avg:88.60ms +[2025-08-22 12:34:05] [Rank 0] step:3061/10000 train_time:271218ms step_avg:88.60ms +[2025-08-22 12:34:07] [Rank 0] step:3081/10000 train_time:273028ms step_avg:88.62ms +[2025-08-22 12:34:07] [Rank 0] step:3081/10000 train_time:273028ms step_avg:88.62ms +[2025-08-22 12:34:09] [Rank 0] step:3101/10000 train_time:274840ms step_avg:88.63ms +[2025-08-22 12:34:09] [Rank 0] step:3101/10000 train_time:274840ms step_avg:88.63ms +[2025-08-22 12:34:11] [Rank 0] step:3121/10000 train_time:276650ms step_avg:88.64ms +[2025-08-22 12:34:11] [Rank 0] step:3121/10000 train_time:276650ms step_avg:88.64ms +[2025-08-22 12:34:12] [Rank 0] step:3141/10000 train_time:278464ms step_avg:88.65ms +[2025-08-22 12:34:12] [Rank 0] step:3141/10000 train_time:278464ms step_avg:88.65ms +[2025-08-22 12:34:14] [Rank 0] step:3161/10000 train_time:280276ms step_avg:88.67ms +[2025-08-22 12:34:14] [Rank 0] step:3161/10000 train_time:280276ms step_avg:88.67ms +[2025-08-22 12:34:16] [Rank 0] step:3181/10000 train_time:282088ms step_avg:88.68ms +[2025-08-22 12:34:16] [Rank 0] step:3181/10000 train_time:282088ms step_avg:88.68ms +[2025-08-22 12:34:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:34:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:34:32] [Rank 0] PRINT: step:3200/10000 val_loss:3.9896 svd_entropy: attn_qk:H=0.7813,top10E=0.24,eRank=204.6,q75/q25=48.03 attn_vo:H=0.8275,top10E=0.06,eRank=385.4,q75/q25=inf mlp_w1:H=0.9058,top10E=0.13,eRank=415.7,q75/q25=4.52 mlp_w2:H=0.9688,top10E=0.05,eRank=624.4,q75/q25=2.91 vo_prod:H=0.6778,top10E=0.11,eRank=202.4,q75/q25=inf train_time:283915ms step_avg:88.72ms +[2025-08-22 12:34:32] [Rank 0] PRINT: step:3200/10000 val_loss:3.9896 svd_entropy: attn_qk:H=0.7813,top10E=0.24,eRank=204.6,q75/q25=48.03 attn_vo:H=0.8275,top10E=0.06,eRank=385.4,q75/q25=inf mlp_w1:H=0.9058,top10E=0.13,eRank=415.7,q75/q25=4.52 mlp_w2:H=0.9688,top10E=0.05,eRank=624.4,q75/q25=2.91 vo_prod:H=0.6778,top10E=0.11,eRank=202.4,q75/q25=inf train_time:283915ms step_avg:88.72ms +[2025-08-22 12:34:32] [Rank 0] step:3201/10000 train_time:283935ms step_avg:88.70ms +[2025-08-22 12:34:32] [Rank 0] step:3201/10000 train_time:283935ms step_avg:88.70ms +[2025-08-22 12:34:34] [Rank 0] step:3221/10000 train_time:285736ms step_avg:88.71ms +[2025-08-22 12:34:34] [Rank 0] step:3221/10000 train_time:285736ms step_avg:88.71ms +[2025-08-22 12:34:36] [Rank 0] step:3241/10000 train_time:287610ms step_avg:88.74ms +[2025-08-22 12:34:36] [Rank 0] step:3241/10000 train_time:287610ms step_avg:88.74ms +[2025-08-22 12:34:37] [Rank 0] step:3261/10000 train_time:289457ms step_avg:88.76ms +[2025-08-22 12:34:37] [Rank 0] step:3261/10000 train_time:289457ms step_avg:88.76ms +[2025-08-22 12:34:39] [Rank 0] step:3281/10000 train_time:291269ms step_avg:88.77ms +[2025-08-22 12:34:39] [Rank 0] step:3281/10000 train_time:291269ms step_avg:88.77ms +[2025-08-22 12:34:41] [Rank 0] step:3301/10000 train_time:293080ms step_avg:88.79ms +[2025-08-22 12:34:41] [Rank 0] step:3301/10000 train_time:293080ms step_avg:88.79ms +[2025-08-22 12:34:43] [Rank 0] step:3321/10000 train_time:294893ms step_avg:88.80ms +[2025-08-22 12:34:43] [Rank 0] step:3321/10000 train_time:294893ms step_avg:88.80ms +[2025-08-22 12:34:45] [Rank 0] 
step:3341/10000 train_time:296707ms step_avg:88.81ms +[2025-08-22 12:34:45] [Rank 0] step:3341/10000 train_time:296707ms step_avg:88.81ms +[2025-08-22 12:34:46] [Rank 0] step:3361/10000 train_time:298518ms step_avg:88.82ms +[2025-08-22 12:34:46] [Rank 0] step:3361/10000 train_time:298518ms step_avg:88.82ms +[2025-08-22 12:34:48] [Rank 0] step:3381/10000 train_time:300331ms step_avg:88.83ms +[2025-08-22 12:34:48] [Rank 0] step:3381/10000 train_time:300331ms step_avg:88.83ms +[2025-08-22 12:34:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:34:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:35:04] [Rank 0] PRINT: step:3400/10000 val_loss:3.9705 svd_entropy: attn_qk:H=0.7828,top10E=0.24,eRank=206.1,q75/q25=48.11 attn_vo:H=0.8280,top10E=0.06,eRank=386.8,q75/q25=inf mlp_w1:H=0.9072,top10E=0.13,eRank=419.5,q75/q25=4.49 mlp_w2:H=0.9687,top10E=0.05,eRank=624.1,q75/q25=2.92 vo_prod:H=0.6790,top10E=0.11,eRank=204.4,q75/q25=inf train_time:302158ms step_avg:88.87ms +[2025-08-22 12:35:04] [Rank 0] PRINT: step:3400/10000 val_loss:3.9705 svd_entropy: attn_qk:H=0.7828,top10E=0.24,eRank=206.1,q75/q25=48.11 attn_vo:H=0.8280,top10E=0.06,eRank=386.8,q75/q25=inf mlp_w1:H=0.9072,top10E=0.13,eRank=419.5,q75/q25=4.49 mlp_w2:H=0.9687,top10E=0.05,eRank=624.1,q75/q25=2.92 vo_prod:H=0.6790,top10E=0.11,eRank=204.4,q75/q25=inf train_time:302158ms step_avg:88.87ms +[2025-08-22 12:35:04] [Rank 0] step:3401/10000 train_time:302178ms step_avg:88.85ms +[2025-08-22 12:35:04] [Rank 0] step:3401/10000 train_time:302178ms step_avg:88.85ms +[2025-08-22 12:35:06] [Rank 0] step:3421/10000 train_time:303972ms step_avg:88.85ms +[2025-08-22 12:35:06] [Rank 0] step:3421/10000 train_time:303972ms step_avg:88.85ms +[2025-08-22 12:35:08] [Rank 0] step:3441/10000 train_time:305785ms step_avg:88.87ms +[2025-08-22 
12:35:08] [Rank 0] step:3441/10000 train_time:305785ms step_avg:88.87ms +[2025-08-22 12:35:09] [Rank 0] step:3461/10000 train_time:307599ms step_avg:88.88ms +[2025-08-22 12:35:09] [Rank 0] step:3461/10000 train_time:307599ms step_avg:88.88ms +[2025-08-22 12:35:11] [Rank 0] step:3481/10000 train_time:309413ms step_avg:88.89ms +[2025-08-22 12:35:11] [Rank 0] step:3481/10000 train_time:309413ms step_avg:88.89ms +[2025-08-22 12:35:13] [Rank 0] step:3501/10000 train_time:311229ms step_avg:88.90ms +[2025-08-22 12:35:13] [Rank 0] step:3501/10000 train_time:311229ms step_avg:88.90ms +[2025-08-22 12:35:15] [Rank 0] step:3521/10000 train_time:313044ms step_avg:88.91ms +[2025-08-22 12:35:15] [Rank 0] step:3521/10000 train_time:313044ms step_avg:88.91ms +[2025-08-22 12:35:17] [Rank 0] step:3541/10000 train_time:314860ms step_avg:88.92ms +[2025-08-22 12:35:17] [Rank 0] step:3541/10000 train_time:314860ms step_avg:88.92ms +[2025-08-22 12:35:18] [Rank 0] step:3561/10000 train_time:316676ms step_avg:88.93ms +[2025-08-22 12:35:18] [Rank 0] step:3561/10000 train_time:316676ms step_avg:88.93ms +[2025-08-22 12:35:20] [Rank 0] step:3581/10000 train_time:318496ms step_avg:88.94ms +[2025-08-22 12:35:20] [Rank 0] step:3581/10000 train_time:318496ms step_avg:88.94ms +[2025-08-22 12:35:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:35:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:35:36] [Rank 0] PRINT: step:3600/10000 val_loss:3.9593 svd_entropy: attn_qk:H=0.7842,top10E=0.24,eRank=207.5,q75/q25=47.78 attn_vo:H=0.8285,top10E=0.06,eRank=388.0,q75/q25=inf mlp_w1:H=0.9084,top10E=0.13,eRank=423.0,q75/q25=4.46 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.92 vo_prod:H=0.6802,top10E=0.11,eRank=206.1,q75/q25=inf train_time:320370ms step_avg:88.99ms +[2025-08-22 12:35:36] [Rank 0] PRINT: step:3600/10000 val_loss:3.9593 svd_entropy: attn_qk:H=0.7842,top10E=0.24,eRank=207.5,q75/q25=47.78 attn_vo:H=0.8285,top10E=0.06,eRank=388.0,q75/q25=inf mlp_w1:H=0.9084,top10E=0.13,eRank=423.0,q75/q25=4.46 mlp_w2:H=0.9686,top10E=0.05,eRank=623.7,q75/q25=2.92 vo_prod:H=0.6802,top10E=0.11,eRank=206.1,q75/q25=inf train_time:320370ms step_avg:88.99ms +[2025-08-22 12:35:36] [Rank 0] step:3601/10000 train_time:320389ms step_avg:88.97ms +[2025-08-22 12:35:36] [Rank 0] step:3601/10000 train_time:320389ms step_avg:88.97ms +[2025-08-22 12:35:38] [Rank 0] step:3621/10000 train_time:322202ms step_avg:88.98ms +[2025-08-22 12:35:38] [Rank 0] step:3621/10000 train_time:322202ms step_avg:88.98ms +[2025-08-22 12:35:40] [Rank 0] step:3641/10000 train_time:324114ms step_avg:89.02ms +[2025-08-22 12:35:40] [Rank 0] step:3641/10000 train_time:324114ms step_avg:89.02ms +[2025-08-22 12:35:41] [Rank 0] step:3661/10000 train_time:325953ms step_avg:89.03ms +[2025-08-22 12:35:41] [Rank 0] step:3661/10000 train_time:325953ms step_avg:89.03ms +[2025-08-22 12:35:43] [Rank 0] step:3681/10000 train_time:327765ms step_avg:89.04ms +[2025-08-22 12:35:43] [Rank 0] step:3681/10000 train_time:327765ms step_avg:89.04ms +[2025-08-22 12:35:45] [Rank 0] step:3701/10000 train_time:329579ms step_avg:89.05ms +[2025-08-22 12:35:45] [Rank 0] step:3701/10000 train_time:329579ms step_avg:89.05ms +[2025-08-22 12:35:47] [Rank 0] step:3721/10000 train_time:331421ms step_avg:89.07ms +[2025-08-22 12:35:47] [Rank 0] step:3721/10000 train_time:331421ms step_avg:89.07ms +[2025-08-22 12:35:49] [Rank 0] 
step:3741/10000 train_time:333271ms step_avg:89.09ms +[2025-08-22 12:35:49] [Rank 0] step:3741/10000 train_time:333271ms step_avg:89.09ms +[2025-08-22 12:35:51] [Rank 0] step:3761/10000 train_time:335122ms step_avg:89.10ms +[2025-08-22 12:35:51] [Rank 0] step:3761/10000 train_time:335122ms step_avg:89.10ms +[2025-08-22 12:35:52] [Rank 0] step:3781/10000 train_time:336974ms step_avg:89.12ms +[2025-08-22 12:35:52] [Rank 0] step:3781/10000 train_time:336974ms step_avg:89.12ms +[2025-08-22 12:35:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:35:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:36:08] [Rank 0] PRINT: step:3800/10000 val_loss:3.9254 svd_entropy: attn_qk:H=0.7852,top10E=0.24,eRank=208.5,q75/q25=47.47 attn_vo:H=0.8289,top10E=0.06,eRank=389.1,q75/q25=inf mlp_w1:H=0.9096,top10E=0.13,eRank=426.2,q75/q25=4.43 mlp_w2:H=0.9685,top10E=0.05,eRank=623.2,q75/q25=2.93 vo_prod:H=0.6811,top10E=0.11,eRank=207.7,q75/q25=inf train_time:338840ms step_avg:89.17ms +[2025-08-22 12:36:08] [Rank 0] PRINT: step:3800/10000 val_loss:3.9254 svd_entropy: attn_qk:H=0.7852,top10E=0.24,eRank=208.5,q75/q25=47.47 attn_vo:H=0.8289,top10E=0.06,eRank=389.1,q75/q25=inf mlp_w1:H=0.9096,top10E=0.13,eRank=426.2,q75/q25=4.43 mlp_w2:H=0.9685,top10E=0.05,eRank=623.2,q75/q25=2.93 vo_prod:H=0.6811,top10E=0.11,eRank=207.7,q75/q25=inf train_time:338840ms step_avg:89.17ms +[2025-08-22 12:36:08] [Rank 0] step:3801/10000 train_time:338860ms step_avg:89.15ms +[2025-08-22 12:36:08] [Rank 0] step:3801/10000 train_time:338860ms step_avg:89.15ms +[2025-08-22 12:36:10] [Rank 0] step:3821/10000 train_time:340702ms step_avg:89.17ms +[2025-08-22 12:36:10] [Rank 0] step:3821/10000 train_time:340702ms step_avg:89.17ms +[2025-08-22 12:36:12] [Rank 0] step:3841/10000 train_time:342551ms step_avg:89.18ms +[2025-08-22 
12:36:12] [Rank 0] step:3841/10000 train_time:342551ms step_avg:89.18ms +[2025-08-22 12:36:14] [Rank 0] step:3861/10000 train_time:344398ms step_avg:89.20ms +[2025-08-22 12:36:14] [Rank 0] step:3861/10000 train_time:344398ms step_avg:89.20ms +[2025-08-22 12:36:16] [Rank 0] step:3881/10000 train_time:346243ms step_avg:89.22ms +[2025-08-22 12:36:16] [Rank 0] step:3881/10000 train_time:346243ms step_avg:89.22ms +[2025-08-22 12:36:18] [Rank 0] step:3901/10000 train_time:348091ms step_avg:89.23ms +[2025-08-22 12:36:18] [Rank 0] step:3901/10000 train_time:348091ms step_avg:89.23ms +[2025-08-22 12:36:19] [Rank 0] step:3921/10000 train_time:349942ms step_avg:89.25ms +[2025-08-22 12:36:19] [Rank 0] step:3921/10000 train_time:349942ms step_avg:89.25ms +[2025-08-22 12:36:21] [Rank 0] step:3941/10000 train_time:351790ms step_avg:89.26ms +[2025-08-22 12:36:21] [Rank 0] step:3941/10000 train_time:351790ms step_avg:89.26ms +[2025-08-22 12:36:23] [Rank 0] step:3961/10000 train_time:353639ms step_avg:89.28ms +[2025-08-22 12:36:23] [Rank 0] step:3961/10000 train_time:353639ms step_avg:89.28ms +[2025-08-22 12:36:25] [Rank 0] step:3981/10000 train_time:355487ms step_avg:89.30ms +[2025-08-22 12:36:25] [Rank 0] step:3981/10000 train_time:355487ms step_avg:89.30ms +[2025-08-22 12:36:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:36:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:36:41] [Rank 0] PRINT: step:4000/10000 val_loss:3.9049 svd_entropy: attn_qk:H=0.7863,top10E=0.24,eRank=209.7,q75/q25=47.21 attn_vo:H=0.8291,top10E=0.06,eRank=389.9,q75/q25=inf mlp_w1:H=0.9107,top10E=0.13,eRank=429.3,q75/q25=4.40 mlp_w2:H=0.9684,top10E=0.05,eRank=622.9,q75/q25=2.93 vo_prod:H=0.6819,top10E=0.10,eRank=209.1,q75/q25=inf train_time:357348ms step_avg:89.34ms +[2025-08-22 12:36:41] [Rank 0] PRINT: step:4000/10000 val_loss:3.9049 svd_entropy: attn_qk:H=0.7863,top10E=0.24,eRank=209.7,q75/q25=47.21 attn_vo:H=0.8291,top10E=0.06,eRank=389.9,q75/q25=inf mlp_w1:H=0.9107,top10E=0.13,eRank=429.3,q75/q25=4.40 mlp_w2:H=0.9684,top10E=0.05,eRank=622.9,q75/q25=2.93 vo_prod:H=0.6819,top10E=0.10,eRank=209.1,q75/q25=inf train_time:357348ms step_avg:89.34ms +[2025-08-22 12:36:41] [Rank 0] step:4001/10000 train_time:357370ms step_avg:89.32ms +[2025-08-22 12:36:41] [Rank 0] step:4001/10000 train_time:357370ms step_avg:89.32ms +[2025-08-22 12:36:43] [Rank 0] step:4021/10000 train_time:359252ms step_avg:89.34ms +[2025-08-22 12:36:43] [Rank 0] step:4021/10000 train_time:359252ms step_avg:89.34ms +[2025-08-22 12:36:45] [Rank 0] step:4041/10000 train_time:361105ms step_avg:89.36ms +[2025-08-22 12:36:45] [Rank 0] step:4041/10000 train_time:361105ms step_avg:89.36ms +[2025-08-22 12:36:47] [Rank 0] step:4061/10000 train_time:362950ms step_avg:89.37ms +[2025-08-22 12:36:47] [Rank 0] step:4061/10000 train_time:362950ms step_avg:89.37ms +[2025-08-22 12:36:48] [Rank 0] step:4081/10000 train_time:364837ms step_avg:89.40ms +[2025-08-22 12:36:48] [Rank 0] step:4081/10000 train_time:364837ms step_avg:89.40ms +[2025-08-22 12:36:50] [Rank 0] step:4101/10000 train_time:366685ms step_avg:89.41ms +[2025-08-22 12:36:50] [Rank 0] step:4101/10000 train_time:366685ms step_avg:89.41ms +[2025-08-22 12:36:52] [Rank 0] step:4121/10000 train_time:368532ms step_avg:89.43ms +[2025-08-22 12:36:52] [Rank 0] step:4121/10000 train_time:368532ms step_avg:89.43ms +[2025-08-22 12:36:54] [Rank 0] 
step:4141/10000 train_time:370384ms step_avg:89.44ms +[2025-08-22 12:36:54] [Rank 0] step:4141/10000 train_time:370384ms step_avg:89.44ms +[2025-08-22 12:36:56] [Rank 0] step:4161/10000 train_time:372231ms step_avg:89.46ms +[2025-08-22 12:36:56] [Rank 0] step:4161/10000 train_time:372231ms step_avg:89.46ms +[2025-08-22 12:36:58] [Rank 0] step:4181/10000 train_time:374082ms step_avg:89.47ms +[2025-08-22 12:36:58] [Rank 0] step:4181/10000 train_time:374082ms step_avg:89.47ms +[2025-08-22 12:36:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:36:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:37:13] [Rank 0] PRINT: step:4200/10000 val_loss:3.8931 svd_entropy: attn_qk:H=0.7875,top10E=0.24,eRank=210.9,q75/q25=47.01 attn_vo:H=0.8294,top10E=0.06,eRank=390.7,q75/q25=inf mlp_w1:H=0.9116,top10E=0.13,eRank=431.9,q75/q25=4.38 mlp_w2:H=0.9683,top10E=0.05,eRank=622.5,q75/q25=2.93 vo_prod:H=0.6827,top10E=0.10,eRank=210.4,q75/q25=inf train_time:375946ms step_avg:89.51ms +[2025-08-22 12:37:13] [Rank 0] PRINT: step:4200/10000 val_loss:3.8931 svd_entropy: attn_qk:H=0.7875,top10E=0.24,eRank=210.9,q75/q25=47.01 attn_vo:H=0.8294,top10E=0.06,eRank=390.7,q75/q25=inf mlp_w1:H=0.9116,top10E=0.13,eRank=431.9,q75/q25=4.38 mlp_w2:H=0.9683,top10E=0.05,eRank=622.5,q75/q25=2.93 vo_prod:H=0.6827,top10E=0.10,eRank=210.4,q75/q25=inf train_time:375946ms step_avg:89.51ms +[2025-08-22 12:37:14] [Rank 0] step:4201/10000 train_time:375965ms step_avg:89.49ms +[2025-08-22 12:37:14] [Rank 0] step:4201/10000 train_time:375965ms step_avg:89.49ms +[2025-08-22 12:37:15] [Rank 0] step:4221/10000 train_time:377805ms step_avg:89.51ms +[2025-08-22 12:37:15] [Rank 0] step:4221/10000 train_time:377805ms step_avg:89.51ms +[2025-08-22 12:37:17] [Rank 0] step:4241/10000 train_time:379655ms step_avg:89.52ms +[2025-08-22 
12:37:17] [Rank 0] step:4241/10000 train_time:379655ms step_avg:89.52ms +[2025-08-22 12:37:19] [Rank 0] step:4261/10000 train_time:381500ms step_avg:89.53ms +[2025-08-22 12:37:19] [Rank 0] step:4261/10000 train_time:381500ms step_avg:89.53ms +[2025-08-22 12:37:21] [Rank 0] step:4281/10000 train_time:383349ms step_avg:89.55ms +[2025-08-22 12:37:21] [Rank 0] step:4281/10000 train_time:383349ms step_avg:89.55ms +[2025-08-22 12:37:23] [Rank 0] step:4301/10000 train_time:385196ms step_avg:89.56ms +[2025-08-22 12:37:23] [Rank 0] step:4301/10000 train_time:385196ms step_avg:89.56ms +[2025-08-22 12:37:25] [Rank 0] step:4321/10000 train_time:387047ms step_avg:89.57ms +[2025-08-22 12:37:25] [Rank 0] step:4321/10000 train_time:387047ms step_avg:89.57ms +[2025-08-22 12:37:26] [Rank 0] step:4341/10000 train_time:388895ms step_avg:89.59ms +[2025-08-22 12:37:26] [Rank 0] step:4341/10000 train_time:388895ms step_avg:89.59ms +[2025-08-22 12:37:28] [Rank 0] step:4361/10000 train_time:390746ms step_avg:89.60ms +[2025-08-22 12:37:28] [Rank 0] step:4361/10000 train_time:390746ms step_avg:89.60ms +[2025-08-22 12:37:30] [Rank 0] step:4381/10000 train_time:392596ms step_avg:89.61ms +[2025-08-22 12:37:30] [Rank 0] step:4381/10000 train_time:392596ms step_avg:89.61ms +[2025-08-22 12:37:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:37:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:37:46] [Rank 0] PRINT: step:4400/10000 val_loss:3.8787 svd_entropy: attn_qk:H=0.7885,top10E=0.24,eRank=212.0,q75/q25=46.76 attn_vo:H=0.8297,top10E=0.06,eRank=391.5,q75/q25=inf mlp_w1:H=0.9125,top10E=0.13,eRank=434.6,q75/q25=4.36 mlp_w2:H=0.9682,top10E=0.05,eRank=622.1,q75/q25=2.94 vo_prod:H=0.6835,top10E=0.10,eRank=211.7,q75/q25=inf train_time:394459ms step_avg:89.65ms +[2025-08-22 12:37:46] [Rank 0] PRINT: step:4400/10000 val_loss:3.8787 svd_entropy: attn_qk:H=0.7885,top10E=0.24,eRank=212.0,q75/q25=46.76 attn_vo:H=0.8297,top10E=0.06,eRank=391.5,q75/q25=inf mlp_w1:H=0.9125,top10E=0.13,eRank=434.6,q75/q25=4.36 mlp_w2:H=0.9682,top10E=0.05,eRank=622.1,q75/q25=2.94 vo_prod:H=0.6835,top10E=0.10,eRank=211.7,q75/q25=inf train_time:394459ms step_avg:89.65ms +[2025-08-22 12:37:46] [Rank 0] step:4401/10000 train_time:394480ms step_avg:89.63ms +[2025-08-22 12:37:46] [Rank 0] step:4401/10000 train_time:394480ms step_avg:89.63ms +[2025-08-22 12:37:48] [Rank 0] step:4421/10000 train_time:396311ms step_avg:89.64ms +[2025-08-22 12:37:48] [Rank 0] step:4421/10000 train_time:396311ms step_avg:89.64ms +[2025-08-22 12:37:50] [Rank 0] step:4441/10000 train_time:398154ms step_avg:89.65ms +[2025-08-22 12:37:50] [Rank 0] step:4441/10000 train_time:398154ms step_avg:89.65ms +[2025-08-22 12:37:52] [Rank 0] step:4461/10000 train_time:400006ms step_avg:89.67ms +[2025-08-22 12:37:52] [Rank 0] step:4461/10000 train_time:400006ms step_avg:89.67ms +[2025-08-22 12:37:53] [Rank 0] step:4481/10000 train_time:401860ms step_avg:89.68ms +[2025-08-22 12:37:53] [Rank 0] step:4481/10000 train_time:401860ms step_avg:89.68ms +[2025-08-22 12:37:55] [Rank 0] step:4501/10000 train_time:403712ms step_avg:89.69ms +[2025-08-22 12:37:55] [Rank 0] step:4501/10000 train_time:403712ms step_avg:89.69ms +[2025-08-22 12:37:57] [Rank 0] step:4521/10000 train_time:405564ms step_avg:89.71ms +[2025-08-22 12:37:57] [Rank 0] step:4521/10000 train_time:405564ms step_avg:89.71ms +[2025-08-22 12:37:59] [Rank 0] 
step:4541/10000 train_time:407417ms step_avg:89.72ms +[2025-08-22 12:37:59] [Rank 0] step:4541/10000 train_time:407417ms step_avg:89.72ms +[2025-08-22 12:38:01] [Rank 0] step:4561/10000 train_time:409271ms step_avg:89.73ms +[2025-08-22 12:38:01] [Rank 0] step:4561/10000 train_time:409271ms step_avg:89.73ms +[2025-08-22 12:38:03] [Rank 0] step:4581/10000 train_time:411126ms step_avg:89.75ms +[2025-08-22 12:38:03] [Rank 0] step:4581/10000 train_time:411126ms step_avg:89.75ms +[2025-08-22 12:38:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:38:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:38:19] [Rank 0] PRINT: step:4600/10000 val_loss:3.8589 svd_entropy: attn_qk:H=0.7895,top10E=0.23,eRank=213.0,q75/q25=46.84 attn_vo:H=0.8300,top10E=0.06,eRank=392.3,q75/q25=inf mlp_w1:H=0.9134,top10E=0.13,eRank=437.2,q75/q25=4.34 mlp_w2:H=0.9681,top10E=0.05,eRank=621.7,q75/q25=2.94 vo_prod:H=0.6843,top10E=0.10,eRank=213.0,q75/q25=inf train_time:412995ms step_avg:89.78ms +[2025-08-22 12:38:19] [Rank 0] PRINT: step:4600/10000 val_loss:3.8589 svd_entropy: attn_qk:H=0.7895,top10E=0.23,eRank=213.0,q75/q25=46.84 attn_vo:H=0.8300,top10E=0.06,eRank=392.3,q75/q25=inf mlp_w1:H=0.9134,top10E=0.13,eRank=437.2,q75/q25=4.34 mlp_w2:H=0.9681,top10E=0.05,eRank=621.7,q75/q25=2.94 vo_prod:H=0.6843,top10E=0.10,eRank=213.0,q75/q25=inf train_time:412995ms step_avg:89.78ms +[2025-08-22 12:38:19] [Rank 0] step:4601/10000 train_time:413014ms step_avg:89.77ms +[2025-08-22 12:38:19] [Rank 0] step:4601/10000 train_time:413014ms step_avg:89.77ms +[2025-08-22 12:38:21] [Rank 0] step:4621/10000 train_time:414865ms step_avg:89.78ms +[2025-08-22 12:38:21] [Rank 0] step:4621/10000 train_time:414865ms step_avg:89.78ms +[2025-08-22 12:38:22] [Rank 0] step:4641/10000 train_time:416759ms step_avg:89.80ms +[2025-08-22 
12:38:22] [Rank 0] step:4641/10000 train_time:416759ms step_avg:89.80ms +[2025-08-22 12:38:24] [Rank 0] step:4661/10000 train_time:418612ms step_avg:89.81ms +[2025-08-22 12:38:24] [Rank 0] step:4661/10000 train_time:418612ms step_avg:89.81ms +[2025-08-22 12:38:26] [Rank 0] step:4681/10000 train_time:420466ms step_avg:89.82ms +[2025-08-22 12:38:26] [Rank 0] step:4681/10000 train_time:420466ms step_avg:89.82ms +[2025-08-22 12:38:28] [Rank 0] step:4701/10000 train_time:422320ms step_avg:89.84ms +[2025-08-22 12:38:28] [Rank 0] step:4701/10000 train_time:422320ms step_avg:89.84ms +[2025-08-22 12:38:30] [Rank 0] step:4721/10000 train_time:424173ms step_avg:89.85ms +[2025-08-22 12:38:30] [Rank 0] step:4721/10000 train_time:424173ms step_avg:89.85ms +[2025-08-22 12:38:32] [Rank 0] step:4741/10000 train_time:426028ms step_avg:89.86ms +[2025-08-22 12:38:32] [Rank 0] step:4741/10000 train_time:426028ms step_avg:89.86ms +[2025-08-22 12:38:34] [Rank 0] step:4761/10000 train_time:427883ms step_avg:89.87ms +[2025-08-22 12:38:34] [Rank 0] step:4761/10000 train_time:427883ms step_avg:89.87ms +[2025-08-22 12:38:35] [Rank 0] step:4781/10000 train_time:429737ms step_avg:89.88ms +[2025-08-22 12:38:35] [Rank 0] step:4781/10000 train_time:429737ms step_avg:89.88ms +[2025-08-22 12:38:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:38:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:38:51] [Rank 0] PRINT: step:4800/10000 val_loss:3.8531 svd_entropy: attn_qk:H=0.7905,top10E=0.23,eRank=214.0,q75/q25=46.60 attn_vo:H=0.8302,top10E=0.06,eRank=393.0,q75/q25=inf mlp_w1:H=0.9142,top10E=0.13,eRank=439.6,q75/q25=4.32 mlp_w2:H=0.9680,top10E=0.05,eRank=621.3,q75/q25=2.95 vo_prod:H=0.6849,top10E=0.10,eRank=214.1,q75/q25=inf train_time:431607ms step_avg:89.92ms +[2025-08-22 12:38:51] [Rank 0] PRINT: step:4800/10000 val_loss:3.8531 svd_entropy: attn_qk:H=0.7905,top10E=0.23,eRank=214.0,q75/q25=46.60 attn_vo:H=0.8302,top10E=0.06,eRank=393.0,q75/q25=inf mlp_w1:H=0.9142,top10E=0.13,eRank=439.6,q75/q25=4.32 mlp_w2:H=0.9680,top10E=0.05,eRank=621.3,q75/q25=2.95 vo_prod:H=0.6849,top10E=0.10,eRank=214.1,q75/q25=inf train_time:431607ms step_avg:89.92ms +[2025-08-22 12:38:51] [Rank 0] step:4801/10000 train_time:431627ms step_avg:89.90ms +[2025-08-22 12:38:51] [Rank 0] step:4801/10000 train_time:431627ms step_avg:89.90ms +[2025-08-22 12:38:53] [Rank 0] step:4821/10000 train_time:433483ms step_avg:89.92ms +[2025-08-22 12:38:53] [Rank 0] step:4821/10000 train_time:433483ms step_avg:89.92ms +[2025-08-22 12:38:55] [Rank 0] step:4841/10000 train_time:435333ms step_avg:89.93ms +[2025-08-22 12:38:55] [Rank 0] step:4841/10000 train_time:435333ms step_avg:89.93ms +[2025-08-22 12:38:57] [Rank 0] step:4861/10000 train_time:437186ms step_avg:89.94ms +[2025-08-22 12:38:57] [Rank 0] step:4861/10000 train_time:437186ms step_avg:89.94ms +[2025-08-22 12:38:59] [Rank 0] step:4881/10000 train_time:439039ms step_avg:89.95ms +[2025-08-22 12:38:59] [Rank 0] step:4881/10000 train_time:439039ms step_avg:89.95ms +[2025-08-22 12:39:01] [Rank 0] step:4901/10000 train_time:440888ms step_avg:89.96ms +[2025-08-22 12:39:01] [Rank 0] step:4901/10000 train_time:440888ms step_avg:89.96ms +[2025-08-22 12:39:02] [Rank 0] step:4921/10000 train_time:442745ms step_avg:89.97ms +[2025-08-22 12:39:02] [Rank 0] step:4921/10000 train_time:442745ms step_avg:89.97ms +[2025-08-22 12:39:04] [Rank 0] 
step:4941/10000 train_time:444603ms step_avg:89.98ms +[2025-08-22 12:39:04] [Rank 0] step:4941/10000 train_time:444603ms step_avg:89.98ms +[2025-08-22 12:39:06] [Rank 0] step:4961/10000 train_time:446459ms step_avg:89.99ms +[2025-08-22 12:39:06] [Rank 0] step:4961/10000 train_time:446459ms step_avg:89.99ms +[2025-08-22 12:39:08] [Rank 0] step:4981/10000 train_time:448318ms step_avg:90.01ms +[2025-08-22 12:39:08] [Rank 0] step:4981/10000 train_time:448318ms step_avg:90.01ms +[2025-08-22 12:39:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:39:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:39:24] [Rank 0] PRINT: step:5000/10000 val_loss:3.8364 svd_entropy: attn_qk:H=0.7915,top10E=0.23,eRank=215.1,q75/q25=46.32 attn_vo:H=0.8304,top10E=0.06,eRank=393.6,q75/q25=inf mlp_w1:H=0.9150,top10E=0.12,eRank=441.8,q75/q25=4.30 mlp_w2:H=0.9679,top10E=0.05,eRank=620.9,q75/q25=2.95 vo_prod:H=0.6855,top10E=0.10,eRank=215.1,q75/q25=inf train_time:450192ms step_avg:90.04ms +[2025-08-22 12:39:24] [Rank 0] PRINT: step:5000/10000 val_loss:3.8364 svd_entropy: attn_qk:H=0.7915,top10E=0.23,eRank=215.1,q75/q25=46.32 attn_vo:H=0.8304,top10E=0.06,eRank=393.6,q75/q25=inf mlp_w1:H=0.9150,top10E=0.12,eRank=441.8,q75/q25=4.30 mlp_w2:H=0.9679,top10E=0.05,eRank=620.9,q75/q25=2.95 vo_prod:H=0.6855,top10E=0.10,eRank=215.1,q75/q25=inf train_time:450192ms step_avg:90.04ms +[2025-08-22 12:39:24] [Rank 0] step:5001/10000 train_time:450212ms step_avg:90.02ms +[2025-08-22 12:39:24] [Rank 0] step:5001/10000 train_time:450212ms step_avg:90.02ms +[2025-08-22 12:39:26] [Rank 0] step:5021/10000 train_time:452065ms step_avg:90.03ms +[2025-08-22 12:39:26] [Rank 0] step:5021/10000 train_time:452065ms step_avg:90.03ms +[2025-08-22 12:39:28] [Rank 0] step:5041/10000 train_time:453924ms step_avg:90.05ms +[2025-08-22 
12:39:28] [Rank 0] step:5041/10000 train_time:453924ms step_avg:90.05ms +[2025-08-22 12:39:30] [Rank 0] step:5061/10000 train_time:455777ms step_avg:90.06ms +[2025-08-22 12:39:30] [Rank 0] step:5061/10000 train_time:455777ms step_avg:90.06ms +[2025-08-22 12:39:31] [Rank 0] step:5081/10000 train_time:457636ms step_avg:90.07ms +[2025-08-22 12:39:31] [Rank 0] step:5081/10000 train_time:457636ms step_avg:90.07ms +[2025-08-22 12:39:33] [Rank 0] step:5101/10000 train_time:459494ms step_avg:90.08ms +[2025-08-22 12:39:33] [Rank 0] step:5101/10000 train_time:459494ms step_avg:90.08ms +[2025-08-22 12:39:35] [Rank 0] step:5121/10000 train_time:461354ms step_avg:90.09ms +[2025-08-22 12:39:35] [Rank 0] step:5121/10000 train_time:461354ms step_avg:90.09ms +[2025-08-22 12:39:37] [Rank 0] step:5141/10000 train_time:463217ms step_avg:90.10ms +[2025-08-22 12:39:37] [Rank 0] step:5141/10000 train_time:463217ms step_avg:90.10ms +[2025-08-22 12:39:39] [Rank 0] step:5161/10000 train_time:465075ms step_avg:90.11ms +[2025-08-22 12:39:39] [Rank 0] step:5161/10000 train_time:465075ms step_avg:90.11ms +[2025-08-22 12:39:41] [Rank 0] step:5181/10000 train_time:466937ms step_avg:90.12ms +[2025-08-22 12:39:41] [Rank 0] step:5181/10000 train_time:466937ms step_avg:90.12ms +[2025-08-22 12:39:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:39:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:39:57] [Rank 0] PRINT: step:5200/10000 val_loss:3.8255 svd_entropy: attn_qk:H=0.7924,top10E=0.23,eRank=216.0,q75/q25=46.17 attn_vo:H=0.8306,top10E=0.06,eRank=394.1,q75/q25=inf mlp_w1:H=0.9158,top10E=0.12,eRank=444.0,q75/q25=4.28 mlp_w2:H=0.9678,top10E=0.05,eRank=620.5,q75/q25=2.96 vo_prod:H=0.6860,top10E=0.10,eRank=216.0,q75/q25=inf train_time:468836ms step_avg:90.16ms +[2025-08-22 12:39:57] [Rank 0] PRINT: step:5200/10000 val_loss:3.8255 svd_entropy: attn_qk:H=0.7924,top10E=0.23,eRank=216.0,q75/q25=46.17 attn_vo:H=0.8306,top10E=0.06,eRank=394.1,q75/q25=inf mlp_w1:H=0.9158,top10E=0.12,eRank=444.0,q75/q25=4.28 mlp_w2:H=0.9678,top10E=0.05,eRank=620.5,q75/q25=2.96 vo_prod:H=0.6860,top10E=0.10,eRank=216.0,q75/q25=inf train_time:468836ms step_avg:90.16ms +[2025-08-22 12:39:57] [Rank 0] step:5201/10000 train_time:468858ms step_avg:90.15ms +[2025-08-22 12:39:57] [Rank 0] step:5201/10000 train_time:468858ms step_avg:90.15ms +[2025-08-22 12:39:59] [Rank 0] step:5221/10000 train_time:470738ms step_avg:90.16ms +[2025-08-22 12:39:59] [Rank 0] step:5221/10000 train_time:470738ms step_avg:90.16ms +[2025-08-22 12:40:01] [Rank 0] step:5241/10000 train_time:472623ms step_avg:90.18ms +[2025-08-22 12:40:01] [Rank 0] step:5241/10000 train_time:472623ms step_avg:90.18ms +[2025-08-22 12:40:02] [Rank 0] step:5261/10000 train_time:474507ms step_avg:90.19ms +[2025-08-22 12:40:02] [Rank 0] step:5261/10000 train_time:474507ms step_avg:90.19ms +[2025-08-22 12:40:04] [Rank 0] step:5281/10000 train_time:476393ms step_avg:90.21ms +[2025-08-22 12:40:04] [Rank 0] step:5281/10000 train_time:476393ms step_avg:90.21ms +[2025-08-22 12:40:06] [Rank 0] step:5301/10000 train_time:478285ms step_avg:90.23ms +[2025-08-22 12:40:06] [Rank 0] step:5301/10000 train_time:478285ms step_avg:90.23ms +[2025-08-22 12:40:08] [Rank 0] step:5321/10000 train_time:480171ms step_avg:90.24ms +[2025-08-22 12:40:08] [Rank 0] step:5321/10000 train_time:480171ms step_avg:90.24ms +[2025-08-22 12:40:10] [Rank 0] 
step:5341/10000 train_time:482058ms step_avg:90.26ms +[2025-08-22 12:40:10] [Rank 0] step:5341/10000 train_time:482058ms step_avg:90.26ms +[2025-08-22 12:40:12] [Rank 0] step:5361/10000 train_time:483946ms step_avg:90.27ms +[2025-08-22 12:40:12] [Rank 0] step:5361/10000 train_time:483946ms step_avg:90.27ms +[2025-08-22 12:40:14] [Rank 0] step:5381/10000 train_time:485834ms step_avg:90.29ms +[2025-08-22 12:40:14] [Rank 0] step:5381/10000 train_time:485834ms step_avg:90.29ms +[2025-08-22 12:40:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:40:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:40:30] [Rank 0] PRINT: step:5400/10000 val_loss:3.8118 svd_entropy: attn_qk:H=0.7931,top10E=0.23,eRank=216.8,q75/q25=45.86 attn_vo:H=0.8308,top10E=0.06,eRank=394.6,q75/q25=inf mlp_w1:H=0.9164,top10E=0.12,eRank=446.0,q75/q25=4.26 mlp_w2:H=0.9677,top10E=0.05,eRank=620.2,q75/q25=2.96 vo_prod:H=0.6866,top10E=0.10,eRank=216.9,q75/q25=inf train_time:487733ms step_avg:90.32ms +[2025-08-22 12:40:30] [Rank 0] PRINT: step:5400/10000 val_loss:3.8118 svd_entropy: attn_qk:H=0.7931,top10E=0.23,eRank=216.8,q75/q25=45.86 attn_vo:H=0.8308,top10E=0.06,eRank=394.6,q75/q25=inf mlp_w1:H=0.9164,top10E=0.12,eRank=446.0,q75/q25=4.26 mlp_w2:H=0.9677,top10E=0.05,eRank=620.2,q75/q25=2.96 vo_prod:H=0.6866,top10E=0.10,eRank=216.9,q75/q25=inf train_time:487733ms step_avg:90.32ms +[2025-08-22 12:40:30] [Rank 0] step:5401/10000 train_time:487753ms step_avg:90.31ms +[2025-08-22 12:40:30] [Rank 0] step:5401/10000 train_time:487753ms step_avg:90.31ms +[2025-08-22 12:40:32] [Rank 0] step:5421/10000 train_time:489620ms step_avg:90.32ms +[2025-08-22 12:40:32] [Rank 0] step:5421/10000 train_time:489620ms step_avg:90.32ms +[2025-08-22 12:40:33] [Rank 0] step:5441/10000 train_time:491500ms step_avg:90.33ms +[2025-08-22 
12:40:33] [Rank 0] step:5441/10000 train_time:491500ms step_avg:90.33ms +[2025-08-22 12:40:35] [Rank 0] step:5461/10000 train_time:493388ms step_avg:90.35ms +[2025-08-22 12:40:35] [Rank 0] step:5461/10000 train_time:493388ms step_avg:90.35ms +[2025-08-22 12:40:37] [Rank 0] step:5481/10000 train_time:495274ms step_avg:90.36ms +[2025-08-22 12:40:37] [Rank 0] step:5481/10000 train_time:495274ms step_avg:90.36ms +[2025-08-22 12:40:39] [Rank 0] step:5501/10000 train_time:497169ms step_avg:90.38ms +[2025-08-22 12:40:39] [Rank 0] step:5501/10000 train_time:497169ms step_avg:90.38ms +[2025-08-22 12:40:41] [Rank 0] step:5521/10000 train_time:499061ms step_avg:90.39ms +[2025-08-22 12:40:41] [Rank 0] step:5521/10000 train_time:499061ms step_avg:90.39ms +[2025-08-22 12:40:43] [Rank 0] step:5541/10000 train_time:500951ms step_avg:90.41ms +[2025-08-22 12:40:43] [Rank 0] step:5541/10000 train_time:500951ms step_avg:90.41ms +[2025-08-22 12:40:45] [Rank 0] step:5561/10000 train_time:502839ms step_avg:90.42ms +[2025-08-22 12:40:45] [Rank 0] step:5561/10000 train_time:502839ms step_avg:90.42ms +[2025-08-22 12:40:47] [Rank 0] step:5581/10000 train_time:504728ms step_avg:90.44ms +[2025-08-22 12:40:47] [Rank 0] step:5581/10000 train_time:504728ms step_avg:90.44ms +[2025-08-22 12:40:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:40:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:41:02] [Rank 0] PRINT: step:5600/10000 val_loss:3.8045 svd_entropy: attn_qk:H=0.7938,top10E=0.23,eRank=217.6,q75/q25=45.67 attn_vo:H=0.8309,top10E=0.06,eRank=395.0,q75/q25=inf mlp_w1:H=0.9171,top10E=0.12,eRank=447.9,q75/q25=4.25 mlp_w2:H=0.9676,top10E=0.05,eRank=619.7,q75/q25=2.97 vo_prod:H=0.6871,top10E=0.10,eRank=217.8,q75/q25=inf train_time:506633ms step_avg:90.47ms +[2025-08-22 12:41:02] [Rank 0] PRINT: step:5600/10000 val_loss:3.8045 svd_entropy: attn_qk:H=0.7938,top10E=0.23,eRank=217.6,q75/q25=45.67 attn_vo:H=0.8309,top10E=0.06,eRank=395.0,q75/q25=inf mlp_w1:H=0.9171,top10E=0.12,eRank=447.9,q75/q25=4.25 mlp_w2:H=0.9676,top10E=0.05,eRank=619.7,q75/q25=2.97 vo_prod:H=0.6871,top10E=0.10,eRank=217.8,q75/q25=inf train_time:506633ms step_avg:90.47ms +[2025-08-22 12:41:02] [Rank 0] step:5601/10000 train_time:506653ms step_avg:90.46ms +[2025-08-22 12:41:02] [Rank 0] step:5601/10000 train_time:506653ms step_avg:90.46ms +[2025-08-22 12:41:04] [Rank 0] step:5621/10000 train_time:508547ms step_avg:90.47ms +[2025-08-22 12:41:04] [Rank 0] step:5621/10000 train_time:508547ms step_avg:90.47ms +[2025-08-22 12:41:06] [Rank 0] step:5641/10000 train_time:510436ms step_avg:90.49ms +[2025-08-22 12:41:06] [Rank 0] step:5641/10000 train_time:510436ms step_avg:90.49ms +[2025-08-22 12:41:08] [Rank 0] step:5661/10000 train_time:512324ms step_avg:90.50ms +[2025-08-22 12:41:08] [Rank 0] step:5661/10000 train_time:512324ms step_avg:90.50ms +[2025-08-22 12:41:10] [Rank 0] step:5681/10000 train_time:514218ms step_avg:90.52ms +[2025-08-22 12:41:10] [Rank 0] step:5681/10000 train_time:514218ms step_avg:90.52ms +[2025-08-22 12:41:12] [Rank 0] step:5701/10000 train_time:516109ms step_avg:90.53ms +[2025-08-22 12:41:12] [Rank 0] step:5701/10000 train_time:516109ms step_avg:90.53ms +[2025-08-22 12:41:14] [Rank 0] step:5721/10000 train_time:518007ms step_avg:90.54ms +[2025-08-22 12:41:14] [Rank 0] step:5721/10000 train_time:518007ms step_avg:90.54ms +[2025-08-22 12:41:16] [Rank 0] 
step:5741/10000 train_time:519896ms step_avg:90.56ms +[2025-08-22 12:41:16] [Rank 0] step:5741/10000 train_time:519896ms step_avg:90.56ms +[2025-08-22 12:41:18] [Rank 0] step:5761/10000 train_time:521791ms step_avg:90.57ms +[2025-08-22 12:41:18] [Rank 0] step:5761/10000 train_time:521791ms step_avg:90.57ms +[2025-08-22 12:41:19] [Rank 0] step:5781/10000 train_time:523687ms step_avg:90.59ms +[2025-08-22 12:41:19] [Rank 0] step:5781/10000 train_time:523687ms step_avg:90.59ms +[2025-08-22 12:41:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:41:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:41:35] [Rank 0] PRINT: step:5800/10000 val_loss:3.7985 svd_entropy: attn_qk:H=0.7947,top10E=0.23,eRank=218.5,q75/q25=45.36 attn_vo:H=0.8311,top10E=0.06,eRank=395.4,q75/q25=inf mlp_w1:H=0.9177,top10E=0.12,eRank=449.8,q75/q25=4.23 mlp_w2:H=0.9675,top10E=0.05,eRank=619.4,q75/q25=2.97 vo_prod:H=0.6876,top10E=0.10,eRank=218.7,q75/q25=inf train_time:525595ms step_avg:90.62ms +[2025-08-22 12:41:35] [Rank 0] PRINT: step:5800/10000 val_loss:3.7985 svd_entropy: attn_qk:H=0.7947,top10E=0.23,eRank=218.5,q75/q25=45.36 attn_vo:H=0.8311,top10E=0.06,eRank=395.4,q75/q25=inf mlp_w1:H=0.9177,top10E=0.12,eRank=449.8,q75/q25=4.23 mlp_w2:H=0.9675,top10E=0.05,eRank=619.4,q75/q25=2.97 vo_prod:H=0.6876,top10E=0.10,eRank=218.7,q75/q25=inf train_time:525595ms step_avg:90.62ms +[2025-08-22 12:41:35] [Rank 0] step:5801/10000 train_time:525613ms step_avg:90.61ms +[2025-08-22 12:41:35] [Rank 0] step:5801/10000 train_time:525613ms step_avg:90.61ms +[2025-08-22 12:41:37] [Rank 0] step:5821/10000 train_time:527499ms step_avg:90.62ms +[2025-08-22 12:41:37] [Rank 0] step:5821/10000 train_time:527499ms step_avg:90.62ms +[2025-08-22 12:41:39] [Rank 0] step:5841/10000 train_time:529381ms step_avg:90.63ms +[2025-08-22 
12:41:39] [Rank 0] step:5841/10000 train_time:529381ms step_avg:90.63ms +[2025-08-22 12:41:41] [Rank 0] step:5861/10000 train_time:531271ms step_avg:90.65ms +[2025-08-22 12:41:41] [Rank 0] step:5861/10000 train_time:531271ms step_avg:90.65ms +[2025-08-22 12:41:43] [Rank 0] step:5881/10000 train_time:533159ms step_avg:90.66ms +[2025-08-22 12:41:43] [Rank 0] step:5881/10000 train_time:533159ms step_avg:90.66ms +[2025-08-22 12:41:45] [Rank 0] step:5901/10000 train_time:535049ms step_avg:90.67ms +[2025-08-22 12:41:45] [Rank 0] step:5901/10000 train_time:535049ms step_avg:90.67ms +[2025-08-22 12:41:46] [Rank 0] step:5921/10000 train_time:536937ms step_avg:90.68ms +[2025-08-22 12:41:46] [Rank 0] step:5921/10000 train_time:536937ms step_avg:90.68ms +[2025-08-22 12:41:48] [Rank 0] step:5941/10000 train_time:538832ms step_avg:90.70ms +[2025-08-22 12:41:48] [Rank 0] step:5941/10000 train_time:538832ms step_avg:90.70ms +[2025-08-22 12:41:50] [Rank 0] step:5961/10000 train_time:540724ms step_avg:90.71ms +[2025-08-22 12:41:50] [Rank 0] step:5961/10000 train_time:540724ms step_avg:90.71ms +[2025-08-22 12:41:52] [Rank 0] step:5981/10000 train_time:542614ms step_avg:90.72ms +[2025-08-22 12:41:52] [Rank 0] step:5981/10000 train_time:542614ms step_avg:90.72ms +[2025-08-22 12:41:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:41:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:42:08] [Rank 0] PRINT: step:6000/10000 val_loss:3.7777 svd_entropy: attn_qk:H=0.7954,top10E=0.23,eRank=219.4,q75/q25=45.15 attn_vo:H=0.8312,top10E=0.06,eRank=395.8,q75/q25=inf mlp_w1:H=0.9183,top10E=0.12,eRank=451.5,q75/q25=4.21 mlp_w2:H=0.9674,top10E=0.05,eRank=619.0,q75/q25=2.98 vo_prod:H=0.6880,top10E=0.10,eRank=219.5,q75/q25=inf train_time:544518ms step_avg:90.75ms +[2025-08-22 12:42:08] [Rank 0] PRINT: step:6000/10000 val_loss:3.7777 svd_entropy: attn_qk:H=0.7954,top10E=0.23,eRank=219.4,q75/q25=45.15 attn_vo:H=0.8312,top10E=0.06,eRank=395.8,q75/q25=inf mlp_w1:H=0.9183,top10E=0.12,eRank=451.5,q75/q25=4.21 mlp_w2:H=0.9674,top10E=0.05,eRank=619.0,q75/q25=2.98 vo_prod:H=0.6880,top10E=0.10,eRank=219.5,q75/q25=inf train_time:544518ms step_avg:90.75ms +[2025-08-22 12:42:08] [Rank 0] step:6001/10000 train_time:544538ms step_avg:90.74ms +[2025-08-22 12:42:08] [Rank 0] step:6001/10000 train_time:544538ms step_avg:90.74ms +[2025-08-22 12:42:10] [Rank 0] step:6021/10000 train_time:546426ms step_avg:90.75ms +[2025-08-22 12:42:10] [Rank 0] step:6021/10000 train_time:546426ms step_avg:90.75ms +[2025-08-22 12:42:12] [Rank 0] step:6041/10000 train_time:548317ms step_avg:90.77ms +[2025-08-22 12:42:12] [Rank 0] step:6041/10000 train_time:548317ms step_avg:90.77ms +[2025-08-22 12:42:14] [Rank 0] step:6061/10000 train_time:550208ms step_avg:90.78ms +[2025-08-22 12:42:14] [Rank 0] step:6061/10000 train_time:550208ms step_avg:90.78ms +[2025-08-22 12:42:15] [Rank 0] step:6081/10000 train_time:552096ms step_avg:90.79ms +[2025-08-22 12:42:15] [Rank 0] step:6081/10000 train_time:552096ms step_avg:90.79ms +[2025-08-22 12:42:17] [Rank 0] step:6101/10000 train_time:553994ms step_avg:90.80ms +[2025-08-22 12:42:17] [Rank 0] step:6101/10000 train_time:553994ms step_avg:90.80ms +[2025-08-22 12:42:19] [Rank 0] step:6121/10000 train_time:556145ms step_avg:90.86ms +[2025-08-22 12:42:19] [Rank 0] step:6121/10000 train_time:556145ms step_avg:90.86ms +[2025-08-22 12:42:21] [Rank 0] 
step:6141/10000 train_time:558044ms step_avg:90.87ms +[2025-08-22 12:42:21] [Rank 0] step:6141/10000 train_time:558044ms step_avg:90.87ms +[2025-08-22 12:42:23] [Rank 0] step:6161/10000 train_time:559936ms step_avg:90.88ms +[2025-08-22 12:42:23] [Rank 0] step:6161/10000 train_time:559936ms step_avg:90.88ms +[2025-08-22 12:42:25] [Rank 0] step:6181/10000 train_time:561827ms step_avg:90.90ms +[2025-08-22 12:42:25] [Rank 0] step:6181/10000 train_time:561827ms step_avg:90.90ms +[2025-08-22 12:42:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:42:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:42:41] [Rank 0] PRINT: step:6200/10000 val_loss:3.7653 svd_entropy: attn_qk:H=0.7962,top10E=0.23,eRank=220.2,q75/q25=44.84 attn_vo:H=0.8314,top10E=0.06,eRank=396.2,q75/q25=inf mlp_w1:H=0.9189,top10E=0.12,eRank=453.2,q75/q25=4.20 mlp_w2:H=0.9673,top10E=0.05,eRank=618.6,q75/q25=2.98 vo_prod:H=0.6884,top10E=0.10,eRank=220.2,q75/q25=inf train_time:563732ms step_avg:90.92ms +[2025-08-22 12:42:41] [Rank 0] PRINT: step:6200/10000 val_loss:3.7653 svd_entropy: attn_qk:H=0.7962,top10E=0.23,eRank=220.2,q75/q25=44.84 attn_vo:H=0.8314,top10E=0.06,eRank=396.2,q75/q25=inf mlp_w1:H=0.9189,top10E=0.12,eRank=453.2,q75/q25=4.20 mlp_w2:H=0.9673,top10E=0.05,eRank=618.6,q75/q25=2.98 vo_prod:H=0.6884,top10E=0.10,eRank=220.2,q75/q25=inf train_time:563732ms step_avg:90.92ms +[2025-08-22 12:42:41] [Rank 0] step:6201/10000 train_time:563752ms step_avg:90.91ms +[2025-08-22 12:42:41] [Rank 0] step:6201/10000 train_time:563752ms step_avg:90.91ms +[2025-08-22 12:42:43] [Rank 0] step:6221/10000 train_time:565626ms step_avg:90.92ms +[2025-08-22 12:42:43] [Rank 0] step:6221/10000 train_time:565626ms step_avg:90.92ms +[2025-08-22 12:42:45] [Rank 0] step:6241/10000 train_time:567512ms step_avg:90.93ms +[2025-08-22 
12:42:45] [Rank 0] step:6241/10000 train_time:567512ms step_avg:90.93ms +[2025-08-22 12:42:47] [Rank 0] step:6261/10000 train_time:569403ms step_avg:90.94ms +[2025-08-22 12:42:47] [Rank 0] step:6261/10000 train_time:569403ms step_avg:90.94ms +[2025-08-22 12:42:48] [Rank 0] step:6281/10000 train_time:571298ms step_avg:90.96ms +[2025-08-22 12:42:48] [Rank 0] step:6281/10000 train_time:571298ms step_avg:90.96ms +[2025-08-22 12:42:50] [Rank 0] step:6301/10000 train_time:573192ms step_avg:90.97ms +[2025-08-22 12:42:50] [Rank 0] step:6301/10000 train_time:573192ms step_avg:90.97ms +[2025-08-22 12:42:52] [Rank 0] step:6321/10000 train_time:575087ms step_avg:90.98ms +[2025-08-22 12:42:52] [Rank 0] step:6321/10000 train_time:575087ms step_avg:90.98ms +[2025-08-22 12:42:54] [Rank 0] step:6341/10000 train_time:576980ms step_avg:90.99ms +[2025-08-22 12:42:54] [Rank 0] step:6341/10000 train_time:576980ms step_avg:90.99ms +[2025-08-22 12:42:56] [Rank 0] step:6361/10000 train_time:578884ms step_avg:91.01ms +[2025-08-22 12:42:56] [Rank 0] step:6361/10000 train_time:578884ms step_avg:91.01ms +[2025-08-22 12:42:58] [Rank 0] step:6381/10000 train_time:580780ms step_avg:91.02ms +[2025-08-22 12:42:58] [Rank 0] step:6381/10000 train_time:580780ms step_avg:91.02ms +[2025-08-22 12:43:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:43:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:43:14] [Rank 0] PRINT: step:6400/10000 val_loss:3.7509 svd_entropy: attn_qk:H=0.7968,top10E=0.23,eRank=220.9,q75/q25=44.79 attn_vo:H=0.8315,top10E=0.06,eRank=396.5,q75/q25=inf mlp_w1:H=0.9194,top10E=0.12,eRank=454.8,q75/q25=4.19 mlp_w2:H=0.9672,top10E=0.05,eRank=618.3,q75/q25=2.99 vo_prod:H=0.6888,top10E=0.10,eRank=220.8,q75/q25=inf train_time:582688ms step_avg:91.04ms +[2025-08-22 12:43:14] [Rank 0] PRINT: step:6400/10000 val_loss:3.7509 svd_entropy: attn_qk:H=0.7968,top10E=0.23,eRank=220.9,q75/q25=44.79 attn_vo:H=0.8315,top10E=0.06,eRank=396.5,q75/q25=inf mlp_w1:H=0.9194,top10E=0.12,eRank=454.8,q75/q25=4.19 mlp_w2:H=0.9672,top10E=0.05,eRank=618.3,q75/q25=2.99 vo_prod:H=0.6888,top10E=0.10,eRank=220.8,q75/q25=inf train_time:582688ms step_avg:91.04ms +[2025-08-22 12:43:14] [Rank 0] step:6401/10000 train_time:582707ms step_avg:91.03ms +[2025-08-22 12:43:14] [Rank 0] step:6401/10000 train_time:582707ms step_avg:91.03ms +[2025-08-22 12:43:16] [Rank 0] step:6421/10000 train_time:584598ms step_avg:91.04ms +[2025-08-22 12:43:16] [Rank 0] step:6421/10000 train_time:584598ms step_avg:91.04ms +[2025-08-22 12:43:17] [Rank 0] step:6441/10000 train_time:586492ms step_avg:91.06ms +[2025-08-22 12:43:17] [Rank 0] step:6441/10000 train_time:586492ms step_avg:91.06ms +[2025-08-22 12:43:19] [Rank 0] step:6461/10000 train_time:588391ms step_avg:91.07ms +[2025-08-22 12:43:19] [Rank 0] step:6461/10000 train_time:588391ms step_avg:91.07ms +[2025-08-22 12:43:21] [Rank 0] step:6481/10000 train_time:590294ms step_avg:91.08ms +[2025-08-22 12:43:21] [Rank 0] step:6481/10000 train_time:590294ms step_avg:91.08ms +[2025-08-22 12:43:23] [Rank 0] step:6501/10000 train_time:592185ms step_avg:91.09ms +[2025-08-22 12:43:23] [Rank 0] step:6501/10000 train_time:592185ms step_avg:91.09ms +[2025-08-22 12:43:25] [Rank 0] step:6521/10000 train_time:594081ms step_avg:91.10ms +[2025-08-22 12:43:25] [Rank 0] step:6521/10000 train_time:594081ms step_avg:91.10ms +[2025-08-22 12:43:27] [Rank 0] 
step:6541/10000 train_time:595980ms step_avg:91.11ms +[2025-08-22 12:43:27] [Rank 0] step:6541/10000 train_time:595980ms step_avg:91.11ms +[2025-08-22 12:43:29] [Rank 0] step:6561/10000 train_time:597878ms step_avg:91.13ms +[2025-08-22 12:43:29] [Rank 0] step:6561/10000 train_time:597878ms step_avg:91.13ms +[2025-08-22 12:43:31] [Rank 0] step:6581/10000 train_time:599771ms step_avg:91.14ms +[2025-08-22 12:43:31] [Rank 0] step:6581/10000 train_time:599771ms step_avg:91.14ms +[2025-08-22 12:43:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:43:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:43:46] [Rank 0] PRINT: step:6600/10000 val_loss:3.7391 svd_entropy: attn_qk:H=0.7975,top10E=0.22,eRank=221.6,q75/q25=44.68 attn_vo:H=0.8316,top10E=0.06,eRank=396.8,q75/q25=inf mlp_w1:H=0.9199,top10E=0.12,eRank=456.3,q75/q25=4.18 mlp_w2:H=0.9671,top10E=0.05,eRank=618.0,q75/q25=3.00 vo_prod:H=0.6892,top10E=0.10,eRank=221.5,q75/q25=inf train_time:601683ms step_avg:91.16ms +[2025-08-22 12:43:46] [Rank 0] PRINT: step:6600/10000 val_loss:3.7391 svd_entropy: attn_qk:H=0.7975,top10E=0.22,eRank=221.6,q75/q25=44.68 attn_vo:H=0.8316,top10E=0.06,eRank=396.8,q75/q25=inf mlp_w1:H=0.9199,top10E=0.12,eRank=456.3,q75/q25=4.18 mlp_w2:H=0.9671,top10E=0.05,eRank=618.0,q75/q25=3.00 vo_prod:H=0.6892,top10E=0.10,eRank=221.5,q75/q25=inf train_time:601683ms step_avg:91.16ms +[2025-08-22 12:43:46] [Rank 0] step:6601/10000 train_time:601702ms step_avg:91.15ms +[2025-08-22 12:43:46] [Rank 0] step:6601/10000 train_time:601702ms step_avg:91.15ms +[2025-08-22 12:43:48] [Rank 0] step:6621/10000 train_time:603571ms step_avg:91.16ms +[2025-08-22 12:43:48] [Rank 0] step:6621/10000 train_time:603571ms step_avg:91.16ms +[2025-08-22 12:43:50] [Rank 0] step:6641/10000 train_time:605472ms step_avg:91.17ms +[2025-08-22 
12:43:50] [Rank 0] step:6641/10000 train_time:605472ms step_avg:91.17ms +[2025-08-22 12:43:52] [Rank 0] step:6661/10000 train_time:607364ms step_avg:91.18ms +[2025-08-22 12:43:52] [Rank 0] step:6661/10000 train_time:607364ms step_avg:91.18ms +[2025-08-22 12:43:54] [Rank 0] step:6681/10000 train_time:609274ms step_avg:91.20ms +[2025-08-22 12:43:54] [Rank 0] step:6681/10000 train_time:609274ms step_avg:91.20ms +[2025-08-22 12:43:56] [Rank 0] step:6701/10000 train_time:611203ms step_avg:91.21ms +[2025-08-22 12:43:56] [Rank 0] step:6701/10000 train_time:611203ms step_avg:91.21ms +[2025-08-22 12:43:58] [Rank 0] step:6721/10000 train_time:613126ms step_avg:91.23ms +[2025-08-22 12:43:58] [Rank 0] step:6721/10000 train_time:613126ms step_avg:91.23ms +[2025-08-22 12:44:00] [Rank 0] step:6741/10000 train_time:615047ms step_avg:91.24ms +[2025-08-22 12:44:00] [Rank 0] step:6741/10000 train_time:615047ms step_avg:91.24ms +[2025-08-22 12:44:02] [Rank 0] step:6761/10000 train_time:616969ms step_avg:91.25ms +[2025-08-22 12:44:02] [Rank 0] step:6761/10000 train_time:616969ms step_avg:91.25ms +[2025-08-22 12:44:04] [Rank 0] step:6781/10000 train_time:618895ms step_avg:91.27ms +[2025-08-22 12:44:04] [Rank 0] step:6781/10000 train_time:618895ms step_avg:91.27ms +[2025-08-22 12:44:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:44:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:44:19] [Rank 0] PRINT: step:6800/10000 val_loss:3.7212 svd_entropy: attn_qk:H=0.7980,top10E=0.22,eRank=222.1,q75/q25=44.50 attn_vo:H=0.8317,top10E=0.06,eRank=397.1,q75/q25=inf mlp_w1:H=0.9204,top10E=0.12,eRank=457.7,q75/q25=4.17 mlp_w2:H=0.9670,top10E=0.05,eRank=617.6,q75/q25=3.00 vo_prod:H=0.6895,top10E=0.10,eRank=222.1,q75/q25=inf train_time:620925ms step_avg:91.31ms +[2025-08-22 12:44:19] [Rank 0] PRINT: step:6800/10000 val_loss:3.7212 svd_entropy: attn_qk:H=0.7980,top10E=0.22,eRank=222.1,q75/q25=44.50 attn_vo:H=0.8317,top10E=0.06,eRank=397.1,q75/q25=inf mlp_w1:H=0.9204,top10E=0.12,eRank=457.7,q75/q25=4.17 mlp_w2:H=0.9670,top10E=0.05,eRank=617.6,q75/q25=3.00 vo_prod:H=0.6895,top10E=0.10,eRank=222.1,q75/q25=inf train_time:620925ms step_avg:91.31ms +[2025-08-22 12:44:20] [Rank 0] step:6801/10000 train_time:620945ms step_avg:91.30ms +[2025-08-22 12:44:20] [Rank 0] step:6801/10000 train_time:620945ms step_avg:91.30ms +[2025-08-22 12:44:21] [Rank 0] step:6821/10000 train_time:622842ms step_avg:91.31ms +[2025-08-22 12:44:21] [Rank 0] step:6821/10000 train_time:622842ms step_avg:91.31ms +[2025-08-22 12:44:23] [Rank 0] step:6841/10000 train_time:624760ms step_avg:91.33ms +[2025-08-22 12:44:23] [Rank 0] step:6841/10000 train_time:624760ms step_avg:91.33ms +[2025-08-22 12:44:25] [Rank 0] step:6861/10000 train_time:626675ms step_avg:91.34ms +[2025-08-22 12:44:25] [Rank 0] step:6861/10000 train_time:626675ms step_avg:91.34ms +[2025-08-22 12:44:27] [Rank 0] step:6881/10000 train_time:628599ms step_avg:91.35ms +[2025-08-22 12:44:27] [Rank 0] step:6881/10000 train_time:628599ms step_avg:91.35ms +[2025-08-22 12:44:29] [Rank 0] step:6901/10000 train_time:630517ms step_avg:91.37ms +[2025-08-22 12:44:29] [Rank 0] step:6901/10000 train_time:630517ms step_avg:91.37ms +[2025-08-22 12:44:31] [Rank 0] step:6921/10000 train_time:632432ms step_avg:91.38ms +[2025-08-22 12:44:31] [Rank 0] step:6921/10000 train_time:632432ms step_avg:91.38ms +[2025-08-22 12:44:33] [Rank 0] 
step:6941/10000 train_time:634359ms step_avg:91.39ms +[2025-08-22 12:44:33] [Rank 0] step:6941/10000 train_time:634359ms step_avg:91.39ms +[2025-08-22 12:44:35] [Rank 0] step:6961/10000 train_time:636294ms step_avg:91.41ms +[2025-08-22 12:44:35] [Rank 0] step:6961/10000 train_time:636294ms step_avg:91.41ms +[2025-08-22 12:44:37] [Rank 0] step:6981/10000 train_time:638220ms step_avg:91.42ms +[2025-08-22 12:44:37] [Rank 0] step:6981/10000 train_time:638220ms step_avg:91.42ms +[2025-08-22 12:44:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:44:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:44:52] [Rank 0] PRINT: step:7000/10000 val_loss:3.7038 svd_entropy: attn_qk:H=0.7984,top10E=0.22,eRank=222.7,q75/q25=44.18 attn_vo:H=0.8317,top10E=0.06,eRank=397.3,q75/q25=inf mlp_w1:H=0.9208,top10E=0.12,eRank=459.0,q75/q25=4.16 mlp_w2:H=0.9670,top10E=0.05,eRank=617.3,q75/q25=3.01 vo_prod:H=0.6898,top10E=0.10,eRank=222.6,q75/q25=inf train_time:640160ms step_avg:91.45ms +[2025-08-22 12:44:52] [Rank 0] PRINT: step:7000/10000 val_loss:3.7038 svd_entropy: attn_qk:H=0.7984,top10E=0.22,eRank=222.7,q75/q25=44.18 attn_vo:H=0.8317,top10E=0.06,eRank=397.3,q75/q25=inf mlp_w1:H=0.9208,top10E=0.12,eRank=459.0,q75/q25=4.16 mlp_w2:H=0.9670,top10E=0.05,eRank=617.3,q75/q25=3.01 vo_prod:H=0.6898,top10E=0.10,eRank=222.6,q75/q25=inf train_time:640160ms step_avg:91.45ms +[2025-08-22 12:44:53] [Rank 0] step:7001/10000 train_time:640180ms step_avg:91.44ms +[2025-08-22 12:44:53] [Rank 0] step:7001/10000 train_time:640180ms step_avg:91.44ms +[2025-08-22 12:44:54] [Rank 0] step:7021/10000 train_time:642088ms step_avg:91.45ms +[2025-08-22 12:44:54] [Rank 0] step:7021/10000 train_time:642088ms step_avg:91.45ms +[2025-08-22 12:44:56] [Rank 0] step:7041/10000 train_time:644009ms step_avg:91.47ms +[2025-08-22 
12:44:56] [Rank 0] step:7041/10000 train_time:644009ms step_avg:91.47ms +[2025-08-22 12:44:58] [Rank 0] step:7061/10000 train_time:645928ms step_avg:91.48ms +[2025-08-22 12:44:58] [Rank 0] step:7061/10000 train_time:645928ms step_avg:91.48ms +[2025-08-22 12:45:00] [Rank 0] step:7081/10000 train_time:647845ms step_avg:91.49ms +[2025-08-22 12:45:00] [Rank 0] step:7081/10000 train_time:647845ms step_avg:91.49ms +[2025-08-22 12:45:02] [Rank 0] step:7101/10000 train_time:649774ms step_avg:91.50ms +[2025-08-22 12:45:02] [Rank 0] step:7101/10000 train_time:649774ms step_avg:91.50ms +[2025-08-22 12:45:04] [Rank 0] step:7121/10000 train_time:651695ms step_avg:91.52ms +[2025-08-22 12:45:04] [Rank 0] step:7121/10000 train_time:651695ms step_avg:91.52ms +[2025-08-22 12:45:06] [Rank 0] step:7141/10000 train_time:653619ms step_avg:91.53ms +[2025-08-22 12:45:06] [Rank 0] step:7141/10000 train_time:653619ms step_avg:91.53ms +[2025-08-22 12:45:08] [Rank 0] step:7161/10000 train_time:655611ms step_avg:91.55ms +[2025-08-22 12:45:08] [Rank 0] step:7161/10000 train_time:655611ms step_avg:91.55ms +[2025-08-22 12:45:10] [Rank 0] step:7181/10000 train_time:657594ms step_avg:91.57ms +[2025-08-22 12:45:10] [Rank 0] step:7181/10000 train_time:657594ms step_avg:91.57ms +[2025-08-22 12:45:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:45:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:45:25] [Rank 0] PRINT: step:7200/10000 val_loss:3.6942 svd_entropy: attn_qk:H=0.7989,top10E=0.22,eRank=223.2,q75/q25=44.17 attn_vo:H=0.8318,top10E=0.06,eRank=397.6,q75/q25=inf mlp_w1:H=0.9212,top10E=0.12,eRank=460.1,q75/q25=4.15 mlp_w2:H=0.9669,top10E=0.05,eRank=617.1,q75/q25=3.01 vo_prod:H=0.6901,top10E=0.09,eRank=223.2,q75/q25=inf train_time:659535ms step_avg:91.60ms +[2025-08-22 12:45:25] [Rank 0] PRINT: step:7200/10000 val_loss:3.6942 svd_entropy: attn_qk:H=0.7989,top10E=0.22,eRank=223.2,q75/q25=44.17 attn_vo:H=0.8318,top10E=0.06,eRank=397.6,q75/q25=inf mlp_w1:H=0.9212,top10E=0.12,eRank=460.1,q75/q25=4.15 mlp_w2:H=0.9669,top10E=0.05,eRank=617.1,q75/q25=3.01 vo_prod:H=0.6901,top10E=0.09,eRank=223.2,q75/q25=inf train_time:659535ms step_avg:91.60ms +[2025-08-22 12:45:26] [Rank 0] step:7201/10000 train_time:659555ms step_avg:91.59ms +[2025-08-22 12:45:26] [Rank 0] step:7201/10000 train_time:659555ms step_avg:91.59ms +[2025-08-22 12:45:27] [Rank 0] step:7221/10000 train_time:661487ms step_avg:91.61ms +[2025-08-22 12:45:27] [Rank 0] step:7221/10000 train_time:661487ms step_avg:91.61ms +[2025-08-22 12:45:29] [Rank 0] step:7241/10000 train_time:663411ms step_avg:91.62ms +[2025-08-22 12:45:29] [Rank 0] step:7241/10000 train_time:663411ms step_avg:91.62ms +[2025-08-22 12:45:31] [Rank 0] step:7261/10000 train_time:665332ms step_avg:91.63ms +[2025-08-22 12:45:31] [Rank 0] step:7261/10000 train_time:665332ms step_avg:91.63ms +[2025-08-22 12:45:33] [Rank 0] step:7281/10000 train_time:667266ms step_avg:91.64ms +[2025-08-22 12:45:33] [Rank 0] step:7281/10000 train_time:667266ms step_avg:91.64ms +[2025-08-22 12:45:35] [Rank 0] step:7301/10000 train_time:669191ms step_avg:91.66ms +[2025-08-22 12:45:35] [Rank 0] step:7301/10000 train_time:669191ms step_avg:91.66ms +[2025-08-22 12:45:37] [Rank 0] step:7321/10000 train_time:671129ms step_avg:91.67ms +[2025-08-22 12:45:37] [Rank 0] step:7321/10000 train_time:671129ms step_avg:91.67ms +[2025-08-22 12:45:39] [Rank 0] 
step:7341/10000 train_time:673056ms step_avg:91.68ms +[2025-08-22 12:45:39] [Rank 0] step:7341/10000 train_time:673056ms step_avg:91.68ms +[2025-08-22 12:45:41] [Rank 0] step:7361/10000 train_time:674993ms step_avg:91.70ms +[2025-08-22 12:45:41] [Rank 0] step:7361/10000 train_time:674993ms step_avg:91.70ms +[2025-08-22 12:45:43] [Rank 0] step:7381/10000 train_time:676928ms step_avg:91.71ms +[2025-08-22 12:45:43] [Rank 0] step:7381/10000 train_time:676928ms step_avg:91.71ms +[2025-08-22 12:45:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:45:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:45:59] [Rank 0] PRINT: step:7400/10000 val_loss:3.6738 svd_entropy: attn_qk:H=0.7993,top10E=0.22,eRank=223.6,q75/q25=43.83 attn_vo:H=0.8319,top10E=0.06,eRank=397.8,q75/q25=inf mlp_w1:H=0.9216,top10E=0.12,eRank=461.2,q75/q25=4.14 mlp_w2:H=0.9669,top10E=0.05,eRank=616.9,q75/q25=3.01 vo_prod:H=0.6904,top10E=0.09,eRank=223.7,q75/q25=inf train_time:678857ms step_avg:91.74ms +[2025-08-22 12:45:59] [Rank 0] PRINT: step:7400/10000 val_loss:3.6738 svd_entropy: attn_qk:H=0.7993,top10E=0.22,eRank=223.6,q75/q25=43.83 attn_vo:H=0.8319,top10E=0.06,eRank=397.8,q75/q25=inf mlp_w1:H=0.9216,top10E=0.12,eRank=461.2,q75/q25=4.14 mlp_w2:H=0.9669,top10E=0.05,eRank=616.9,q75/q25=3.01 vo_prod:H=0.6904,top10E=0.09,eRank=223.7,q75/q25=inf train_time:678857ms step_avg:91.74ms +[2025-08-22 12:45:59] [Rank 0] step:7401/10000 train_time:678877ms step_avg:91.73ms +[2025-08-22 12:45:59] [Rank 0] step:7401/10000 train_time:678877ms step_avg:91.73ms +[2025-08-22 12:46:01] [Rank 0] step:7421/10000 train_time:680808ms step_avg:91.74ms +[2025-08-22 12:46:01] [Rank 0] step:7421/10000 train_time:680808ms step_avg:91.74ms +[2025-08-22 12:46:03] [Rank 0] step:7441/10000 train_time:682727ms step_avg:91.75ms +[2025-08-22 
12:46:03] [Rank 0] step:7441/10000 train_time:682727ms step_avg:91.75ms +[2025-08-22 12:46:04] [Rank 0] step:7461/10000 train_time:684653ms step_avg:91.76ms +[2025-08-22 12:46:04] [Rank 0] step:7461/10000 train_time:684653ms step_avg:91.76ms +[2025-08-22 12:46:06] [Rank 0] step:7481/10000 train_time:686584ms step_avg:91.78ms +[2025-08-22 12:46:06] [Rank 0] step:7481/10000 train_time:686584ms step_avg:91.78ms +[2025-08-22 12:46:08] [Rank 0] step:7501/10000 train_time:688513ms step_avg:91.79ms +[2025-08-22 12:46:08] [Rank 0] step:7501/10000 train_time:688513ms step_avg:91.79ms +[2025-08-22 12:46:10] [Rank 0] step:7521/10000 train_time:690444ms step_avg:91.80ms +[2025-08-22 12:46:10] [Rank 0] step:7521/10000 train_time:690444ms step_avg:91.80ms +[2025-08-22 12:46:12] [Rank 0] step:7541/10000 train_time:692442ms step_avg:91.82ms +[2025-08-22 12:46:12] [Rank 0] step:7541/10000 train_time:692442ms step_avg:91.82ms +[2025-08-22 12:46:14] [Rank 0] step:7561/10000 train_time:694398ms step_avg:91.84ms +[2025-08-22 12:46:14] [Rank 0] step:7561/10000 train_time:694398ms step_avg:91.84ms +[2025-08-22 12:46:16] [Rank 0] step:7581/10000 train_time:696335ms step_avg:91.85ms +[2025-08-22 12:46:16] [Rank 0] step:7581/10000 train_time:696335ms step_avg:91.85ms +[2025-08-22 12:46:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:46:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:46:32] [Rank 0] PRINT: step:7600/10000 val_loss:3.6641 svd_entropy: attn_qk:H=0.7998,top10E=0.22,eRank=224.1,q75/q25=43.63 attn_vo:H=0.8320,top10E=0.06,eRank=398.0,q75/q25=inf mlp_w1:H=0.9219,top10E=0.12,eRank=462.2,q75/q25=4.13 mlp_w2:H=0.9668,top10E=0.05,eRank=616.7,q75/q25=3.02 vo_prod:H=0.6906,top10E=0.09,eRank=224.1,q75/q25=inf train_time:698285ms step_avg:91.88ms +[2025-08-22 12:46:32] [Rank 0] PRINT: step:7600/10000 val_loss:3.6641 svd_entropy: attn_qk:H=0.7998,top10E=0.22,eRank=224.1,q75/q25=43.63 attn_vo:H=0.8320,top10E=0.06,eRank=398.0,q75/q25=inf mlp_w1:H=0.9219,top10E=0.12,eRank=462.2,q75/q25=4.13 mlp_w2:H=0.9668,top10E=0.05,eRank=616.7,q75/q25=3.02 vo_prod:H=0.6906,top10E=0.09,eRank=224.1,q75/q25=inf train_time:698285ms step_avg:91.88ms +[2025-08-22 12:46:32] [Rank 0] step:7601/10000 train_time:698304ms step_avg:91.87ms +[2025-08-22 12:46:32] [Rank 0] step:7601/10000 train_time:698304ms step_avg:91.87ms +[2025-08-22 12:46:34] [Rank 0] step:7621/10000 train_time:700206ms step_avg:91.88ms +[2025-08-22 12:46:34] [Rank 0] step:7621/10000 train_time:700206ms step_avg:91.88ms +[2025-08-22 12:46:36] [Rank 0] step:7641/10000 train_time:702127ms step_avg:91.89ms +[2025-08-22 12:46:36] [Rank 0] step:7641/10000 train_time:702127ms step_avg:91.89ms +[2025-08-22 12:46:38] [Rank 0] step:7661/10000 train_time:704056ms step_avg:91.90ms +[2025-08-22 12:46:38] [Rank 0] step:7661/10000 train_time:704056ms step_avg:91.90ms +[2025-08-22 12:46:40] [Rank 0] step:7681/10000 train_time:705977ms step_avg:91.91ms +[2025-08-22 12:46:40] [Rank 0] step:7681/10000 train_time:705977ms step_avg:91.91ms +[2025-08-22 12:46:42] [Rank 0] step:7701/10000 train_time:707900ms step_avg:91.92ms +[2025-08-22 12:46:42] [Rank 0] step:7701/10000 train_time:707900ms step_avg:91.92ms +[2025-08-22 12:46:44] [Rank 0] step:7721/10000 train_time:709837ms step_avg:91.94ms +[2025-08-22 12:46:44] [Rank 0] step:7721/10000 train_time:709837ms step_avg:91.94ms +[2025-08-22 12:46:46] [Rank 0] 
step:7741/10000 train_time:711761ms step_avg:91.95ms +[2025-08-22 12:46:46] [Rank 0] step:7741/10000 train_time:711761ms step_avg:91.95ms +[2025-08-22 12:46:47] [Rank 0] step:7761/10000 train_time:713697ms step_avg:91.96ms +[2025-08-22 12:46:47] [Rank 0] step:7761/10000 train_time:713697ms step_avg:91.96ms +[2025-08-22 12:46:49] [Rank 0] step:7781/10000 train_time:715628ms step_avg:91.97ms +[2025-08-22 12:46:49] [Rank 0] step:7781/10000 train_time:715628ms step_avg:91.97ms +[2025-08-22 12:46:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:46:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:47:05] [Rank 0] PRINT: step:7800/10000 val_loss:3.6496 svd_entropy: attn_qk:H=0.8001,top10E=0.22,eRank=224.5,q75/q25=43.32 attn_vo:H=0.8320,top10E=0.06,eRank=398.1,q75/q25=inf mlp_w1:H=0.9222,top10E=0.12,eRank=463.1,q75/q25=4.12 mlp_w2:H=0.9668,top10E=0.05,eRank=616.6,q75/q25=3.02 vo_prod:H=0.6908,top10E=0.09,eRank=224.5,q75/q25=inf train_time:717582ms step_avg:92.00ms +[2025-08-22 12:47:05] [Rank 0] PRINT: step:7800/10000 val_loss:3.6496 svd_entropy: attn_qk:H=0.8001,top10E=0.22,eRank=224.5,q75/q25=43.32 attn_vo:H=0.8320,top10E=0.06,eRank=398.1,q75/q25=inf mlp_w1:H=0.9222,top10E=0.12,eRank=463.1,q75/q25=4.12 mlp_w2:H=0.9668,top10E=0.05,eRank=616.6,q75/q25=3.02 vo_prod:H=0.6908,top10E=0.09,eRank=224.5,q75/q25=inf train_time:717582ms step_avg:92.00ms +[2025-08-22 12:47:05] [Rank 0] step:7801/10000 train_time:717603ms step_avg:91.99ms +[2025-08-22 12:47:05] [Rank 0] step:7801/10000 train_time:717603ms step_avg:91.99ms +[2025-08-22 12:47:07] [Rank 0] step:7821/10000 train_time:719502ms step_avg:92.00ms +[2025-08-22 12:47:07] [Rank 0] step:7821/10000 train_time:719502ms step_avg:92.00ms +[2025-08-22 12:47:09] [Rank 0] step:7841/10000 train_time:721420ms step_avg:92.01ms +[2025-08-22 
12:47:09] [Rank 0] step:7841/10000 train_time:721420ms step_avg:92.01ms +[2025-08-22 12:47:11] [Rank 0] step:7861/10000 train_time:723352ms step_avg:92.02ms +[2025-08-22 12:47:11] [Rank 0] step:7861/10000 train_time:723352ms step_avg:92.02ms +[2025-08-22 12:47:13] [Rank 0] step:7881/10000 train_time:725285ms step_avg:92.03ms +[2025-08-22 12:47:13] [Rank 0] step:7881/10000 train_time:725285ms step_avg:92.03ms +[2025-08-22 12:47:15] [Rank 0] step:7901/10000 train_time:727271ms step_avg:92.05ms +[2025-08-22 12:47:15] [Rank 0] step:7901/10000 train_time:727271ms step_avg:92.05ms +[2025-08-22 12:47:17] [Rank 0] step:7921/10000 train_time:729202ms step_avg:92.06ms +[2025-08-22 12:47:17] [Rank 0] step:7921/10000 train_time:729202ms step_avg:92.06ms +[2025-08-22 12:47:19] [Rank 0] step:7941/10000 train_time:731139ms step_avg:92.07ms +[2025-08-22 12:47:19] [Rank 0] step:7941/10000 train_time:731139ms step_avg:92.07ms +[2025-08-22 12:47:21] [Rank 0] step:7961/10000 train_time:733075ms step_avg:92.08ms +[2025-08-22 12:47:21] [Rank 0] step:7961/10000 train_time:733075ms step_avg:92.08ms +[2025-08-22 12:47:23] [Rank 0] step:7981/10000 train_time:734995ms step_avg:92.09ms +[2025-08-22 12:47:23] [Rank 0] step:7981/10000 train_time:734995ms step_avg:92.09ms +[2025-08-22 12:47:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:47:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:47:39] [Rank 0] PRINT: step:8000/10000 val_loss:3.6333 svd_entropy: attn_qk:H=0.8004,top10E=0.22,eRank=224.8,q75/q25=43.39 attn_vo:H=0.8321,top10E=0.05,eRank=398.3,q75/q25=inf mlp_w1:H=0.9225,top10E=0.12,eRank=464.0,q75/q25=4.11 mlp_w2:H=0.9667,top10E=0.05,eRank=616.4,q75/q25=3.02 vo_prod:H=0.6910,top10E=0.09,eRank=224.9,q75/q25=inf train_time:736945ms step_avg:92.12ms +[2025-08-22 12:47:39] [Rank 0] PRINT: step:8000/10000 val_loss:3.6333 svd_entropy: attn_qk:H=0.8004,top10E=0.22,eRank=224.8,q75/q25=43.39 attn_vo:H=0.8321,top10E=0.05,eRank=398.3,q75/q25=inf mlp_w1:H=0.9225,top10E=0.12,eRank=464.0,q75/q25=4.11 mlp_w2:H=0.9667,top10E=0.05,eRank=616.4,q75/q25=3.02 vo_prod:H=0.6910,top10E=0.09,eRank=224.9,q75/q25=inf train_time:736945ms step_avg:92.12ms +[2025-08-22 12:47:39] [Rank 0] step:8001/10000 train_time:736967ms step_avg:92.11ms +[2025-08-22 12:47:39] [Rank 0] step:8001/10000 train_time:736967ms step_avg:92.11ms +[2025-08-22 12:47:41] [Rank 0] step:8021/10000 train_time:738882ms step_avg:92.12ms +[2025-08-22 12:47:41] [Rank 0] step:8021/10000 train_time:738882ms step_avg:92.12ms +[2025-08-22 12:47:43] [Rank 0] step:8041/10000 train_time:740817ms step_avg:92.13ms +[2025-08-22 12:47:43] [Rank 0] step:8041/10000 train_time:740817ms step_avg:92.13ms +[2025-08-22 12:47:45] [Rank 0] step:8061/10000 train_time:742746ms step_avg:92.14ms +[2025-08-22 12:47:45] [Rank 0] step:8061/10000 train_time:742746ms step_avg:92.14ms +[2025-08-22 12:47:47] [Rank 0] step:8081/10000 train_time:744666ms step_avg:92.15ms +[2025-08-22 12:47:47] [Rank 0] step:8081/10000 train_time:744666ms step_avg:92.15ms +[2025-08-22 12:47:48] [Rank 0] step:8101/10000 train_time:746602ms step_avg:92.16ms +[2025-08-22 12:47:48] [Rank 0] step:8101/10000 train_time:746602ms step_avg:92.16ms +[2025-08-22 12:47:50] [Rank 0] step:8121/10000 train_time:748532ms step_avg:92.17ms +[2025-08-22 12:47:50] [Rank 0] step:8121/10000 train_time:748532ms step_avg:92.17ms +[2025-08-22 12:47:52] [Rank 0] 
step:8141/10000 train_time:750490ms step_avg:92.19ms +[2025-08-22 12:47:52] [Rank 0] step:8141/10000 train_time:750490ms step_avg:92.19ms +[2025-08-22 12:47:54] [Rank 0] step:8161/10000 train_time:752437ms step_avg:92.20ms +[2025-08-22 12:47:54] [Rank 0] step:8161/10000 train_time:752437ms step_avg:92.20ms +[2025-08-22 12:47:56] [Rank 0] step:8181/10000 train_time:754399ms step_avg:92.21ms +[2025-08-22 12:47:56] [Rank 0] step:8181/10000 train_time:754399ms step_avg:92.21ms +[2025-08-22 12:47:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:47:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:48:12] [Rank 0] PRINT: step:8200/10000 val_loss:3.6202 svd_entropy: attn_qk:H=0.8007,top10E=0.22,eRank=225.2,q75/q25=43.21 attn_vo:H=0.8321,top10E=0.05,eRank=398.5,q75/q25=inf mlp_w1:H=0.9228,top10E=0.12,eRank=464.7,q75/q25=4.10 mlp_w2:H=0.9667,top10E=0.05,eRank=616.3,q75/q25=3.02 vo_prod:H=0.6912,top10E=0.09,eRank=225.3,q75/q25=inf train_time:756396ms step_avg:92.24ms +[2025-08-22 12:48:12] [Rank 0] PRINT: step:8200/10000 val_loss:3.6202 svd_entropy: attn_qk:H=0.8007,top10E=0.22,eRank=225.2,q75/q25=43.21 attn_vo:H=0.8321,top10E=0.05,eRank=398.5,q75/q25=inf mlp_w1:H=0.9228,top10E=0.12,eRank=464.7,q75/q25=4.10 mlp_w2:H=0.9667,top10E=0.05,eRank=616.3,q75/q25=3.02 vo_prod:H=0.6912,top10E=0.09,eRank=225.3,q75/q25=inf train_time:756396ms step_avg:92.24ms +[2025-08-22 12:48:12] [Rank 0] step:8201/10000 train_time:756414ms step_avg:92.23ms +[2025-08-22 12:48:12] [Rank 0] step:8201/10000 train_time:756414ms step_avg:92.23ms +[2025-08-22 12:48:14] [Rank 0] step:8221/10000 train_time:758376ms step_avg:92.25ms +[2025-08-22 12:48:14] [Rank 0] step:8221/10000 train_time:758376ms step_avg:92.25ms +[2025-08-22 12:48:16] [Rank 0] step:8241/10000 train_time:760338ms step_avg:92.26ms +[2025-08-22 
12:48:16] [Rank 0] step:8241/10000 train_time:760338ms step_avg:92.26ms +[2025-08-22 12:48:18] [Rank 0] step:8261/10000 train_time:762366ms step_avg:92.28ms +[2025-08-22 12:48:18] [Rank 0] step:8261/10000 train_time:762366ms step_avg:92.28ms +[2025-08-22 12:48:20] [Rank 0] step:8281/10000 train_time:764363ms step_avg:92.30ms +[2025-08-22 12:48:20] [Rank 0] step:8281/10000 train_time:764363ms step_avg:92.30ms +[2025-08-22 12:48:22] [Rank 0] step:8301/10000 train_time:766320ms step_avg:92.32ms +[2025-08-22 12:48:22] [Rank 0] step:8301/10000 train_time:766320ms step_avg:92.32ms +[2025-08-22 12:48:24] [Rank 0] step:8321/10000 train_time:768269ms step_avg:92.33ms +[2025-08-22 12:48:24] [Rank 0] step:8321/10000 train_time:768269ms step_avg:92.33ms +[2025-08-22 12:48:26] [Rank 0] step:8341/10000 train_time:770234ms step_avg:92.34ms +[2025-08-22 12:48:26] [Rank 0] step:8341/10000 train_time:770234ms step_avg:92.34ms +[2025-08-22 12:48:28] [Rank 0] step:8361/10000 train_time:772189ms step_avg:92.36ms +[2025-08-22 12:48:28] [Rank 0] step:8361/10000 train_time:772189ms step_avg:92.36ms +[2025-08-22 12:48:30] [Rank 0] step:8381/10000 train_time:774145ms step_avg:92.37ms +[2025-08-22 12:48:30] [Rank 0] step:8381/10000 train_time:774145ms step_avg:92.37ms +[2025-08-22 12:48:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:48:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:48:45] [Rank 0] PRINT: step:8400/10000 val_loss:3.6071 svd_entropy: attn_qk:H=0.8009,top10E=0.22,eRank=225.4,q75/q25=43.05 attn_vo:H=0.8322,top10E=0.05,eRank=398.6,q75/q25=inf mlp_w1:H=0.9230,top10E=0.11,eRank=465.5,q75/q25=4.10 mlp_w2:H=0.9667,top10E=0.05,eRank=616.2,q75/q25=3.02 vo_prod:H=0.6914,top10E=0.09,eRank=225.6,q75/q25=inf train_time:776118ms step_avg:92.39ms +[2025-08-22 12:48:45] [Rank 0] PRINT: step:8400/10000 val_loss:3.6071 svd_entropy: attn_qk:H=0.8009,top10E=0.22,eRank=225.4,q75/q25=43.05 attn_vo:H=0.8322,top10E=0.05,eRank=398.6,q75/q25=inf mlp_w1:H=0.9230,top10E=0.11,eRank=465.5,q75/q25=4.10 mlp_w2:H=0.9667,top10E=0.05,eRank=616.2,q75/q25=3.02 vo_prod:H=0.6914,top10E=0.09,eRank=225.6,q75/q25=inf train_time:776118ms step_avg:92.39ms +[2025-08-22 12:48:46] [Rank 0] step:8401/10000 train_time:776138ms step_avg:92.39ms +[2025-08-22 12:48:46] [Rank 0] step:8401/10000 train_time:776138ms step_avg:92.39ms +[2025-08-22 12:48:48] [Rank 0] step:8421/10000 train_time:778073ms step_avg:92.40ms +[2025-08-22 12:48:48] [Rank 0] step:8421/10000 train_time:778073ms step_avg:92.40ms +[2025-08-22 12:48:49] [Rank 0] step:8441/10000 train_time:780025ms step_avg:92.41ms +[2025-08-22 12:48:49] [Rank 0] step:8441/10000 train_time:780025ms step_avg:92.41ms +[2025-08-22 12:48:51] [Rank 0] step:8461/10000 train_time:781974ms step_avg:92.42ms +[2025-08-22 12:48:51] [Rank 0] step:8461/10000 train_time:781974ms step_avg:92.42ms +[2025-08-22 12:48:53] [Rank 0] step:8481/10000 train_time:783932ms step_avg:92.43ms +[2025-08-22 12:48:53] [Rank 0] step:8481/10000 train_time:783932ms step_avg:92.43ms +[2025-08-22 12:48:55] [Rank 0] step:8501/10000 train_time:785910ms step_avg:92.45ms +[2025-08-22 12:48:55] [Rank 0] step:8501/10000 train_time:785910ms step_avg:92.45ms +[2025-08-22 12:48:57] [Rank 0] step:8521/10000 train_time:787864ms step_avg:92.46ms +[2025-08-22 12:48:57] [Rank 0] step:8521/10000 train_time:787864ms step_avg:92.46ms +[2025-08-22 12:48:59] [Rank 0] 
step:8541/10000 train_time:789835ms step_avg:92.48ms +[2025-08-22 12:48:59] [Rank 0] step:8541/10000 train_time:789835ms step_avg:92.48ms +[2025-08-22 12:49:01] [Rank 0] step:8561/10000 train_time:791797ms step_avg:92.49ms +[2025-08-22 12:49:01] [Rank 0] step:8561/10000 train_time:791797ms step_avg:92.49ms +[2025-08-22 12:49:03] [Rank 0] step:8581/10000 train_time:793756ms step_avg:92.50ms +[2025-08-22 12:49:03] [Rank 0] step:8581/10000 train_time:793756ms step_avg:92.50ms +[2025-08-22 12:49:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:49:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:49:19] [Rank 0] PRINT: step:8600/10000 val_loss:3.5951 svd_entropy: attn_qk:H=0.8011,top10E=0.22,eRank=225.6,q75/q25=43.00 attn_vo:H=0.8322,top10E=0.05,eRank=398.7,q75/q25=inf mlp_w1:H=0.9232,top10E=0.11,eRank=466.1,q75/q25=4.09 mlp_w2:H=0.9666,top10E=0.05,eRank=616.1,q75/q25=3.03 vo_prod:H=0.6916,top10E=0.09,eRank=225.9,q75/q25=inf train_time:795724ms step_avg:92.53ms +[2025-08-22 12:49:19] [Rank 0] PRINT: step:8600/10000 val_loss:3.5951 svd_entropy: attn_qk:H=0.8011,top10E=0.22,eRank=225.6,q75/q25=43.00 attn_vo:H=0.8322,top10E=0.05,eRank=398.7,q75/q25=inf mlp_w1:H=0.9232,top10E=0.11,eRank=466.1,q75/q25=4.09 mlp_w2:H=0.9666,top10E=0.05,eRank=616.1,q75/q25=3.03 vo_prod:H=0.6916,top10E=0.09,eRank=225.9,q75/q25=inf train_time:795724ms step_avg:92.53ms +[2025-08-22 12:49:19] [Rank 0] step:8601/10000 train_time:795742ms step_avg:92.52ms +[2025-08-22 12:49:19] [Rank 0] step:8601/10000 train_time:795742ms step_avg:92.52ms +[2025-08-22 12:49:21] [Rank 0] step:8621/10000 train_time:797774ms step_avg:92.54ms +[2025-08-22 12:49:21] [Rank 0] step:8621/10000 train_time:797774ms step_avg:92.54ms +[2025-08-22 12:49:23] [Rank 0] step:8641/10000 train_time:799784ms step_avg:92.56ms +[2025-08-22 
12:49:23] [Rank 0] step:8641/10000 train_time:799784ms step_avg:92.56ms +[2025-08-22 12:49:25] [Rank 0] step:8661/10000 train_time:801739ms step_avg:92.57ms +[2025-08-22 12:49:25] [Rank 0] step:8661/10000 train_time:801739ms step_avg:92.57ms +[2025-08-22 12:49:27] [Rank 0] step:8681/10000 train_time:803699ms step_avg:92.58ms +[2025-08-22 12:49:27] [Rank 0] step:8681/10000 train_time:803699ms step_avg:92.58ms +[2025-08-22 12:49:29] [Rank 0] step:8701/10000 train_time:805653ms step_avg:92.59ms +[2025-08-22 12:49:29] [Rank 0] step:8701/10000 train_time:805653ms step_avg:92.59ms +[2025-08-22 12:49:31] [Rank 0] step:8721/10000 train_time:807617ms step_avg:92.61ms +[2025-08-22 12:49:31] [Rank 0] step:8721/10000 train_time:807617ms step_avg:92.61ms +[2025-08-22 12:49:33] [Rank 0] step:8741/10000 train_time:809568ms step_avg:92.62ms +[2025-08-22 12:49:33] [Rank 0] step:8741/10000 train_time:809568ms step_avg:92.62ms +[2025-08-22 12:49:35] [Rank 0] step:8761/10000 train_time:811528ms step_avg:92.63ms +[2025-08-22 12:49:35] [Rank 0] step:8761/10000 train_time:811528ms step_avg:92.63ms +[2025-08-22 12:49:37] [Rank 0] step:8781/10000 train_time:813489ms step_avg:92.64ms +[2025-08-22 12:49:37] [Rank 0] step:8781/10000 train_time:813489ms step_avg:92.64ms +[2025-08-22 12:49:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:49:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:49:52] [Rank 0] PRINT: step:8800/10000 val_loss:3.5833 svd_entropy: attn_qk:H=0.8013,top10E=0.22,eRank=225.9,q75/q25=42.81 attn_vo:H=0.8323,top10E=0.05,eRank=398.9,q75/q25=inf mlp_w1:H=0.9234,top10E=0.11,eRank=466.7,q75/q25=4.09 mlp_w2:H=0.9666,top10E=0.05,eRank=616.1,q75/q25=3.03 vo_prod:H=0.6918,top10E=0.09,eRank=226.2,q75/q25=inf train_time:815459ms step_avg:92.67ms +[2025-08-22 12:49:52] [Rank 0] PRINT: step:8800/10000 val_loss:3.5833 svd_entropy: attn_qk:H=0.8013,top10E=0.22,eRank=225.9,q75/q25=42.81 attn_vo:H=0.8323,top10E=0.05,eRank=398.9,q75/q25=inf mlp_w1:H=0.9234,top10E=0.11,eRank=466.7,q75/q25=4.09 mlp_w2:H=0.9666,top10E=0.05,eRank=616.1,q75/q25=3.03 vo_prod:H=0.6918,top10E=0.09,eRank=226.2,q75/q25=inf train_time:815459ms step_avg:92.67ms +[2025-08-22 12:49:53] [Rank 0] step:8801/10000 train_time:815479ms step_avg:92.66ms +[2025-08-22 12:49:53] [Rank 0] step:8801/10000 train_time:815479ms step_avg:92.66ms +[2025-08-22 12:49:55] [Rank 0] step:8821/10000 train_time:817423ms step_avg:92.67ms +[2025-08-22 12:49:55] [Rank 0] step:8821/10000 train_time:817423ms step_avg:92.67ms +[2025-08-22 12:49:56] [Rank 0] step:8841/10000 train_time:819398ms step_avg:92.68ms +[2025-08-22 12:49:56] [Rank 0] step:8841/10000 train_time:819398ms step_avg:92.68ms +[2025-08-22 12:49:58] [Rank 0] step:8861/10000 train_time:821345ms step_avg:92.69ms +[2025-08-22 12:49:58] [Rank 0] step:8861/10000 train_time:821345ms step_avg:92.69ms +[2025-08-22 12:50:00] [Rank 0] step:8881/10000 train_time:823299ms step_avg:92.70ms +[2025-08-22 12:50:00] [Rank 0] step:8881/10000 train_time:823299ms step_avg:92.70ms +[2025-08-22 12:50:02] [Rank 0] step:8901/10000 train_time:825257ms step_avg:92.72ms +[2025-08-22 12:50:02] [Rank 0] step:8901/10000 train_time:825257ms step_avg:92.72ms +[2025-08-22 12:50:04] [Rank 0] step:8921/10000 train_time:827230ms step_avg:92.73ms +[2025-08-22 12:50:04] [Rank 0] step:8921/10000 train_time:827230ms step_avg:92.73ms +[2025-08-22 12:50:06] [Rank 0] 
step:8941/10000 train_time:829189ms step_avg:92.74ms +[2025-08-22 12:50:06] [Rank 0] step:8941/10000 train_time:829189ms step_avg:92.74ms +[2025-08-22 12:50:08] [Rank 0] step:8961/10000 train_time:831144ms step_avg:92.75ms +[2025-08-22 12:50:08] [Rank 0] step:8961/10000 train_time:831144ms step_avg:92.75ms +[2025-08-22 12:50:10] [Rank 0] step:8981/10000 train_time:833100ms step_avg:92.76ms +[2025-08-22 12:50:10] [Rank 0] step:8981/10000 train_time:833100ms step_avg:92.76ms +[2025-08-22 12:50:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:50:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:50:26] [Rank 0] PRINT: step:9000/10000 val_loss:3.5727 svd_entropy: attn_qk:H=0.8015,top10E=0.22,eRank=226.1,q75/q25=42.74 attn_vo:H=0.8323,top10E=0.05,eRank=399.0,q75/q25=inf mlp_w1:H=0.9236,top10E=0.11,eRank=467.2,q75/q25=4.08 mlp_w2:H=0.9666,top10E=0.05,eRank=616.0,q75/q25=3.03 vo_prod:H=0.6919,top10E=0.09,eRank=226.5,q75/q25=inf train_time:835069ms step_avg:92.79ms +[2025-08-22 12:50:26] [Rank 0] PRINT: step:9000/10000 val_loss:3.5727 svd_entropy: attn_qk:H=0.8015,top10E=0.22,eRank=226.1,q75/q25=42.74 attn_vo:H=0.8323,top10E=0.05,eRank=399.0,q75/q25=inf mlp_w1:H=0.9236,top10E=0.11,eRank=467.2,q75/q25=4.08 mlp_w2:H=0.9666,top10E=0.05,eRank=616.0,q75/q25=3.03 vo_prod:H=0.6919,top10E=0.09,eRank=226.5,q75/q25=inf train_time:835069ms step_avg:92.79ms +[2025-08-22 12:50:26] [Rank 0] step:9001/10000 train_time:835088ms step_avg:92.78ms +[2025-08-22 12:50:26] [Rank 0] step:9001/10000 train_time:835088ms step_avg:92.78ms +[2025-08-22 12:50:28] [Rank 0] step:9021/10000 train_time:837041ms step_avg:92.79ms +[2025-08-22 12:50:28] [Rank 0] step:9021/10000 train_time:837041ms step_avg:92.79ms +[2025-08-22 12:50:30] [Rank 0] step:9041/10000 train_time:838996ms step_avg:92.80ms +[2025-08-22 
12:50:30] [Rank 0] step:9041/10000 train_time:838996ms step_avg:92.80ms +[2025-08-22 12:50:32] [Rank 0] step:9061/10000 train_time:840957ms step_avg:92.81ms +[2025-08-22 12:50:32] [Rank 0] step:9061/10000 train_time:840957ms step_avg:92.81ms +[2025-08-22 12:50:34] [Rank 0] step:9081/10000 train_time:842916ms step_avg:92.82ms +[2025-08-22 12:50:34] [Rank 0] step:9081/10000 train_time:842916ms step_avg:92.82ms +[2025-08-22 12:50:36] [Rank 0] step:9101/10000 train_time:844886ms step_avg:92.83ms +[2025-08-22 12:50:36] [Rank 0] step:9101/10000 train_time:844886ms step_avg:92.83ms +[2025-08-22 12:50:38] [Rank 0] step:9121/10000 train_time:846845ms step_avg:92.85ms +[2025-08-22 12:50:38] [Rank 0] step:9121/10000 train_time:846845ms step_avg:92.85ms +[2025-08-22 12:50:40] [Rank 0] step:9141/10000 train_time:848791ms step_avg:92.86ms +[2025-08-22 12:50:40] [Rank 0] step:9141/10000 train_time:848791ms step_avg:92.86ms +[2025-08-22 12:50:42] [Rank 0] step:9161/10000 train_time:850744ms step_avg:92.87ms +[2025-08-22 12:50:42] [Rank 0] step:9161/10000 train_time:850744ms step_avg:92.87ms +[2025-08-22 12:50:44] [Rank 0] step:9181/10000 train_time:852732ms step_avg:92.88ms +[2025-08-22 12:50:44] [Rank 0] step:9181/10000 train_time:852732ms step_avg:92.88ms +[2025-08-22 12:50:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:50:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:50:59] [Rank 0] PRINT: step:9200/10000 val_loss:3.5630 svd_entropy: attn_qk:H=0.8017,top10E=0.22,eRank=226.2,q75/q25=42.69 attn_vo:H=0.8324,top10E=0.05,eRank=399.1,q75/q25=inf mlp_w1:H=0.9237,top10E=0.11,eRank=467.6,q75/q25=4.07 mlp_w2:H=0.9666,top10E=0.05,eRank=616.0,q75/q25=3.03 vo_prod:H=0.6920,top10E=0.09,eRank=226.7,q75/q25=inf train_time:854698ms step_avg:92.90ms +[2025-08-22 12:50:59] [Rank 0] PRINT: step:9200/10000 val_loss:3.5630 svd_entropy: attn_qk:H=0.8017,top10E=0.22,eRank=226.2,q75/q25=42.69 attn_vo:H=0.8324,top10E=0.05,eRank=399.1,q75/q25=inf mlp_w1:H=0.9237,top10E=0.11,eRank=467.6,q75/q25=4.07 mlp_w2:H=0.9666,top10E=0.05,eRank=616.0,q75/q25=3.03 vo_prod:H=0.6920,top10E=0.09,eRank=226.7,q75/q25=inf train_time:854698ms step_avg:92.90ms +[2025-08-22 12:50:59] [Rank 0] step:9201/10000 train_time:854719ms step_avg:92.89ms +[2025-08-22 12:50:59] [Rank 0] step:9201/10000 train_time:854719ms step_avg:92.89ms +[2025-08-22 12:51:01] [Rank 0] step:9221/10000 train_time:856680ms step_avg:92.91ms +[2025-08-22 12:51:01] [Rank 0] step:9221/10000 train_time:856680ms step_avg:92.91ms +[2025-08-22 12:51:03] [Rank 0] step:9241/10000 train_time:858643ms step_avg:92.92ms +[2025-08-22 12:51:03] [Rank 0] step:9241/10000 train_time:858643ms step_avg:92.92ms +[2025-08-22 12:51:05] [Rank 0] step:9261/10000 train_time:860605ms step_avg:92.93ms +[2025-08-22 12:51:05] [Rank 0] step:9261/10000 train_time:860605ms step_avg:92.93ms +[2025-08-22 12:51:07] [Rank 0] step:9281/10000 train_time:862554ms step_avg:92.94ms +[2025-08-22 12:51:07] [Rank 0] step:9281/10000 train_time:862554ms step_avg:92.94ms +[2025-08-22 12:51:09] [Rank 0] step:9301/10000 train_time:864507ms step_avg:92.95ms +[2025-08-22 12:51:09] [Rank 0] step:9301/10000 train_time:864507ms step_avg:92.95ms +[2025-08-22 12:51:11] [Rank 0] step:9321/10000 train_time:866468ms step_avg:92.96ms +[2025-08-22 12:51:11] [Rank 0] step:9321/10000 train_time:866468ms step_avg:92.96ms +[2025-08-22 12:51:13] [Rank 0] 
step:9341/10000 train_time:868427ms step_avg:92.97ms +[2025-08-22 12:51:13] [Rank 0] step:9341/10000 train_time:868427ms step_avg:92.97ms +[2025-08-22 12:51:15] [Rank 0] step:9361/10000 train_time:870391ms step_avg:92.98ms +[2025-08-22 12:51:15] [Rank 0] step:9361/10000 train_time:870391ms step_avg:92.98ms +[2025-08-22 12:51:17] [Rank 0] step:9381/10000 train_time:872362ms step_avg:92.99ms +[2025-08-22 12:51:17] [Rank 0] step:9381/10000 train_time:872362ms step_avg:92.99ms +[2025-08-22 12:51:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:51:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:51:33] [Rank 0] PRINT: step:9400/10000 val_loss:3.5531 svd_entropy: attn_qk:H=0.8018,top10E=0.22,eRank=226.3,q75/q25=42.61 attn_vo:H=0.8324,top10E=0.05,eRank=399.1,q75/q25=inf mlp_w1:H=0.9239,top10E=0.11,eRank=468.0,q75/q25=4.07 mlp_w2:H=0.9666,top10E=0.05,eRank=615.9,q75/q25=3.03 vo_prod:H=0.6921,top10E=0.09,eRank=226.9,q75/q25=inf train_time:874340ms step_avg:93.01ms +[2025-08-22 12:51:33] [Rank 0] PRINT: step:9400/10000 val_loss:3.5531 svd_entropy: attn_qk:H=0.8018,top10E=0.22,eRank=226.3,q75/q25=42.61 attn_vo:H=0.8324,top10E=0.05,eRank=399.1,q75/q25=inf mlp_w1:H=0.9239,top10E=0.11,eRank=468.0,q75/q25=4.07 mlp_w2:H=0.9666,top10E=0.05,eRank=615.9,q75/q25=3.03 vo_prod:H=0.6921,top10E=0.09,eRank=226.9,q75/q25=inf train_time:874340ms step_avg:93.01ms +[2025-08-22 12:51:33] [Rank 0] step:9401/10000 train_time:874359ms step_avg:93.01ms +[2025-08-22 12:51:33] [Rank 0] step:9401/10000 train_time:874359ms step_avg:93.01ms +[2025-08-22 12:51:35] [Rank 0] step:9421/10000 train_time:876318ms step_avg:93.02ms +[2025-08-22 12:51:35] [Rank 0] step:9421/10000 train_time:876318ms step_avg:93.02ms +[2025-08-22 12:51:37] [Rank 0] step:9441/10000 train_time:878276ms step_avg:93.03ms +[2025-08-22 
12:51:37] [Rank 0] step:9441/10000 train_time:878276ms step_avg:93.03ms +[2025-08-22 12:51:39] [Rank 0] step:9461/10000 train_time:880242ms step_avg:93.04ms +[2025-08-22 12:51:39] [Rank 0] step:9461/10000 train_time:880242ms step_avg:93.04ms +[2025-08-22 12:51:41] [Rank 0] step:9481/10000 train_time:882208ms step_avg:93.05ms +[2025-08-22 12:51:41] [Rank 0] step:9481/10000 train_time:882208ms step_avg:93.05ms +[2025-08-22 12:51:43] [Rank 0] step:9501/10000 train_time:884180ms step_avg:93.06ms +[2025-08-22 12:51:43] [Rank 0] step:9501/10000 train_time:884180ms step_avg:93.06ms +[2025-08-22 12:51:45] [Rank 0] step:9521/10000 train_time:886134ms step_avg:93.07ms +[2025-08-22 12:51:45] [Rank 0] step:9521/10000 train_time:886134ms step_avg:93.07ms +[2025-08-22 12:51:47] [Rank 0] step:9541/10000 train_time:888094ms step_avg:93.08ms +[2025-08-22 12:51:47] [Rank 0] step:9541/10000 train_time:888094ms step_avg:93.08ms +[2025-08-22 12:51:49] [Rank 0] step:9561/10000 train_time:890048ms step_avg:93.09ms +[2025-08-22 12:51:49] [Rank 0] step:9561/10000 train_time:890048ms step_avg:93.09ms +[2025-08-22 12:51:51] [Rank 0] step:9581/10000 train_time:892009ms step_avg:93.10ms +[2025-08-22 12:51:51] [Rank 0] step:9581/10000 train_time:892009ms step_avg:93.10ms +[2025-08-22 12:51:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:51:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:52:07] [Rank 0] PRINT: step:9600/10000 val_loss:3.5442 svd_entropy: attn_qk:H=0.8019,top10E=0.22,eRank=226.5,q75/q25=42.59 attn_vo:H=0.8324,top10E=0.05,eRank=399.2,q75/q25=inf mlp_w1:H=0.9240,top10E=0.11,eRank=468.3,q75/q25=4.07 mlp_w2:H=0.9666,top10E=0.05,eRank=615.9,q75/q25=3.03 vo_prod:H=0.6922,top10E=0.09,eRank=227.1,q75/q25=inf train_time:893998ms step_avg:93.12ms +[2025-08-22 12:52:07] [Rank 0] PRINT: step:9600/10000 val_loss:3.5442 svd_entropy: attn_qk:H=0.8019,top10E=0.22,eRank=226.5,q75/q25=42.59 attn_vo:H=0.8324,top10E=0.05,eRank=399.2,q75/q25=inf mlp_w1:H=0.9240,top10E=0.11,eRank=468.3,q75/q25=4.07 mlp_w2:H=0.9666,top10E=0.05,eRank=615.9,q75/q25=3.03 vo_prod:H=0.6922,top10E=0.09,eRank=227.1,q75/q25=inf train_time:893998ms step_avg:93.12ms +[2025-08-22 12:52:07] [Rank 0] step:9601/10000 train_time:894019ms step_avg:93.12ms +[2025-08-22 12:52:07] [Rank 0] step:9601/10000 train_time:894019ms step_avg:93.12ms +[2025-08-22 12:52:09] [Rank 0] step:9621/10000 train_time:895974ms step_avg:93.13ms +[2025-08-22 12:52:09] [Rank 0] step:9621/10000 train_time:895974ms step_avg:93.13ms +[2025-08-22 12:52:11] [Rank 0] step:9641/10000 train_time:897935ms step_avg:93.14ms +[2025-08-22 12:52:11] [Rank 0] step:9641/10000 train_time:897935ms step_avg:93.14ms +[2025-08-22 12:52:13] [Rank 0] step:9661/10000 train_time:899921ms step_avg:93.15ms +[2025-08-22 12:52:13] [Rank 0] step:9661/10000 train_time:899921ms step_avg:93.15ms +[2025-08-22 12:52:15] [Rank 0] step:9681/10000 train_time:901899ms step_avg:93.16ms +[2025-08-22 12:52:15] [Rank 0] step:9681/10000 train_time:901899ms step_avg:93.16ms +[2025-08-22 12:52:17] [Rank 0] step:9701/10000 train_time:903893ms step_avg:93.18ms +[2025-08-22 12:52:17] [Rank 0] step:9701/10000 train_time:903893ms step_avg:93.18ms +[2025-08-22 12:52:19] [Rank 0] step:9721/10000 train_time:905873ms step_avg:93.19ms +[2025-08-22 12:52:19] [Rank 0] step:9721/10000 train_time:905873ms step_avg:93.19ms +[2025-08-22 12:52:21] [Rank 0] 
step:9741/10000 train_time:907872ms step_avg:93.20ms +[2025-08-22 12:52:21] [Rank 0] step:9741/10000 train_time:907872ms step_avg:93.20ms +[2025-08-22 12:52:23] [Rank 0] step:9761/10000 train_time:909860ms step_avg:93.21ms +[2025-08-22 12:52:23] [Rank 0] step:9761/10000 train_time:909860ms step_avg:93.21ms +[2025-08-22 12:52:25] [Rank 0] step:9781/10000 train_time:911854ms step_avg:93.23ms +[2025-08-22 12:52:25] [Rank 0] step:9781/10000 train_time:911854ms step_avg:93.23ms +[2025-08-22 12:52:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:52:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:52:40] [Rank 0] PRINT: step:9800/10000 val_loss:3.5353 svd_entropy: attn_qk:H=0.8019,top10E=0.22,eRank=226.5,q75/q25=42.53 attn_vo:H=0.8324,top10E=0.05,eRank=399.3,q75/q25=inf mlp_w1:H=0.9241,top10E=0.11,eRank=468.6,q75/q25=4.06 mlp_w2:H=0.9666,top10E=0.05,eRank=615.8,q75/q25=3.03 vo_prod:H=0.6923,top10E=0.09,eRank=227.2,q75/q25=inf train_time:913869ms step_avg:93.25ms +[2025-08-22 12:52:40] [Rank 0] PRINT: step:9800/10000 val_loss:3.5353 svd_entropy: attn_qk:H=0.8019,top10E=0.22,eRank=226.5,q75/q25=42.53 attn_vo:H=0.8324,top10E=0.05,eRank=399.3,q75/q25=inf mlp_w1:H=0.9241,top10E=0.11,eRank=468.6,q75/q25=4.06 mlp_w2:H=0.9666,top10E=0.05,eRank=615.8,q75/q25=3.03 vo_prod:H=0.6923,top10E=0.09,eRank=227.2,q75/q25=inf train_time:913869ms step_avg:93.25ms +[2025-08-22 12:52:41] [Rank 0] step:9801/10000 train_time:913887ms step_avg:93.24ms +[2025-08-22 12:52:41] [Rank 0] step:9801/10000 train_time:913887ms step_avg:93.24ms +[2025-08-22 12:52:43] [Rank 0] step:9821/10000 train_time:915872ms step_avg:93.26ms +[2025-08-22 12:52:43] [Rank 0] step:9821/10000 train_time:915872ms step_avg:93.26ms +[2025-08-22 12:52:45] [Rank 0] step:9841/10000 train_time:917861ms step_avg:93.27ms +[2025-08-22 
12:52:45] [Rank 0] step:9841/10000 train_time:917861ms step_avg:93.27ms +[2025-08-22 12:52:47] [Rank 0] step:9861/10000 train_time:919831ms step_avg:93.28ms +[2025-08-22 12:52:47] [Rank 0] step:9861/10000 train_time:919831ms step_avg:93.28ms +[2025-08-22 12:52:48] [Rank 0] step:9881/10000 train_time:921806ms step_avg:93.29ms +[2025-08-22 12:52:48] [Rank 0] step:9881/10000 train_time:921806ms step_avg:93.29ms +[2025-08-22 12:52:50] [Rank 0] step:9901/10000 train_time:923800ms step_avg:93.30ms +[2025-08-22 12:52:50] [Rank 0] step:9901/10000 train_time:923800ms step_avg:93.30ms +[2025-08-22 12:52:52] [Rank 0] step:9921/10000 train_time:925778ms step_avg:93.31ms +[2025-08-22 12:52:52] [Rank 0] step:9921/10000 train_time:925778ms step_avg:93.31ms +[2025-08-22 12:52:54] [Rank 0] step:9941/10000 train_time:927774ms step_avg:93.33ms +[2025-08-22 12:52:54] [Rank 0] step:9941/10000 train_time:927774ms step_avg:93.33ms +[2025-08-22 12:52:56] [Rank 0] step:9961/10000 train_time:929748ms step_avg:93.34ms +[2025-08-22 12:52:56] [Rank 0] step:9961/10000 train_time:929748ms step_avg:93.34ms +[2025-08-22 12:52:58] [Rank 0] step:9981/10000 train_time:931738ms step_avg:93.35ms +[2025-08-22 12:52:58] [Rank 0] step:9981/10000 train_time:931738ms step_avg:93.35ms +[2025-08-22 12:53:00] [Rank 0] step:10000/10000 train_time:933631ms step_avg:93.36ms +[2025-08-22 12:53:00] [Rank 0] step:10000/10000 train_time:933631ms step_avg:93.36ms +[2025-08-22 12:53:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:53:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:53:14] [Rank 0] PRINT: step:10000/10000 val_loss:3.5280 svd_entropy: attn_qk:H=0.8020,top10E=0.22,eRank=226.5,q75/q25=42.46 attn_vo:H=0.8324,top10E=0.05,eRank=399.3,q75/q25=inf mlp_w1:H=0.9241,top10E=0.11,eRank=468.8,q75/q25=4.06 mlp_w2:H=0.9666,top10E=0.05,eRank=615.8,q75/q25=3.03 vo_prod:H=0.6923,top10E=0.09,eRank=227.3,q75/q25=inf train_time:933748ms step_avg:93.37ms +[2025-08-22 12:53:14] [Rank 0] PRINT: step:10000/10000 val_loss:3.5280 svd_entropy: attn_qk:H=0.8020,top10E=0.22,eRank=226.5,q75/q25=42.46 attn_vo:H=0.8324,top10E=0.05,eRank=399.3,q75/q25=inf mlp_w1:H=0.9241,top10E=0.11,eRank=468.8,q75/q25=4.06 mlp_w2:H=0.9666,top10E=0.05,eRank=615.8,q75/q25=3.03 vo_prod:H=0.6923,top10E=0.09,eRank=227.3,q75/q25=inf train_time:933748ms step_avg:93.37ms +[2025-08-22 12:53:14] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 12:53:14 2025 --- +[2025-08-22 12:53:14] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 12:53:14 2025 --- +[2025-08-22 12:53:14] [Rank 0] PRINT: Peak memory allocated: 11393 MiB reserved: 16056 MiB +[2025-08-22 12:53:14] [Rank 0] PRINT: Peak memory allocated: 11393 MiB reserved: 16056 MiB diff --git a/logs_svd_gated/mode_8_param_gated_seed_42/config.json b/logs_svd_gated/mode_8_param_gated_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..6d2aa9c5076d21a558f898326ca6b8d6440d4e4e --- /dev/null +++ b/logs_svd_gated/mode_8_param_gated_seed_42/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 8, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + "cooldown_frac": 
0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "1914c200-8f31-47b0-8237-8eabf26521bf", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_8_param_gated_seed_42/training_log_1914c200-8f31-47b0-8237-8eabf26521bf.txt b/logs_svd_gated/mode_8_param_gated_seed_42/training_log_1914c200-8f31-47b0-8237-8eabf26521bf.txt new file mode 100644 index 0000000000000000000000000000000000000000..4145401a53f82a24c67bb57c5602a56193068114 --- /dev/null +++ b/logs_svd_gated/mode_8_param_gated_seed_42/training_log_1914c200-8f31-47b0-8237-8eabf26521bf.txt @@ -0,0 +1,2926 @@ +[2025-08-22 17:38:52] [Rank 0] PRINT: --- Script Start: Fri Aug 22 17:38:52 2025 --- +[2025-08-22 17:38:52] [Rank 0] PRINT: --- Script Start: Fri Aug 22 17:38:52 2025 --- +[2025-08-22 17:38:52] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=8, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 17:38:52] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=8, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 17:38:52] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 17:38:52] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 17:38:52] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 17:38:52] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 17:38:52] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_8_param_gated_seed_42 +[2025-08-22 17:38:52] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_8_param_gated_seed_42 +[2025-08-22 17:38:52] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import 
argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k 
= min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 17:38:52] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 17:38:52] [Rank 0] PRINT: Constructing model... +[2025-08-22 17:38:52] [Rank 0] PRINT: Constructing model... +[2025-08-22 17:38:54] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 17:38:54] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 17:38:54] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 17:38:54] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 17:38:54] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 17:38:54] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 17:38:54] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 8 +[2025-08-22 17:38:54] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 8 +[2025-08-22 17:38:54] [Rank 0] PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: 0.05). +[2025-08-22 17:38:54] [Rank 0] PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: 0.05). +[2025-08-22 17:38:54] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 17:38:54] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 17:38:54] [Rank 0] PRINT: Muon optimizer is active with 34 parameters. +[2025-08-22 17:38:54] [Rank 0] PRINT: Muon optimizer is active with 34 parameters. +[2025-08-22 17:38:54] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 17:38:54] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 17:38:54] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 17:38:54] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 17:38:54] [Rank 0] PRINT: Starting warmup... +[2025-08-22 17:38:54] [Rank 0] PRINT: Starting warmup... +[2025-08-22 17:39:37] [Rank 0] PRINT: Warmup complete. +[2025-08-22 17:39:37] [Rank 0] PRINT: Warmup complete. +[2025-08-22 17:39:37] [Rank 0] PRINT: Starting training... +[2025-08-22 17:39:37] [Rank 0] PRINT: Starting training... 
+[2025-08-22 17:39:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:39:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:39:55] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 17:39:55] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 17:39:57] [Rank 0] step:21/10000 train_time:1767ms step_avg:84.16ms +[2025-08-22 17:39:57] [Rank 0] step:21/10000 train_time:1767ms step_avg:84.16ms +[2025-08-22 17:39:59] [Rank 0] step:41/10000 train_time:3479ms step_avg:84.85ms +[2025-08-22 17:39:59] [Rank 0] step:41/10000 train_time:3479ms step_avg:84.85ms +[2025-08-22 17:40:00] [Rank 0] step:61/10000 train_time:5191ms step_avg:85.09ms +[2025-08-22 17:40:00] [Rank 0] step:61/10000 train_time:5191ms step_avg:85.09ms +[2025-08-22 17:40:02] [Rank 0] step:81/10000 train_time:6905ms step_avg:85.25ms +[2025-08-22 17:40:02] [Rank 0] step:81/10000 train_time:6905ms step_avg:85.25ms +[2025-08-22 17:40:04] [Rank 0] step:101/10000 train_time:8622ms step_avg:85.37ms +[2025-08-22 17:40:04] [Rank 0] step:101/10000 train_time:8622ms step_avg:85.37ms +[2025-08-22 17:40:05] [Rank 0] step:121/10000 train_time:10337ms step_avg:85.43ms +[2025-08-22 17:40:05] [Rank 0] step:121/10000 
train_time:10337ms step_avg:85.43ms +[2025-08-22 17:40:07] [Rank 0] step:141/10000 train_time:12054ms step_avg:85.49ms +[2025-08-22 17:40:07] [Rank 0] step:141/10000 train_time:12054ms step_avg:85.49ms +[2025-08-22 17:40:09] [Rank 0] step:161/10000 train_time:13770ms step_avg:85.53ms +[2025-08-22 17:40:09] [Rank 0] step:161/10000 train_time:13770ms step_avg:85.53ms +[2025-08-22 17:40:11] [Rank 0] step:181/10000 train_time:15487ms step_avg:85.56ms +[2025-08-22 17:40:11] [Rank 0] step:181/10000 train_time:15487ms step_avg:85.56ms +[2025-08-22 17:40:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:40:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:40:26] [Rank 0] PRINT: step:200/10000 val_loss:5.8538 svd_entropy: attn_qk:H=0.7051,top10E=0.39,eRank=150.2,q75/q25=19.45 attn_vo:H=0.7055,top10E=0.19,eRank=182.4,q75/q25=inf mlp_w1:H=0.6373,top10E=0.51,eRank=77.4,q75/q25=8.31 mlp_w2:H=0.8807,top10E=0.10,eRank=350.8,q75/q25=12.10 vo_prod:H=0.4749,top10E=0.43,eRank=50.2,q75/q25=inf train_time:17216ms step_avg:86.08ms +[2025-08-22 17:40:26] [Rank 0] PRINT: step:200/10000 val_loss:5.8538 svd_entropy: attn_qk:H=0.7051,top10E=0.39,eRank=150.2,q75/q25=19.45 attn_vo:H=0.7055,top10E=0.19,eRank=182.4,q75/q25=inf mlp_w1:H=0.6373,top10E=0.51,eRank=77.4,q75/q25=8.31 mlp_w2:H=0.8807,top10E=0.10,eRank=350.8,q75/q25=12.10 vo_prod:H=0.4749,top10E=0.43,eRank=50.2,q75/q25=inf train_time:17216ms step_avg:86.08ms +[2025-08-22 17:40:26] [Rank 0] step:201/10000 train_time:17236ms step_avg:85.75ms +[2025-08-22 17:40:26] [Rank 0] step:201/10000 train_time:17236ms step_avg:85.75ms +[2025-08-22 17:40:28] [Rank 0] step:221/10000 train_time:18947ms step_avg:85.73ms +[2025-08-22 17:40:28] [Rank 0] step:221/10000 train_time:18947ms step_avg:85.73ms +[2025-08-22 17:40:29] [Rank 0] step:241/10000 
train_time:20662ms step_avg:85.73ms +[2025-08-22 17:40:29] [Rank 0] step:241/10000 train_time:20662ms step_avg:85.73ms +[2025-08-22 17:40:31] [Rank 0] step:261/10000 train_time:22374ms step_avg:85.73ms +[2025-08-22 17:40:31] [Rank 0] step:261/10000 train_time:22374ms step_avg:85.73ms +[2025-08-22 17:40:33] [Rank 0] step:281/10000 train_time:24088ms step_avg:85.72ms +[2025-08-22 17:40:33] [Rank 0] step:281/10000 train_time:24088ms step_avg:85.72ms +[2025-08-22 17:40:34] [Rank 0] step:301/10000 train_time:25800ms step_avg:85.71ms +[2025-08-22 17:40:34] [Rank 0] step:301/10000 train_time:25800ms step_avg:85.71ms +[2025-08-22 17:40:36] [Rank 0] step:321/10000 train_time:27511ms step_avg:85.71ms +[2025-08-22 17:40:36] [Rank 0] step:321/10000 train_time:27511ms step_avg:85.71ms +[2025-08-22 17:40:38] [Rank 0] step:341/10000 train_time:29225ms step_avg:85.70ms +[2025-08-22 17:40:38] [Rank 0] step:341/10000 train_time:29225ms step_avg:85.70ms +[2025-08-22 17:40:40] [Rank 0] step:361/10000 train_time:30936ms step_avg:85.69ms +[2025-08-22 17:40:40] [Rank 0] step:361/10000 train_time:30936ms step_avg:85.69ms +[2025-08-22 17:40:41] [Rank 0] step:381/10000 train_time:32649ms step_avg:85.69ms +[2025-08-22 17:40:41] [Rank 0] step:381/10000 train_time:32649ms step_avg:85.69ms +[2025-08-22 17:40:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:40:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:40:56] [Rank 0] PRINT: step:400/10000 val_loss:5.3318 svd_entropy: attn_qk:H=0.7254,top10E=0.33,eRank=160.6,q75/q25=29.75 attn_vo:H=0.7435,top10E=0.13,eRank=224.6,q75/q25=inf mlp_w1:H=0.7992,top10E=0.28,eRank=205.3,q75/q25=6.91 mlp_w2:H=0.9631,top10E=0.05,eRank=601.8,q75/q25=3.26 vo_prod:H=0.5502,top10E=0.26,eRank=80.2,q75/q25=inf train_time:34373ms step_avg:85.93ms +[2025-08-22 17:40:56] [Rank 0] PRINT: step:400/10000 val_loss:5.3318 svd_entropy: attn_qk:H=0.7254,top10E=0.33,eRank=160.6,q75/q25=29.75 attn_vo:H=0.7435,top10E=0.13,eRank=224.6,q75/q25=inf mlp_w1:H=0.7992,top10E=0.28,eRank=205.3,q75/q25=6.91 mlp_w2:H=0.9631,top10E=0.05,eRank=601.8,q75/q25=3.26 vo_prod:H=0.5502,top10E=0.26,eRank=80.2,q75/q25=inf train_time:34373ms step_avg:85.93ms +[2025-08-22 17:40:57] [Rank 0] step:401/10000 train_time:34392ms step_avg:85.77ms +[2025-08-22 17:40:57] [Rank 0] step:401/10000 train_time:34392ms step_avg:85.77ms +[2025-08-22 17:40:58] [Rank 0] step:421/10000 train_time:36091ms step_avg:85.73ms +[2025-08-22 17:40:58] [Rank 0] step:421/10000 train_time:36091ms step_avg:85.73ms +[2025-08-22 17:41:00] [Rank 0] step:441/10000 train_time:37797ms step_avg:85.71ms +[2025-08-22 17:41:00] [Rank 0] step:441/10000 train_time:37797ms step_avg:85.71ms +[2025-08-22 17:41:02] [Rank 0] step:461/10000 train_time:39502ms step_avg:85.69ms +[2025-08-22 17:41:02] [Rank 0] step:461/10000 train_time:39502ms step_avg:85.69ms +[2025-08-22 17:41:03] [Rank 0] step:481/10000 train_time:41209ms step_avg:85.67ms +[2025-08-22 17:41:03] [Rank 0] step:481/10000 train_time:41209ms step_avg:85.67ms +[2025-08-22 17:41:05] [Rank 0] step:501/10000 train_time:42917ms step_avg:85.66ms +[2025-08-22 17:41:05] [Rank 0] step:501/10000 train_time:42917ms step_avg:85.66ms +[2025-08-22 17:41:07] [Rank 0] step:521/10000 train_time:44624ms step_avg:85.65ms +[2025-08-22 17:41:07] [Rank 0] step:521/10000 train_time:44624ms step_avg:85.65ms +[2025-08-22 17:41:09] [Rank 0] step:541/10000 train_time:46332ms 
step_avg:85.64ms +[2025-08-22 17:41:09] [Rank 0] step:541/10000 train_time:46332ms step_avg:85.64ms +[2025-08-22 17:41:10] [Rank 0] step:561/10000 train_time:48040ms step_avg:85.63ms +[2025-08-22 17:41:10] [Rank 0] step:561/10000 train_time:48040ms step_avg:85.63ms +[2025-08-22 17:41:12] [Rank 0] step:581/10000 train_time:49749ms step_avg:85.63ms +[2025-08-22 17:41:12] [Rank 0] step:581/10000 train_time:49749ms step_avg:85.63ms +[2025-08-22 17:41:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:41:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:41:27] [Rank 0] PRINT: step:600/10000 val_loss:5.0708 svd_entropy: attn_qk:H=0.7388,top10E=0.30,eRank=169.1,q75/q25=35.84 attn_vo:H=0.7806,top10E=0.09,eRank=280.8,q75/q25=inf mlp_w1:H=0.8367,top10E=0.23,eRank=262.9,q75/q25=5.70 mlp_w2:H=0.9691,top10E=0.04,eRank=625.6,q75/q25=2.96 vo_prod:H=0.6005,top10E=0.19,eRank=112.8,q75/q25=inf train_time:51470ms step_avg:85.78ms +[2025-08-22 17:41:27] [Rank 0] PRINT: step:600/10000 val_loss:5.0708 svd_entropy: attn_qk:H=0.7388,top10E=0.30,eRank=169.1,q75/q25=35.84 attn_vo:H=0.7806,top10E=0.09,eRank=280.8,q75/q25=inf mlp_w1:H=0.8367,top10E=0.23,eRank=262.9,q75/q25=5.70 mlp_w2:H=0.9691,top10E=0.04,eRank=625.6,q75/q25=2.96 vo_prod:H=0.6005,top10E=0.19,eRank=112.8,q75/q25=inf train_time:51470ms step_avg:85.78ms +[2025-08-22 17:41:27] [Rank 0] step:601/10000 train_time:51488ms step_avg:85.67ms +[2025-08-22 17:41:27] [Rank 0] step:601/10000 train_time:51488ms step_avg:85.67ms +[2025-08-22 17:41:29] [Rank 0] step:621/10000 train_time:53190ms step_avg:85.65ms +[2025-08-22 17:41:29] [Rank 0] step:621/10000 train_time:53190ms step_avg:85.65ms +[2025-08-22 17:41:31] [Rank 0] step:641/10000 train_time:54895ms step_avg:85.64ms +[2025-08-22 17:41:31] [Rank 0] step:641/10000 train_time:54895ms 
step_avg:85.64ms +[2025-08-22 17:41:32] [Rank 0] step:661/10000 train_time:56600ms step_avg:85.63ms +[2025-08-22 17:41:32] [Rank 0] step:661/10000 train_time:56600ms step_avg:85.63ms +[2025-08-22 17:41:34] [Rank 0] step:681/10000 train_time:58306ms step_avg:85.62ms +[2025-08-22 17:41:34] [Rank 0] step:681/10000 train_time:58306ms step_avg:85.62ms +[2025-08-22 17:41:36] [Rank 0] step:701/10000 train_time:60013ms step_avg:85.61ms +[2025-08-22 17:41:36] [Rank 0] step:701/10000 train_time:60013ms step_avg:85.61ms +[2025-08-22 17:41:37] [Rank 0] step:721/10000 train_time:61720ms step_avg:85.60ms +[2025-08-22 17:41:37] [Rank 0] step:721/10000 train_time:61720ms step_avg:85.60ms +[2025-08-22 17:41:39] [Rank 0] step:741/10000 train_time:63429ms step_avg:85.60ms +[2025-08-22 17:41:39] [Rank 0] step:741/10000 train_time:63429ms step_avg:85.60ms +[2025-08-22 17:41:41] [Rank 0] step:761/10000 train_time:65151ms step_avg:85.61ms +[2025-08-22 17:41:41] [Rank 0] step:761/10000 train_time:65151ms step_avg:85.61ms +[2025-08-22 17:41:43] [Rank 0] step:781/10000 train_time:66874ms step_avg:85.63ms +[2025-08-22 17:41:43] [Rank 0] step:781/10000 train_time:66874ms step_avg:85.63ms +[2025-08-22 17:41:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:41:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:41:58] [Rank 0] PRINT: step:800/10000 val_loss:4.8488 svd_entropy: attn_qk:H=0.7478,top10E=0.29,eRank=175.5,q75/q25=40.28 attn_vo:H=0.7979,top10E=0.08,eRank=314.3,q75/q25=inf mlp_w1:H=0.8534,top10E=0.20,eRank=293.6,q75/q25=5.30 mlp_w2:H=0.9694,top10E=0.04,eRank=627.0,q75/q25=2.95 vo_prod:H=0.6273,top10E=0.16,eRank=137.2,q75/q25=inf train_time:68608ms step_avg:85.76ms +[2025-08-22 17:41:58] [Rank 0] PRINT: step:800/10000 val_loss:4.8488 svd_entropy: attn_qk:H=0.7478,top10E=0.29,eRank=175.5,q75/q25=40.28 attn_vo:H=0.7979,top10E=0.08,eRank=314.3,q75/q25=inf mlp_w1:H=0.8534,top10E=0.20,eRank=293.6,q75/q25=5.30 mlp_w2:H=0.9694,top10E=0.04,eRank=627.0,q75/q25=2.95 vo_prod:H=0.6273,top10E=0.16,eRank=137.2,q75/q25=inf train_time:68608ms step_avg:85.76ms +[2025-08-22 17:41:58] [Rank 0] step:801/10000 train_time:68630ms step_avg:85.68ms +[2025-08-22 17:41:58] [Rank 0] step:801/10000 train_time:68630ms step_avg:85.68ms +[2025-08-22 17:42:00] [Rank 0] step:821/10000 train_time:70339ms step_avg:85.68ms +[2025-08-22 17:42:00] [Rank 0] step:821/10000 train_time:70339ms step_avg:85.68ms +[2025-08-22 17:42:01] [Rank 0] step:841/10000 train_time:72061ms step_avg:85.68ms +[2025-08-22 17:42:01] [Rank 0] step:841/10000 train_time:72061ms step_avg:85.68ms +[2025-08-22 17:42:03] [Rank 0] step:861/10000 train_time:73782ms step_avg:85.69ms +[2025-08-22 17:42:03] [Rank 0] step:861/10000 train_time:73782ms step_avg:85.69ms +[2025-08-22 17:42:05] [Rank 0] step:881/10000 train_time:75504ms step_avg:85.70ms +[2025-08-22 17:42:05] [Rank 0] step:881/10000 train_time:75504ms step_avg:85.70ms +[2025-08-22 17:42:07] [Rank 0] step:901/10000 train_time:77229ms step_avg:85.71ms +[2025-08-22 17:42:07] [Rank 0] step:901/10000 train_time:77229ms step_avg:85.71ms +[2025-08-22 17:42:08] [Rank 0] step:921/10000 train_time:78953ms step_avg:85.73ms +[2025-08-22 17:42:08] [Rank 0] step:921/10000 train_time:78953ms step_avg:85.73ms +[2025-08-22 17:42:10] [Rank 0] step:941/10000 train_time:80678ms 
step_avg:85.74ms +[2025-08-22 17:42:10] [Rank 0] step:941/10000 train_time:80678ms step_avg:85.74ms +[2025-08-22 17:42:12] [Rank 0] step:961/10000 train_time:82404ms step_avg:85.75ms +[2025-08-22 17:42:12] [Rank 0] step:961/10000 train_time:82404ms step_avg:85.75ms +[2025-08-22 17:42:13] [Rank 0] step:981/10000 train_time:84130ms step_avg:85.76ms +[2025-08-22 17:42:13] [Rank 0] step:981/10000 train_time:84130ms step_avg:85.76ms +[2025-08-22 17:42:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:42:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:42:29] [Rank 0] PRINT: step:1000/10000 val_loss:4.7046 svd_entropy: attn_qk:H=0.7552,top10E=0.28,eRank=181.1,q75/q25=42.39 attn_vo:H=0.8079,top10E=0.07,eRank=336.3,q75/q25=inf mlp_w1:H=0.8640,top10E=0.19,eRank=314.9,q75/q25=5.12 mlp_w2:H=0.9693,top10E=0.04,eRank=626.6,q75/q25=2.95 vo_prod:H=0.6441,top10E=0.14,eRank=155.8,q75/q25=inf train_time:85868ms step_avg:85.87ms +[2025-08-22 17:42:29] [Rank 0] PRINT: step:1000/10000 val_loss:4.7046 svd_entropy: attn_qk:H=0.7552,top10E=0.28,eRank=181.1,q75/q25=42.39 attn_vo:H=0.8079,top10E=0.07,eRank=336.3,q75/q25=inf mlp_w1:H=0.8640,top10E=0.19,eRank=314.9,q75/q25=5.12 mlp_w2:H=0.9693,top10E=0.04,eRank=626.6,q75/q25=2.95 vo_prod:H=0.6441,top10E=0.14,eRank=155.8,q75/q25=inf train_time:85868ms step_avg:85.87ms +[2025-08-22 17:42:29] [Rank 0] step:1001/10000 train_time:85890ms step_avg:85.80ms +[2025-08-22 17:42:29] [Rank 0] step:1001/10000 train_time:85890ms step_avg:85.80ms +[2025-08-22 17:42:31] [Rank 0] step:1021/10000 train_time:87590ms step_avg:85.79ms +[2025-08-22 17:42:31] [Rank 0] step:1021/10000 train_time:87590ms step_avg:85.79ms +[2025-08-22 17:42:32] [Rank 0] step:1041/10000 train_time:89309ms step_avg:85.79ms +[2025-08-22 17:42:32] [Rank 0] step:1041/10000 train_time:89309ms 
step_avg:85.79ms +[2025-08-22 17:42:34] [Rank 0] step:1061/10000 train_time:91029ms step_avg:85.80ms +[2025-08-22 17:42:34] [Rank 0] step:1061/10000 train_time:91029ms step_avg:85.80ms +[2025-08-22 17:42:36] [Rank 0] step:1081/10000 train_time:92751ms step_avg:85.80ms +[2025-08-22 17:42:36] [Rank 0] step:1081/10000 train_time:92751ms step_avg:85.80ms +[2025-08-22 17:42:37] [Rank 0] step:1101/10000 train_time:94471ms step_avg:85.80ms +[2025-08-22 17:42:37] [Rank 0] step:1101/10000 train_time:94471ms step_avg:85.80ms +[2025-08-22 17:42:39] [Rank 0] step:1121/10000 train_time:96191ms step_avg:85.81ms +[2025-08-22 17:42:39] [Rank 0] step:1121/10000 train_time:96191ms step_avg:85.81ms +[2025-08-22 17:42:41] [Rank 0] step:1141/10000 train_time:97912ms step_avg:85.81ms +[2025-08-22 17:42:41] [Rank 0] step:1141/10000 train_time:97912ms step_avg:85.81ms +[2025-08-22 17:42:43] [Rank 0] step:1161/10000 train_time:99634ms step_avg:85.82ms +[2025-08-22 17:42:43] [Rank 0] step:1161/10000 train_time:99634ms step_avg:85.82ms +[2025-08-22 17:42:44] [Rank 0] step:1181/10000 train_time:101356ms step_avg:85.82ms +[2025-08-22 17:42:44] [Rank 0] step:1181/10000 train_time:101356ms step_avg:85.82ms +[2025-08-22 17:42:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:42:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:43:00] [Rank 0] PRINT: step:1200/10000 val_loss:4.5421 svd_entropy: attn_qk:H=0.7611,top10E=0.27,eRank=185.9,q75/q25=44.10 attn_vo:H=0.8150,top10E=0.07,eRank=352.9,q75/q25=inf mlp_w1:H=0.8716,top10E=0.18,eRank=330.9,q75/q25=5.02 mlp_w2:H=0.9691,top10E=0.04,eRank=625.6,q75/q25=2.95 vo_prod:H=0.6561,top10E=0.13,eRank=170.5,q75/q25=inf train_time:103091ms step_avg:85.91ms +[2025-08-22 17:43:00] [Rank 0] PRINT: step:1200/10000 val_loss:4.5421 svd_entropy: attn_qk:H=0.7611,top10E=0.27,eRank=185.9,q75/q25=44.10 attn_vo:H=0.8150,top10E=0.07,eRank=352.9,q75/q25=inf mlp_w1:H=0.8716,top10E=0.18,eRank=330.9,q75/q25=5.02 mlp_w2:H=0.9691,top10E=0.04,eRank=625.6,q75/q25=2.95 vo_prod:H=0.6561,top10E=0.13,eRank=170.5,q75/q25=inf train_time:103091ms step_avg:85.91ms +[2025-08-22 17:43:00] [Rank 0] step:1201/10000 train_time:103110ms step_avg:85.85ms +[2025-08-22 17:43:00] [Rank 0] step:1201/10000 train_time:103110ms step_avg:85.85ms +[2025-08-22 17:43:01] [Rank 0] step:1221/10000 train_time:104814ms step_avg:85.84ms +[2025-08-22 17:43:01] [Rank 0] step:1221/10000 train_time:104814ms step_avg:85.84ms +[2025-08-22 17:43:03] [Rank 0] step:1241/10000 train_time:106533ms step_avg:85.84ms +[2025-08-22 17:43:03] [Rank 0] step:1241/10000 train_time:106533ms step_avg:85.84ms +[2025-08-22 17:43:05] [Rank 0] step:1261/10000 train_time:108253ms step_avg:85.85ms +[2025-08-22 17:43:05] [Rank 0] step:1261/10000 train_time:108253ms step_avg:85.85ms +[2025-08-22 17:43:07] [Rank 0] step:1281/10000 train_time:109972ms step_avg:85.85ms +[2025-08-22 17:43:07] [Rank 0] step:1281/10000 train_time:109972ms step_avg:85.85ms +[2025-08-22 17:43:08] [Rank 0] step:1301/10000 train_time:111692ms step_avg:85.85ms +[2025-08-22 17:43:08] [Rank 0] step:1301/10000 train_time:111692ms step_avg:85.85ms +[2025-08-22 17:43:10] [Rank 0] step:1321/10000 train_time:113411ms step_avg:85.85ms +[2025-08-22 17:43:10] [Rank 0] step:1321/10000 train_time:113411ms step_avg:85.85ms +[2025-08-22 17:43:12] [Rank 0] 
step:1341/10000 train_time:115133ms step_avg:85.86ms +[2025-08-22 17:43:12] [Rank 0] step:1341/10000 train_time:115133ms step_avg:85.86ms +[2025-08-22 17:43:13] [Rank 0] step:1361/10000 train_time:116855ms step_avg:85.86ms +[2025-08-22 17:43:13] [Rank 0] step:1361/10000 train_time:116855ms step_avg:85.86ms +[2025-08-22 17:43:15] [Rank 0] step:1381/10000 train_time:118578ms step_avg:85.86ms +[2025-08-22 17:43:15] [Rank 0] step:1381/10000 train_time:118578ms step_avg:85.86ms +[2025-08-22 17:43:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:43:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:43:30] [Rank 0] PRINT: step:1400/10000 val_loss:4.4401 svd_entropy: attn_qk:H=0.7653,top10E=0.26,eRank=189.5,q75/q25=45.35 attn_vo:H=0.8189,top10E=0.07,eRank=362.6,q75/q25=inf mlp_w1:H=0.8775,top10E=0.17,eRank=344.2,q75/q25=4.94 mlp_w2:H=0.9689,top10E=0.05,eRank=624.7,q75/q25=2.95 vo_prod:H=0.6624,top10E=0.13,eRank=179.1,q75/q25=inf train_time:120314ms step_avg:85.94ms +[2025-08-22 17:43:30] [Rank 0] PRINT: step:1400/10000 val_loss:4.4401 svd_entropy: attn_qk:H=0.7653,top10E=0.26,eRank=189.5,q75/q25=45.35 attn_vo:H=0.8189,top10E=0.07,eRank=362.6,q75/q25=inf mlp_w1:H=0.8775,top10E=0.17,eRank=344.2,q75/q25=4.94 mlp_w2:H=0.9689,top10E=0.05,eRank=624.7,q75/q25=2.95 vo_prod:H=0.6624,top10E=0.13,eRank=179.1,q75/q25=inf train_time:120314ms step_avg:85.94ms +[2025-08-22 17:43:31] [Rank 0] step:1401/10000 train_time:120334ms step_avg:85.89ms +[2025-08-22 17:43:31] [Rank 0] step:1401/10000 train_time:120334ms step_avg:85.89ms +[2025-08-22 17:43:32] [Rank 0] step:1421/10000 train_time:122037ms step_avg:85.88ms +[2025-08-22 17:43:32] [Rank 0] step:1421/10000 train_time:122037ms step_avg:85.88ms +[2025-08-22 17:43:34] [Rank 0] step:1441/10000 train_time:123755ms step_avg:85.88ms +[2025-08-22 
17:43:34] [Rank 0] step:1441/10000 train_time:123755ms step_avg:85.88ms +[2025-08-22 17:43:36] [Rank 0] step:1461/10000 train_time:125475ms step_avg:85.88ms +[2025-08-22 17:43:36] [Rank 0] step:1461/10000 train_time:125475ms step_avg:85.88ms +[2025-08-22 17:43:37] [Rank 0] step:1481/10000 train_time:127197ms step_avg:85.89ms +[2025-08-22 17:43:37] [Rank 0] step:1481/10000 train_time:127197ms step_avg:85.89ms +[2025-08-22 17:43:39] [Rank 0] step:1501/10000 train_time:128929ms step_avg:85.90ms +[2025-08-22 17:43:39] [Rank 0] step:1501/10000 train_time:128929ms step_avg:85.90ms +[2025-08-22 17:43:41] [Rank 0] step:1521/10000 train_time:130663ms step_avg:85.91ms +[2025-08-22 17:43:41] [Rank 0] step:1521/10000 train_time:130663ms step_avg:85.91ms +[2025-08-22 17:43:43] [Rank 0] step:1541/10000 train_time:132395ms step_avg:85.92ms +[2025-08-22 17:43:43] [Rank 0] step:1541/10000 train_time:132395ms step_avg:85.92ms +[2025-08-22 17:43:44] [Rank 0] step:1561/10000 train_time:134130ms step_avg:85.93ms +[2025-08-22 17:43:44] [Rank 0] step:1561/10000 train_time:134130ms step_avg:85.93ms +[2025-08-22 17:43:46] [Rank 0] step:1581/10000 train_time:135864ms step_avg:85.94ms +[2025-08-22 17:43:46] [Rank 0] step:1581/10000 train_time:135864ms step_avg:85.94ms +[2025-08-22 17:43:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:43:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:44:01] [Rank 0] PRINT: step:1600/10000 val_loss:4.3279 svd_entropy: attn_qk:H=0.7687,top10E=0.26,eRank=192.5,q75/q25=46.07 attn_vo:H=0.8212,top10E=0.06,eRank=368.5,q75/q25=inf mlp_w1:H=0.8823,top10E=0.17,eRank=355.4,q75/q25=4.88 mlp_w2:H=0.9687,top10E=0.05,eRank=623.8,q75/q25=2.96 vo_prod:H=0.6663,top10E=0.12,eRank=184.7,q75/q25=inf train_time:137610ms step_avg:86.01ms +[2025-08-22 17:44:01] [Rank 0] PRINT: step:1600/10000 val_loss:4.3279 svd_entropy: attn_qk:H=0.7687,top10E=0.26,eRank=192.5,q75/q25=46.07 attn_vo:H=0.8212,top10E=0.06,eRank=368.5,q75/q25=inf mlp_w1:H=0.8823,top10E=0.17,eRank=355.4,q75/q25=4.88 mlp_w2:H=0.9687,top10E=0.05,eRank=623.8,q75/q25=2.96 vo_prod:H=0.6663,top10E=0.12,eRank=184.7,q75/q25=inf train_time:137610ms step_avg:86.01ms +[2025-08-22 17:44:01] [Rank 0] step:1601/10000 train_time:137628ms step_avg:85.96ms +[2025-08-22 17:44:01] [Rank 0] step:1601/10000 train_time:137628ms step_avg:85.96ms +[2025-08-22 17:44:03] [Rank 0] step:1621/10000 train_time:139372ms step_avg:85.98ms +[2025-08-22 17:44:03] [Rank 0] step:1621/10000 train_time:139372ms step_avg:85.98ms +[2025-08-22 17:44:05] [Rank 0] step:1641/10000 train_time:141105ms step_avg:85.99ms +[2025-08-22 17:44:05] [Rank 0] step:1641/10000 train_time:141105ms step_avg:85.99ms +[2025-08-22 17:44:07] [Rank 0] step:1661/10000 train_time:142838ms step_avg:86.00ms +[2025-08-22 17:44:07] [Rank 0] step:1661/10000 train_time:142838ms step_avg:86.00ms +[2025-08-22 17:44:08] [Rank 0] step:1681/10000 train_time:144573ms step_avg:86.00ms +[2025-08-22 17:44:08] [Rank 0] step:1681/10000 train_time:144573ms step_avg:86.00ms +[2025-08-22 17:44:10] [Rank 0] step:1701/10000 train_time:146308ms step_avg:86.01ms +[2025-08-22 17:44:10] [Rank 0] step:1701/10000 train_time:146308ms step_avg:86.01ms +[2025-08-22 17:44:12] [Rank 0] step:1721/10000 train_time:148043ms step_avg:86.02ms +[2025-08-22 17:44:12] [Rank 0] step:1721/10000 train_time:148043ms step_avg:86.02ms +[2025-08-22 17:44:14] [Rank 0] 
step:1741/10000 train_time:149780ms step_avg:86.03ms +[2025-08-22 17:44:14] [Rank 0] step:1741/10000 train_time:149780ms step_avg:86.03ms +[2025-08-22 17:44:15] [Rank 0] step:1761/10000 train_time:151517ms step_avg:86.04ms +[2025-08-22 17:44:15] [Rank 0] step:1761/10000 train_time:151517ms step_avg:86.04ms +[2025-08-22 17:44:17] [Rank 0] step:1781/10000 train_time:153255ms step_avg:86.05ms +[2025-08-22 17:44:17] [Rank 0] step:1781/10000 train_time:153255ms step_avg:86.05ms +[2025-08-22 17:44:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:44:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:44:32] [Rank 0] PRINT: step:1800/10000 val_loss:4.2541 svd_entropy: attn_qk:H=0.7715,top10E=0.25,eRank=195.0,q75/q25=46.82 attn_vo:H=0.8229,top10E=0.06,eRank=372.8,q75/q25=inf mlp_w1:H=0.8861,top10E=0.16,eRank=364.6,q75/q25=4.83 mlp_w2:H=0.9685,top10E=0.05,eRank=623.0,q75/q25=2.97 vo_prod:H=0.6692,top10E=0.12,eRank=189.0,q75/q25=inf train_time:155005ms step_avg:86.11ms +[2025-08-22 17:44:32] [Rank 0] PRINT: step:1800/10000 val_loss:4.2541 svd_entropy: attn_qk:H=0.7715,top10E=0.25,eRank=195.0,q75/q25=46.82 attn_vo:H=0.8229,top10E=0.06,eRank=372.8,q75/q25=inf mlp_w1:H=0.8861,top10E=0.16,eRank=364.6,q75/q25=4.83 mlp_w2:H=0.9685,top10E=0.05,eRank=623.0,q75/q25=2.97 vo_prod:H=0.6692,top10E=0.12,eRank=189.0,q75/q25=inf train_time:155005ms step_avg:86.11ms +[2025-08-22 17:44:33] [Rank 0] step:1801/10000 train_time:155024ms step_avg:86.08ms +[2025-08-22 17:44:33] [Rank 0] step:1801/10000 train_time:155024ms step_avg:86.08ms +[2025-08-22 17:44:34] [Rank 0] step:1821/10000 train_time:156741ms step_avg:86.07ms +[2025-08-22 17:44:34] [Rank 0] step:1821/10000 train_time:156741ms step_avg:86.07ms +[2025-08-22 17:44:36] [Rank 0] step:1841/10000 train_time:158473ms step_avg:86.08ms +[2025-08-22 
17:44:36] [Rank 0] step:1841/10000 train_time:158473ms step_avg:86.08ms +[2025-08-22 17:44:38] [Rank 0] step:1861/10000 train_time:160207ms step_avg:86.09ms +[2025-08-22 17:44:38] [Rank 0] step:1861/10000 train_time:160207ms step_avg:86.09ms +[2025-08-22 17:44:39] [Rank 0] step:1881/10000 train_time:161940ms step_avg:86.09ms +[2025-08-22 17:44:39] [Rank 0] step:1881/10000 train_time:161940ms step_avg:86.09ms +[2025-08-22 17:44:41] [Rank 0] step:1901/10000 train_time:163673ms step_avg:86.10ms +[2025-08-22 17:44:41] [Rank 0] step:1901/10000 train_time:163673ms step_avg:86.10ms +[2025-08-22 17:44:43] [Rank 0] step:1921/10000 train_time:165406ms step_avg:86.10ms +[2025-08-22 17:44:43] [Rank 0] step:1921/10000 train_time:165406ms step_avg:86.10ms +[2025-08-22 17:44:45] [Rank 0] step:1941/10000 train_time:167141ms step_avg:86.11ms +[2025-08-22 17:44:45] [Rank 0] step:1941/10000 train_time:167141ms step_avg:86.11ms +[2025-08-22 17:44:46] [Rank 0] step:1961/10000 train_time:168876ms step_avg:86.12ms +[2025-08-22 17:44:46] [Rank 0] step:1961/10000 train_time:168876ms step_avg:86.12ms +[2025-08-22 17:44:48] [Rank 0] step:1981/10000 train_time:170612ms step_avg:86.12ms +[2025-08-22 17:44:48] [Rank 0] step:1981/10000 train_time:170612ms step_avg:86.12ms +[2025-08-22 17:44:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:44:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:45:03] [Rank 0] PRINT: step:2000/10000 val_loss:4.2214 svd_entropy: attn_qk:H=0.7739,top10E=0.25,eRank=197.2,q75/q25=47.31 attn_vo:H=0.8241,top10E=0.06,eRank=376.2,q75/q25=inf mlp_w1:H=0.8891,top10E=0.16,eRank=372.2,q75/q25=4.78 mlp_w2:H=0.9683,top10E=0.05,eRank=622.2,q75/q25=2.98 vo_prod:H=0.6716,top10E=0.12,eRank=192.6,q75/q25=inf train_time:172359ms step_avg:86.18ms +[2025-08-22 17:45:03] [Rank 0] PRINT: step:2000/10000 val_loss:4.2214 svd_entropy: attn_qk:H=0.7739,top10E=0.25,eRank=197.2,q75/q25=47.31 attn_vo:H=0.8241,top10E=0.06,eRank=376.2,q75/q25=inf mlp_w1:H=0.8891,top10E=0.16,eRank=372.2,q75/q25=4.78 mlp_w2:H=0.9683,top10E=0.05,eRank=622.2,q75/q25=2.98 vo_prod:H=0.6716,top10E=0.12,eRank=192.6,q75/q25=inf train_time:172359ms step_avg:86.18ms +[2025-08-22 17:45:04] [Rank 0] step:2001/10000 train_time:172378ms step_avg:86.15ms +[2025-08-22 17:45:04] [Rank 0] step:2001/10000 train_time:172378ms step_avg:86.15ms +[2025-08-22 17:45:05] [Rank 0] step:2021/10000 train_time:174106ms step_avg:86.15ms +[2025-08-22 17:45:05] [Rank 0] step:2021/10000 train_time:174106ms step_avg:86.15ms +[2025-08-22 17:45:07] [Rank 0] step:2041/10000 train_time:175875ms step_avg:86.17ms +[2025-08-22 17:45:07] [Rank 0] step:2041/10000 train_time:175875ms step_avg:86.17ms +[2025-08-22 17:45:09] [Rank 0] step:2061/10000 train_time:177606ms step_avg:86.17ms +[2025-08-22 17:45:09] [Rank 0] step:2061/10000 train_time:177606ms step_avg:86.17ms +[2025-08-22 17:45:11] [Rank 0] step:2081/10000 train_time:179337ms step_avg:86.18ms +[2025-08-22 17:45:11] [Rank 0] step:2081/10000 train_time:179337ms step_avg:86.18ms +[2025-08-22 17:45:12] [Rank 0] step:2101/10000 train_time:181069ms step_avg:86.18ms +[2025-08-22 17:45:12] [Rank 0] step:2101/10000 train_time:181069ms step_avg:86.18ms +[2025-08-22 17:45:14] [Rank 0] step:2121/10000 train_time:182801ms step_avg:86.19ms +[2025-08-22 17:45:14] [Rank 0] step:2121/10000 train_time:182801ms step_avg:86.19ms +[2025-08-22 17:45:16] [Rank 0] 
step:2141/10000 train_time:184535ms step_avg:86.19ms +[2025-08-22 17:45:16] [Rank 0] step:2141/10000 train_time:184535ms step_avg:86.19ms +[2025-08-22 17:45:17] [Rank 0] step:2161/10000 train_time:186270ms step_avg:86.20ms +[2025-08-22 17:45:17] [Rank 0] step:2161/10000 train_time:186270ms step_avg:86.20ms +[2025-08-22 17:45:19] [Rank 0] step:2181/10000 train_time:188005ms step_avg:86.20ms +[2025-08-22 17:45:19] [Rank 0] step:2181/10000 train_time:188005ms step_avg:86.20ms +[2025-08-22 17:45:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:45:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:45:34] [Rank 0] PRINT: step:2200/10000 val_loss:4.1617 svd_entropy: attn_qk:H=0.7759,top10E=0.25,eRank=199.1,q75/q25=47.69 attn_vo:H=0.8251,top10E=0.06,eRank=378.8,q75/q25=inf mlp_w1:H=0.8916,top10E=0.15,eRank=378.6,q75/q25=4.74 mlp_w2:H=0.9681,top10E=0.05,eRank=621.4,q75/q25=2.98 vo_prod:H=0.6733,top10E=0.12,eRank=195.4,q75/q25=inf train_time:189755ms step_avg:86.25ms +[2025-08-22 17:45:34] [Rank 0] PRINT: step:2200/10000 val_loss:4.1617 svd_entropy: attn_qk:H=0.7759,top10E=0.25,eRank=199.1,q75/q25=47.69 attn_vo:H=0.8251,top10E=0.06,eRank=378.8,q75/q25=inf mlp_w1:H=0.8916,top10E=0.15,eRank=378.6,q75/q25=4.74 mlp_w2:H=0.9681,top10E=0.05,eRank=621.4,q75/q25=2.98 vo_prod:H=0.6733,top10E=0.12,eRank=195.4,q75/q25=inf train_time:189755ms step_avg:86.25ms +[2025-08-22 17:45:35] [Rank 0] step:2201/10000 train_time:189773ms step_avg:86.22ms +[2025-08-22 17:45:35] [Rank 0] step:2201/10000 train_time:189773ms step_avg:86.22ms +[2025-08-22 17:45:36] [Rank 0] step:2221/10000 train_time:191503ms step_avg:86.22ms +[2025-08-22 17:45:36] [Rank 0] step:2221/10000 train_time:191503ms step_avg:86.22ms +[2025-08-22 17:45:38] [Rank 0] step:2241/10000 train_time:193273ms step_avg:86.24ms +[2025-08-22 
17:45:38] [Rank 0] step:2241/10000 train_time:193273ms step_avg:86.24ms +[2025-08-22 17:45:40] [Rank 0] step:2261/10000 train_time:195052ms step_avg:86.27ms +[2025-08-22 17:45:40] [Rank 0] step:2261/10000 train_time:195052ms step_avg:86.27ms +[2025-08-22 17:45:42] [Rank 0] step:2281/10000 train_time:196833ms step_avg:86.29ms +[2025-08-22 17:45:42] [Rank 0] step:2281/10000 train_time:196833ms step_avg:86.29ms +[2025-08-22 17:45:43] [Rank 0] step:2301/10000 train_time:198613ms step_avg:86.32ms +[2025-08-22 17:45:43] [Rank 0] step:2301/10000 train_time:198613ms step_avg:86.32ms +[2025-08-22 17:45:45] [Rank 0] step:2321/10000 train_time:200393ms step_avg:86.34ms +[2025-08-22 17:45:45] [Rank 0] step:2321/10000 train_time:200393ms step_avg:86.34ms +[2025-08-22 17:45:47] [Rank 0] step:2341/10000 train_time:202173ms step_avg:86.36ms +[2025-08-22 17:45:47] [Rank 0] step:2341/10000 train_time:202173ms step_avg:86.36ms +[2025-08-22 17:45:49] [Rank 0] step:2361/10000 train_time:203954ms step_avg:86.38ms +[2025-08-22 17:45:49] [Rank 0] step:2361/10000 train_time:203954ms step_avg:86.38ms +[2025-08-22 17:45:51] [Rank 0] step:2381/10000 train_time:205736ms step_avg:86.41ms +[2025-08-22 17:45:51] [Rank 0] step:2381/10000 train_time:205736ms step_avg:86.41ms +[2025-08-22 17:45:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:45:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:46:06] [Rank 0] PRINT: step:2400/10000 val_loss:4.1044 svd_entropy: attn_qk:H=0.7772,top10E=0.25,eRank=200.4,q75/q25=47.26 attn_vo:H=0.8258,top10E=0.06,eRank=380.6,q75/q25=inf mlp_w1:H=0.8939,top10E=0.15,eRank=384.5,q75/q25=4.69 mlp_w2:H=0.9679,top10E=0.05,eRank=620.8,q75/q25=2.99 vo_prod:H=0.6746,top10E=0.11,eRank=197.5,q75/q25=inf train_time:207530ms step_avg:86.47ms +[2025-08-22 17:46:06] [Rank 0] PRINT: step:2400/10000 val_loss:4.1044 svd_entropy: attn_qk:H=0.7772,top10E=0.25,eRank=200.4,q75/q25=47.26 attn_vo:H=0.8258,top10E=0.06,eRank=380.6,q75/q25=inf mlp_w1:H=0.8939,top10E=0.15,eRank=384.5,q75/q25=4.69 mlp_w2:H=0.9679,top10E=0.05,eRank=620.8,q75/q25=2.99 vo_prod:H=0.6746,top10E=0.11,eRank=197.5,q75/q25=inf train_time:207530ms step_avg:86.47ms +[2025-08-22 17:46:06] [Rank 0] step:2401/10000 train_time:207548ms step_avg:86.44ms +[2025-08-22 17:46:06] [Rank 0] step:2401/10000 train_time:207548ms step_avg:86.44ms +[2025-08-22 17:46:08] [Rank 0] step:2421/10000 train_time:209323ms step_avg:86.46ms +[2025-08-22 17:46:08] [Rank 0] step:2421/10000 train_time:209323ms step_avg:86.46ms +[2025-08-22 17:46:10] [Rank 0] step:2441/10000 train_time:211098ms step_avg:86.48ms +[2025-08-22 17:46:10] [Rank 0] step:2441/10000 train_time:211098ms step_avg:86.48ms +[2025-08-22 17:46:11] [Rank 0] step:2461/10000 train_time:212875ms step_avg:86.50ms +[2025-08-22 17:46:11] [Rank 0] step:2461/10000 train_time:212875ms step_avg:86.50ms +[2025-08-22 17:46:13] [Rank 0] step:2481/10000 train_time:214652ms step_avg:86.52ms +[2025-08-22 17:46:13] [Rank 0] step:2481/10000 train_time:214652ms step_avg:86.52ms +[2025-08-22 17:46:15] [Rank 0] step:2501/10000 train_time:216429ms step_avg:86.54ms +[2025-08-22 17:46:15] [Rank 0] step:2501/10000 train_time:216429ms step_avg:86.54ms +[2025-08-22 17:46:17] [Rank 0] step:2521/10000 train_time:218207ms step_avg:86.56ms +[2025-08-22 17:46:17] [Rank 0] step:2521/10000 train_time:218207ms step_avg:86.56ms +[2025-08-22 17:46:18] [Rank 0] 
step:2541/10000 train_time:219984ms step_avg:86.57ms +[2025-08-22 17:46:18] [Rank 0] step:2541/10000 train_time:219984ms step_avg:86.57ms +[2025-08-22 17:46:20] [Rank 0] step:2561/10000 train_time:221763ms step_avg:86.59ms +[2025-08-22 17:46:20] [Rank 0] step:2561/10000 train_time:221763ms step_avg:86.59ms +[2025-08-22 17:46:22] [Rank 0] step:2581/10000 train_time:223544ms step_avg:86.61ms +[2025-08-22 17:46:22] [Rank 0] step:2581/10000 train_time:223544ms step_avg:86.61ms +[2025-08-22 17:46:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:46:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:46:37] [Rank 0] PRINT: step:2600/10000 val_loss:4.0684 svd_entropy: attn_qk:H=0.7789,top10E=0.25,eRank=202.1,q75/q25=47.38 attn_vo:H=0.8264,top10E=0.06,eRank=382.4,q75/q25=inf mlp_w1:H=0.8959,top10E=0.15,eRank=389.8,q75/q25=4.64 mlp_w2:H=0.9678,top10E=0.05,eRank=620.2,q75/q25=2.99 vo_prod:H=0.6760,top10E=0.11,eRank=199.6,q75/q25=inf train_time:225333ms step_avg:86.67ms +[2025-08-22 17:46:37] [Rank 0] PRINT: step:2600/10000 val_loss:4.0684 svd_entropy: attn_qk:H=0.7789,top10E=0.25,eRank=202.1,q75/q25=47.38 attn_vo:H=0.8264,top10E=0.06,eRank=382.4,q75/q25=inf mlp_w1:H=0.8959,top10E=0.15,eRank=389.8,q75/q25=4.64 mlp_w2:H=0.9678,top10E=0.05,eRank=620.2,q75/q25=2.99 vo_prod:H=0.6760,top10E=0.11,eRank=199.6,q75/q25=inf train_time:225333ms step_avg:86.67ms +[2025-08-22 17:46:38] [Rank 0] step:2601/10000 train_time:225352ms step_avg:86.64ms +[2025-08-22 17:46:38] [Rank 0] step:2601/10000 train_time:225352ms step_avg:86.64ms +[2025-08-22 17:46:39] [Rank 0] step:2621/10000 train_time:227119ms step_avg:86.65ms +[2025-08-22 17:46:39] [Rank 0] step:2621/10000 train_time:227119ms step_avg:86.65ms +[2025-08-22 17:46:41] [Rank 0] step:2641/10000 train_time:228893ms step_avg:86.67ms +[2025-08-22 
17:46:41] [Rank 0] step:2641/10000 train_time:228893ms step_avg:86.67ms +[2025-08-22 17:46:43] [Rank 0] step:2661/10000 train_time:230668ms step_avg:86.68ms +[2025-08-22 17:46:43] [Rank 0] step:2661/10000 train_time:230668ms step_avg:86.68ms +[2025-08-22 17:46:45] [Rank 0] step:2681/10000 train_time:232445ms step_avg:86.70ms +[2025-08-22 17:46:45] [Rank 0] step:2681/10000 train_time:232445ms step_avg:86.70ms +[2025-08-22 17:46:46] [Rank 0] step:2701/10000 train_time:234220ms step_avg:86.72ms +[2025-08-22 17:46:46] [Rank 0] step:2701/10000 train_time:234220ms step_avg:86.72ms +[2025-08-22 17:46:48] [Rank 0] step:2721/10000 train_time:235996ms step_avg:86.73ms +[2025-08-22 17:46:48] [Rank 0] step:2721/10000 train_time:235996ms step_avg:86.73ms +[2025-08-22 17:46:50] [Rank 0] step:2741/10000 train_time:237774ms step_avg:86.75ms +[2025-08-22 17:46:50] [Rank 0] step:2741/10000 train_time:237774ms step_avg:86.75ms +[2025-08-22 17:46:52] [Rank 0] step:2761/10000 train_time:239553ms step_avg:86.76ms +[2025-08-22 17:46:52] [Rank 0] step:2761/10000 train_time:239553ms step_avg:86.76ms +[2025-08-22 17:46:54] [Rank 0] step:2781/10000 train_time:241330ms step_avg:86.78ms +[2025-08-22 17:46:54] [Rank 0] step:2781/10000 train_time:241330ms step_avg:86.78ms +[2025-08-22 17:46:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:46:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:47:09] [Rank 0] PRINT: step:2800/10000 val_loss:4.0416 svd_entropy: attn_qk:H=0.7803,top10E=0.25,eRank=203.5,q75/q25=47.23 attn_vo:H=0.8269,top10E=0.06,eRank=383.7,q75/q25=inf mlp_w1:H=0.8977,top10E=0.15,eRank=394.4,q75/q25=4.60 mlp_w2:H=0.9676,top10E=0.05,eRank=619.6,q75/q25=3.00 vo_prod:H=0.6770,top10E=0.11,eRank=201.3,q75/q25=inf train_time:243121ms step_avg:86.83ms +[2025-08-22 17:47:09] [Rank 0] PRINT: step:2800/10000 val_loss:4.0416 svd_entropy: attn_qk:H=0.7803,top10E=0.25,eRank=203.5,q75/q25=47.23 attn_vo:H=0.8269,top10E=0.06,eRank=383.7,q75/q25=inf mlp_w1:H=0.8977,top10E=0.15,eRank=394.4,q75/q25=4.60 mlp_w2:H=0.9676,top10E=0.05,eRank=619.6,q75/q25=3.00 vo_prod:H=0.6770,top10E=0.11,eRank=201.3,q75/q25=inf train_time:243121ms step_avg:86.83ms +[2025-08-22 17:47:09] [Rank 0] step:2801/10000 train_time:243140ms step_avg:86.80ms +[2025-08-22 17:47:09] [Rank 0] step:2801/10000 train_time:243140ms step_avg:86.80ms +[2025-08-22 17:47:11] [Rank 0] step:2821/10000 train_time:244904ms step_avg:86.81ms +[2025-08-22 17:47:11] [Rank 0] step:2821/10000 train_time:244904ms step_avg:86.81ms +[2025-08-22 17:47:13] [Rank 0] step:2841/10000 train_time:246677ms step_avg:86.83ms +[2025-08-22 17:47:13] [Rank 0] step:2841/10000 train_time:246677ms step_avg:86.83ms +[2025-08-22 17:47:14] [Rank 0] step:2861/10000 train_time:248453ms step_avg:86.84ms +[2025-08-22 17:47:14] [Rank 0] step:2861/10000 train_time:248453ms step_avg:86.84ms +[2025-08-22 17:47:16] [Rank 0] step:2881/10000 train_time:250229ms step_avg:86.85ms +[2025-08-22 17:47:16] [Rank 0] step:2881/10000 train_time:250229ms step_avg:86.85ms +[2025-08-22 17:47:18] [Rank 0] step:2901/10000 train_time:252006ms step_avg:86.87ms +[2025-08-22 17:47:18] [Rank 0] step:2901/10000 train_time:252006ms step_avg:86.87ms +[2025-08-22 17:47:20] [Rank 0] step:2921/10000 train_time:253784ms step_avg:86.88ms +[2025-08-22 17:47:20] [Rank 0] step:2921/10000 train_time:253784ms step_avg:86.88ms +[2025-08-22 17:47:22] [Rank 0] 
step:2941/10000 train_time:255563ms step_avg:86.90ms +[2025-08-22 17:47:22] [Rank 0] step:2941/10000 train_time:255563ms step_avg:86.90ms +[2025-08-22 17:47:23] [Rank 0] step:2961/10000 train_time:257342ms step_avg:86.91ms +[2025-08-22 17:47:23] [Rank 0] step:2961/10000 train_time:257342ms step_avg:86.91ms +[2025-08-22 17:47:25] [Rank 0] step:2981/10000 train_time:259128ms step_avg:86.93ms +[2025-08-22 17:47:25] [Rank 0] step:2981/10000 train_time:259128ms step_avg:86.93ms +[2025-08-22 17:47:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:47:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:47:40] [Rank 0] PRINT: step:3000/10000 val_loss:4.0071 svd_entropy: attn_qk:H=0.7817,top10E=0.24,eRank=204.9,q75/q25=47.09 attn_vo:H=0.8273,top10E=0.06,eRank=385.0,q75/q25=inf mlp_w1:H=0.8992,top10E=0.15,eRank=398.7,q75/q25=4.57 mlp_w2:H=0.9675,top10E=0.05,eRank=618.9,q75/q25=3.00 vo_prod:H=0.6781,top10E=0.11,eRank=203.1,q75/q25=inf train_time:260927ms step_avg:86.98ms +[2025-08-22 17:47:40] [Rank 0] PRINT: step:3000/10000 val_loss:4.0071 svd_entropy: attn_qk:H=0.7817,top10E=0.24,eRank=204.9,q75/q25=47.09 attn_vo:H=0.8273,top10E=0.06,eRank=385.0,q75/q25=inf mlp_w1:H=0.8992,top10E=0.15,eRank=398.7,q75/q25=4.57 mlp_w2:H=0.9675,top10E=0.05,eRank=618.9,q75/q25=3.00 vo_prod:H=0.6781,top10E=0.11,eRank=203.1,q75/q25=inf train_time:260927ms step_avg:86.98ms +[2025-08-22 17:47:41] [Rank 0] step:3001/10000 train_time:260945ms step_avg:86.95ms +[2025-08-22 17:47:41] [Rank 0] step:3001/10000 train_time:260945ms step_avg:86.95ms +[2025-08-22 17:47:42] [Rank 0] step:3021/10000 train_time:262723ms step_avg:86.97ms +[2025-08-22 17:47:42] [Rank 0] step:3021/10000 train_time:262723ms step_avg:86.97ms +[2025-08-22 17:47:44] [Rank 0] step:3041/10000 train_time:264508ms step_avg:86.98ms +[2025-08-22 
17:47:44] [Rank 0] step:3041/10000 train_time:264508ms step_avg:86.98ms +[2025-08-22 17:47:46] [Rank 0] step:3061/10000 train_time:266295ms step_avg:87.00ms +[2025-08-22 17:47:46] [Rank 0] step:3061/10000 train_time:266295ms step_avg:87.00ms +[2025-08-22 17:47:48] [Rank 0] step:3081/10000 train_time:268083ms step_avg:87.01ms +[2025-08-22 17:47:48] [Rank 0] step:3081/10000 train_time:268083ms step_avg:87.01ms +[2025-08-22 17:47:49] [Rank 0] step:3101/10000 train_time:269873ms step_avg:87.03ms +[2025-08-22 17:47:49] [Rank 0] step:3101/10000 train_time:269873ms step_avg:87.03ms +[2025-08-22 17:47:51] [Rank 0] step:3121/10000 train_time:271662ms step_avg:87.04ms +[2025-08-22 17:47:51] [Rank 0] step:3121/10000 train_time:271662ms step_avg:87.04ms +[2025-08-22 17:47:53] [Rank 0] step:3141/10000 train_time:273454ms step_avg:87.06ms +[2025-08-22 17:47:53] [Rank 0] step:3141/10000 train_time:273454ms step_avg:87.06ms +[2025-08-22 17:47:55] [Rank 0] step:3161/10000 train_time:275245ms step_avg:87.08ms +[2025-08-22 17:47:55] [Rank 0] step:3161/10000 train_time:275245ms step_avg:87.08ms +[2025-08-22 17:47:57] [Rank 0] step:3181/10000 train_time:277036ms step_avg:87.09ms +[2025-08-22 17:47:57] [Rank 0] step:3181/10000 train_time:277036ms step_avg:87.09ms +[2025-08-22 17:47:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:47:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:48:12] [Rank 0] PRINT: step:3200/10000 val_loss:3.9779 svd_entropy: attn_qk:H=0.7830,top10E=0.24,eRank=206.1,q75/q25=47.13 attn_vo:H=0.8277,top10E=0.06,eRank=386.0,q75/q25=inf mlp_w1:H=0.9007,top10E=0.14,eRank=402.5,q75/q25=4.54 mlp_w2:H=0.9673,top10E=0.05,eRank=618.2,q75/q25=3.01 vo_prod:H=0.6789,top10E=0.11,eRank=204.4,q75/q25=inf train_time:278841ms step_avg:87.14ms +[2025-08-22 17:48:12] [Rank 0] PRINT: step:3200/10000 val_loss:3.9779 svd_entropy: attn_qk:H=0.7830,top10E=0.24,eRank=206.1,q75/q25=47.13 attn_vo:H=0.8277,top10E=0.06,eRank=386.0,q75/q25=inf mlp_w1:H=0.9007,top10E=0.14,eRank=402.5,q75/q25=4.54 mlp_w2:H=0.9673,top10E=0.05,eRank=618.2,q75/q25=3.01 vo_prod:H=0.6789,top10E=0.11,eRank=204.4,q75/q25=inf train_time:278841ms step_avg:87.14ms +[2025-08-22 17:48:12] [Rank 0] step:3201/10000 train_time:278859ms step_avg:87.12ms +[2025-08-22 17:48:12] [Rank 0] step:3201/10000 train_time:278859ms step_avg:87.12ms +[2025-08-22 17:48:14] [Rank 0] step:3221/10000 train_time:280650ms step_avg:87.13ms +[2025-08-22 17:48:14] [Rank 0] step:3221/10000 train_time:280650ms step_avg:87.13ms +[2025-08-22 17:48:16] [Rank 0] step:3241/10000 train_time:282435ms step_avg:87.14ms +[2025-08-22 17:48:16] [Rank 0] step:3241/10000 train_time:282435ms step_avg:87.14ms +[2025-08-22 17:48:18] [Rank 0] step:3261/10000 train_time:284221ms step_avg:87.16ms +[2025-08-22 17:48:18] [Rank 0] step:3261/10000 train_time:284221ms step_avg:87.16ms +[2025-08-22 17:48:19] [Rank 0] step:3281/10000 train_time:286006ms step_avg:87.17ms +[2025-08-22 17:48:19] [Rank 0] step:3281/10000 train_time:286006ms step_avg:87.17ms +[2025-08-22 17:48:21] [Rank 0] step:3301/10000 train_time:287792ms step_avg:87.18ms +[2025-08-22 17:48:21] [Rank 0] step:3301/10000 train_time:287792ms step_avg:87.18ms +[2025-08-22 17:48:23] [Rank 0] step:3321/10000 train_time:289579ms step_avg:87.20ms +[2025-08-22 17:48:23] [Rank 0] step:3321/10000 train_time:289579ms step_avg:87.20ms +[2025-08-22 17:48:25] [Rank 0] 
step:3341/10000 train_time:291365ms step_avg:87.21ms +[2025-08-22 17:48:25] [Rank 0] step:3341/10000 train_time:291365ms step_avg:87.21ms +[2025-08-22 17:48:27] [Rank 0] step:3361/10000 train_time:293152ms step_avg:87.22ms +[2025-08-22 17:48:27] [Rank 0] step:3361/10000 train_time:293152ms step_avg:87.22ms +[2025-08-22 17:48:28] [Rank 0] step:3381/10000 train_time:294939ms step_avg:87.23ms +[2025-08-22 17:48:28] [Rank 0] step:3381/10000 train_time:294939ms step_avg:87.23ms +[2025-08-22 17:48:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:48:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:48:44] [Rank 0] PRINT: step:3400/10000 val_loss:3.9569 svd_entropy: attn_qk:H=0.7842,top10E=0.24,eRank=207.4,q75/q25=47.07 attn_vo:H=0.8281,top10E=0.06,eRank=387.0,q75/q25=inf mlp_w1:H=0.9021,top10E=0.14,eRank=406.3,q75/q25=4.52 mlp_w2:H=0.9671,top10E=0.05,eRank=617.6,q75/q25=3.02 vo_prod:H=0.6798,top10E=0.11,eRank=205.9,q75/q25=inf train_time:296739ms step_avg:87.28ms +[2025-08-22 17:48:44] [Rank 0] PRINT: step:3400/10000 val_loss:3.9569 svd_entropy: attn_qk:H=0.7842,top10E=0.24,eRank=207.4,q75/q25=47.07 attn_vo:H=0.8281,top10E=0.06,eRank=387.0,q75/q25=inf mlp_w1:H=0.9021,top10E=0.14,eRank=406.3,q75/q25=4.52 mlp_w2:H=0.9671,top10E=0.05,eRank=617.6,q75/q25=3.02 vo_prod:H=0.6798,top10E=0.11,eRank=205.9,q75/q25=inf train_time:296739ms step_avg:87.28ms +[2025-08-22 17:48:44] [Rank 0] step:3401/10000 train_time:296757ms step_avg:87.26ms +[2025-08-22 17:48:44] [Rank 0] step:3401/10000 train_time:296757ms step_avg:87.26ms +[2025-08-22 17:48:46] [Rank 0] step:3421/10000 train_time:298539ms step_avg:87.27ms +[2025-08-22 17:48:46] [Rank 0] step:3421/10000 train_time:298539ms step_avg:87.27ms +[2025-08-22 17:48:48] [Rank 0] step:3441/10000 train_time:300321ms step_avg:87.28ms +[2025-08-22 
17:48:48] [Rank 0] step:3441/10000 train_time:300321ms step_avg:87.28ms +[2025-08-22 17:48:49] [Rank 0] step:3461/10000 train_time:302108ms step_avg:87.29ms +[2025-08-22 17:48:49] [Rank 0] step:3461/10000 train_time:302108ms step_avg:87.29ms +[2025-08-22 17:48:51] [Rank 0] step:3481/10000 train_time:303892ms step_avg:87.30ms +[2025-08-22 17:48:51] [Rank 0] step:3481/10000 train_time:303892ms step_avg:87.30ms +[2025-08-22 17:48:53] [Rank 0] step:3501/10000 train_time:305678ms step_avg:87.31ms +[2025-08-22 17:48:53] [Rank 0] step:3501/10000 train_time:305678ms step_avg:87.31ms +[2025-08-22 17:48:55] [Rank 0] step:3521/10000 train_time:307465ms step_avg:87.32ms +[2025-08-22 17:48:55] [Rank 0] step:3521/10000 train_time:307465ms step_avg:87.32ms +[2025-08-22 17:48:57] [Rank 0] step:3541/10000 train_time:309251ms step_avg:87.33ms +[2025-08-22 17:48:57] [Rank 0] step:3541/10000 train_time:309251ms step_avg:87.33ms +[2025-08-22 17:48:58] [Rank 0] step:3561/10000 train_time:311038ms step_avg:87.35ms +[2025-08-22 17:48:58] [Rank 0] step:3561/10000 train_time:311038ms step_avg:87.35ms +[2025-08-22 17:49:00] [Rank 0] step:3581/10000 train_time:312826ms step_avg:87.36ms +[2025-08-22 17:49:00] [Rank 0] step:3581/10000 train_time:312826ms step_avg:87.36ms +[2025-08-22 17:49:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:49:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:49:16] [Rank 0] PRINT: step:3600/10000 val_loss:3.9524 svd_entropy: attn_qk:H=0.7856,top10E=0.24,eRank=208.8,q75/q25=46.73 attn_vo:H=0.8284,top10E=0.06,eRank=387.9,q75/q25=inf mlp_w1:H=0.9033,top10E=0.14,eRank=409.6,q75/q25=4.49 mlp_w2:H=0.9670,top10E=0.05,eRank=616.9,q75/q25=3.03 vo_prod:H=0.6807,top10E=0.11,eRank=207.4,q75/q25=inf train_time:314628ms step_avg:87.40ms +[2025-08-22 17:49:16] [Rank 0] PRINT: step:3600/10000 val_loss:3.9524 svd_entropy: attn_qk:H=0.7856,top10E=0.24,eRank=208.8,q75/q25=46.73 attn_vo:H=0.8284,top10E=0.06,eRank=387.9,q75/q25=inf mlp_w1:H=0.9033,top10E=0.14,eRank=409.6,q75/q25=4.49 mlp_w2:H=0.9670,top10E=0.05,eRank=616.9,q75/q25=3.03 vo_prod:H=0.6807,top10E=0.11,eRank=207.4,q75/q25=inf train_time:314628ms step_avg:87.40ms +[2025-08-22 17:49:16] [Rank 0] step:3601/10000 train_time:314646ms step_avg:87.38ms +[2025-08-22 17:49:16] [Rank 0] step:3601/10000 train_time:314646ms step_avg:87.38ms +[2025-08-22 17:49:18] [Rank 0] step:3621/10000 train_time:316422ms step_avg:87.39ms +[2025-08-22 17:49:18] [Rank 0] step:3621/10000 train_time:316422ms step_avg:87.39ms +[2025-08-22 17:49:19] [Rank 0] step:3641/10000 train_time:318208ms step_avg:87.40ms +[2025-08-22 17:49:19] [Rank 0] step:3641/10000 train_time:318208ms step_avg:87.40ms +[2025-08-22 17:49:21] [Rank 0] step:3661/10000 train_time:319996ms step_avg:87.41ms +[2025-08-22 17:49:21] [Rank 0] step:3661/10000 train_time:319996ms step_avg:87.41ms +[2025-08-22 17:49:23] [Rank 0] step:3681/10000 train_time:321783ms step_avg:87.42ms +[2025-08-22 17:49:23] [Rank 0] step:3681/10000 train_time:321783ms step_avg:87.42ms +[2025-08-22 17:49:25] [Rank 0] step:3701/10000 train_time:323574ms step_avg:87.43ms +[2025-08-22 17:49:25] [Rank 0] step:3701/10000 train_time:323574ms step_avg:87.43ms +[2025-08-22 17:49:27] [Rank 0] step:3721/10000 train_time:325390ms step_avg:87.45ms +[2025-08-22 17:49:27] [Rank 0] step:3721/10000 train_time:325390ms step_avg:87.45ms +[2025-08-22 17:49:28] [Rank 0] 
step:3741/10000 train_time:327215ms step_avg:87.47ms +[2025-08-22 17:49:28] [Rank 0] step:3741/10000 train_time:327215ms step_avg:87.47ms +[2025-08-22 17:49:30] [Rank 0] step:3761/10000 train_time:329043ms step_avg:87.49ms +[2025-08-22 17:49:30] [Rank 0] step:3761/10000 train_time:329043ms step_avg:87.49ms +[2025-08-22 17:49:32] [Rank 0] step:3781/10000 train_time:330869ms step_avg:87.51ms +[2025-08-22 17:49:32] [Rank 0] step:3781/10000 train_time:330869ms step_avg:87.51ms +[2025-08-22 17:49:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:49:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:49:48] [Rank 0] PRINT: step:3800/10000 val_loss:3.9136 svd_entropy: attn_qk:H=0.7866,top10E=0.24,eRank=209.8,q75/q25=46.72 attn_vo:H=0.8286,top10E=0.06,eRank=388.7,q75/q25=inf mlp_w1:H=0.9044,top10E=0.14,eRank=412.8,q75/q25=4.46 mlp_w2:H=0.9668,top10E=0.05,eRank=616.3,q75/q25=3.03 vo_prod:H=0.6814,top10E=0.10,eRank=208.7,q75/q25=inf train_time:332709ms step_avg:87.56ms +[2025-08-22 17:49:48] [Rank 0] PRINT: step:3800/10000 val_loss:3.9136 svd_entropy: attn_qk:H=0.7866,top10E=0.24,eRank=209.8,q75/q25=46.72 attn_vo:H=0.8286,top10E=0.06,eRank=388.7,q75/q25=inf mlp_w1:H=0.9044,top10E=0.14,eRank=412.8,q75/q25=4.46 mlp_w2:H=0.9668,top10E=0.05,eRank=616.3,q75/q25=3.03 vo_prod:H=0.6814,top10E=0.10,eRank=208.7,q75/q25=inf train_time:332709ms step_avg:87.56ms +[2025-08-22 17:49:48] [Rank 0] step:3801/10000 train_time:332727ms step_avg:87.54ms +[2025-08-22 17:49:48] [Rank 0] step:3801/10000 train_time:332727ms step_avg:87.54ms +[2025-08-22 17:49:50] [Rank 0] step:3821/10000 train_time:334532ms step_avg:87.55ms +[2025-08-22 17:49:50] [Rank 0] step:3821/10000 train_time:334532ms step_avg:87.55ms +[2025-08-22 17:49:51] [Rank 0] step:3841/10000 train_time:336357ms step_avg:87.57ms +[2025-08-22 
17:49:51] [Rank 0] step:3841/10000 train_time:336357ms step_avg:87.57ms +[2025-08-22 17:49:53] [Rank 0] step:3861/10000 train_time:338179ms step_avg:87.59ms +[2025-08-22 17:49:53] [Rank 0] step:3861/10000 train_time:338179ms step_avg:87.59ms +[2025-08-22 17:49:55] [Rank 0] step:3881/10000 train_time:339999ms step_avg:87.61ms +[2025-08-22 17:49:55] [Rank 0] step:3881/10000 train_time:339999ms step_avg:87.61ms +[2025-08-22 17:49:57] [Rank 0] step:3901/10000 train_time:341820ms step_avg:87.62ms +[2025-08-22 17:49:57] [Rank 0] step:3901/10000 train_time:341820ms step_avg:87.62ms +[2025-08-22 17:49:59] [Rank 0] step:3921/10000 train_time:343641ms step_avg:87.64ms +[2025-08-22 17:49:59] [Rank 0] step:3921/10000 train_time:343641ms step_avg:87.64ms +[2025-08-22 17:50:01] [Rank 0] step:3941/10000 train_time:345462ms step_avg:87.66ms +[2025-08-22 17:50:01] [Rank 0] step:3941/10000 train_time:345462ms step_avg:87.66ms +[2025-08-22 17:50:02] [Rank 0] step:3961/10000 train_time:347283ms step_avg:87.68ms +[2025-08-22 17:50:02] [Rank 0] step:3961/10000 train_time:347283ms step_avg:87.68ms +[2025-08-22 17:50:04] [Rank 0] step:3981/10000 train_time:349107ms step_avg:87.69ms +[2025-08-22 17:50:04] [Rank 0] step:3981/10000 train_time:349107ms step_avg:87.69ms +[2025-08-22 17:50:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:50:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:50:20] [Rank 0] PRINT: step:4000/10000 val_loss:3.8926 svd_entropy: attn_qk:H=0.7876,top10E=0.24,eRank=210.9,q75/q25=46.40 attn_vo:H=0.8289,top10E=0.06,eRank=389.2,q75/q25=inf mlp_w1:H=0.9056,top10E=0.14,eRank=415.9,q75/q25=4.45 mlp_w2:H=0.9667,top10E=0.05,eRank=615.7,q75/q25=3.04 vo_prod:H=0.6821,top10E=0.10,eRank=209.8,q75/q25=inf train_time:350942ms step_avg:87.74ms +[2025-08-22 17:50:20] [Rank 0] PRINT: step:4000/10000 val_loss:3.8926 svd_entropy: attn_qk:H=0.7876,top10E=0.24,eRank=210.9,q75/q25=46.40 attn_vo:H=0.8289,top10E=0.06,eRank=389.2,q75/q25=inf mlp_w1:H=0.9056,top10E=0.14,eRank=415.9,q75/q25=4.45 mlp_w2:H=0.9667,top10E=0.05,eRank=615.7,q75/q25=3.04 vo_prod:H=0.6821,top10E=0.10,eRank=209.8,q75/q25=inf train_time:350942ms step_avg:87.74ms +[2025-08-22 17:50:20] [Rank 0] step:4001/10000 train_time:350961ms step_avg:87.72ms +[2025-08-22 17:50:20] [Rank 0] step:4001/10000 train_time:350961ms step_avg:87.72ms +[2025-08-22 17:50:22] [Rank 0] step:4021/10000 train_time:352764ms step_avg:87.73ms +[2025-08-22 17:50:22] [Rank 0] step:4021/10000 train_time:352764ms step_avg:87.73ms +[2025-08-22 17:50:23] [Rank 0] step:4041/10000 train_time:354586ms step_avg:87.75ms +[2025-08-22 17:50:23] [Rank 0] step:4041/10000 train_time:354586ms step_avg:87.75ms +[2025-08-22 17:50:25] [Rank 0] step:4061/10000 train_time:356404ms step_avg:87.76ms +[2025-08-22 17:50:25] [Rank 0] step:4061/10000 train_time:356404ms step_avg:87.76ms +[2025-08-22 17:50:27] [Rank 0] step:4081/10000 train_time:358266ms step_avg:87.79ms +[2025-08-22 17:50:27] [Rank 0] step:4081/10000 train_time:358266ms step_avg:87.79ms +[2025-08-22 17:50:29] [Rank 0] step:4101/10000 train_time:360085ms step_avg:87.80ms +[2025-08-22 17:50:29] [Rank 0] step:4101/10000 train_time:360085ms step_avg:87.80ms +[2025-08-22 17:50:31] [Rank 0] step:4121/10000 train_time:361906ms step_avg:87.82ms +[2025-08-22 17:50:31] [Rank 0] step:4121/10000 train_time:361906ms step_avg:87.82ms +[2025-08-22 17:50:33] [Rank 0] 
step:4141/10000 train_time:363728ms step_avg:87.84ms +[2025-08-22 17:50:33] [Rank 0] step:4141/10000 train_time:363728ms step_avg:87.84ms +[2025-08-22 17:50:34] [Rank 0] step:4161/10000 train_time:365550ms step_avg:87.85ms +[2025-08-22 17:50:34] [Rank 0] step:4161/10000 train_time:365550ms step_avg:87.85ms +[2025-08-22 17:50:36] [Rank 0] step:4181/10000 train_time:367372ms step_avg:87.87ms +[2025-08-22 17:50:36] [Rank 0] step:4181/10000 train_time:367372ms step_avg:87.87ms +[2025-08-22 17:50:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:50:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:50:52] [Rank 0] PRINT: step:4200/10000 val_loss:3.8847 svd_entropy: attn_qk:H=0.7886,top10E=0.24,eRank=211.9,q75/q25=46.09 attn_vo:H=0.8291,top10E=0.06,eRank=389.9,q75/q25=inf mlp_w1:H=0.9065,top10E=0.14,eRank=418.5,q75/q25=4.43 mlp_w2:H=0.9665,top10E=0.05,eRank=615.1,q75/q25=3.04 vo_prod:H=0.6827,top10E=0.10,eRank=210.8,q75/q25=inf train_time:369206ms step_avg:87.91ms +[2025-08-22 17:50:52] [Rank 0] PRINT: step:4200/10000 val_loss:3.8847 svd_entropy: attn_qk:H=0.7886,top10E=0.24,eRank=211.9,q75/q25=46.09 attn_vo:H=0.8291,top10E=0.06,eRank=389.9,q75/q25=inf mlp_w1:H=0.9065,top10E=0.14,eRank=418.5,q75/q25=4.43 mlp_w2:H=0.9665,top10E=0.05,eRank=615.1,q75/q25=3.04 vo_prod:H=0.6827,top10E=0.10,eRank=210.8,q75/q25=inf train_time:369206ms step_avg:87.91ms +[2025-08-22 17:50:52] [Rank 0] step:4201/10000 train_time:369226ms step_avg:87.89ms +[2025-08-22 17:50:52] [Rank 0] step:4201/10000 train_time:369226ms step_avg:87.89ms +[2025-08-22 17:50:54] [Rank 0] step:4221/10000 train_time:371027ms step_avg:87.90ms +[2025-08-22 17:50:54] [Rank 0] step:4221/10000 train_time:371027ms step_avg:87.90ms +[2025-08-22 17:50:56] [Rank 0] step:4241/10000 train_time:372847ms step_avg:87.91ms +[2025-08-22 
17:50:56] [Rank 0] step:4241/10000 train_time:372847ms step_avg:87.91ms +[2025-08-22 17:50:57] [Rank 0] step:4261/10000 train_time:374669ms step_avg:87.93ms +[2025-08-22 17:50:57] [Rank 0] step:4261/10000 train_time:374669ms step_avg:87.93ms +[2025-08-22 17:50:59] [Rank 0] step:4281/10000 train_time:376490ms step_avg:87.94ms +[2025-08-22 17:50:59] [Rank 0] step:4281/10000 train_time:376490ms step_avg:87.94ms +[2025-08-22 17:51:01] [Rank 0] step:4301/10000 train_time:378310ms step_avg:87.96ms +[2025-08-22 17:51:01] [Rank 0] step:4301/10000 train_time:378310ms step_avg:87.96ms +[2025-08-22 17:51:03] [Rank 0] step:4321/10000 train_time:380132ms step_avg:87.97ms +[2025-08-22 17:51:03] [Rank 0] step:4321/10000 train_time:380132ms step_avg:87.97ms +[2025-08-22 17:51:05] [Rank 0] step:4341/10000 train_time:381952ms step_avg:87.99ms +[2025-08-22 17:51:05] [Rank 0] step:4341/10000 train_time:381952ms step_avg:87.99ms +[2025-08-22 17:51:06] [Rank 0] step:4361/10000 train_time:383774ms step_avg:88.00ms +[2025-08-22 17:51:06] [Rank 0] step:4361/10000 train_time:383774ms step_avg:88.00ms +[2025-08-22 17:51:08] [Rank 0] step:4381/10000 train_time:385595ms step_avg:88.02ms +[2025-08-22 17:51:08] [Rank 0] step:4381/10000 train_time:385595ms step_avg:88.02ms +[2025-08-22 17:51:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:51:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:51:24] [Rank 0] PRINT: step:4400/10000 val_loss:3.8622 svd_entropy: attn_qk:H=0.7896,top10E=0.23,eRank=213.0,q75/q25=45.91 attn_vo:H=0.8293,top10E=0.06,eRank=390.5,q75/q25=inf mlp_w1:H=0.9075,top10E=0.14,eRank=421.2,q75/q25=4.42 mlp_w2:H=0.9664,top10E=0.05,eRank=614.5,q75/q25=3.06 vo_prod:H=0.6833,top10E=0.10,eRank=211.9,q75/q25=inf train_time:387429ms step_avg:88.05ms +[2025-08-22 17:51:24] [Rank 0] PRINT: step:4400/10000 val_loss:3.8622 svd_entropy: attn_qk:H=0.7896,top10E=0.23,eRank=213.0,q75/q25=45.91 attn_vo:H=0.8293,top10E=0.06,eRank=390.5,q75/q25=inf mlp_w1:H=0.9075,top10E=0.14,eRank=421.2,q75/q25=4.42 mlp_w2:H=0.9664,top10E=0.05,eRank=614.5,q75/q25=3.06 vo_prod:H=0.6833,top10E=0.10,eRank=211.9,q75/q25=inf train_time:387429ms step_avg:88.05ms +[2025-08-22 17:51:24] [Rank 0] step:4401/10000 train_time:387448ms step_avg:88.04ms +[2025-08-22 17:51:24] [Rank 0] step:4401/10000 train_time:387448ms step_avg:88.04ms +[2025-08-22 17:51:26] [Rank 0] step:4421/10000 train_time:389249ms step_avg:88.05ms +[2025-08-22 17:51:26] [Rank 0] step:4421/10000 train_time:389249ms step_avg:88.05ms +[2025-08-22 17:51:28] [Rank 0] step:4441/10000 train_time:391069ms step_avg:88.06ms +[2025-08-22 17:51:28] [Rank 0] step:4441/10000 train_time:391069ms step_avg:88.06ms +[2025-08-22 17:51:29] [Rank 0] step:4461/10000 train_time:392897ms step_avg:88.07ms +[2025-08-22 17:51:29] [Rank 0] step:4461/10000 train_time:392897ms step_avg:88.07ms +[2025-08-22 17:51:31] [Rank 0] step:4481/10000 train_time:394727ms step_avg:88.09ms +[2025-08-22 17:51:31] [Rank 0] step:4481/10000 train_time:394727ms step_avg:88.09ms +[2025-08-22 17:51:33] [Rank 0] step:4501/10000 train_time:396554ms step_avg:88.10ms +[2025-08-22 17:51:33] [Rank 0] step:4501/10000 train_time:396554ms step_avg:88.10ms +[2025-08-22 17:51:35] [Rank 0] step:4521/10000 train_time:398384ms step_avg:88.12ms +[2025-08-22 17:51:35] [Rank 0] step:4521/10000 train_time:398384ms step_avg:88.12ms +[2025-08-22 17:51:37] [Rank 0] 
step:4541/10000 train_time:400217ms step_avg:88.13ms +[2025-08-22 17:51:37] [Rank 0] step:4541/10000 train_time:400217ms step_avg:88.13ms +[2025-08-22 17:51:39] [Rank 0] step:4561/10000 train_time:402048ms step_avg:88.15ms +[2025-08-22 17:51:39] [Rank 0] step:4561/10000 train_time:402048ms step_avg:88.15ms +[2025-08-22 17:51:40] [Rank 0] step:4581/10000 train_time:403881ms step_avg:88.16ms +[2025-08-22 17:51:40] [Rank 0] step:4581/10000 train_time:403881ms step_avg:88.16ms +[2025-08-22 17:51:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:51:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:51:56] [Rank 0] PRINT: step:4600/10000 val_loss:3.8452 svd_entropy: attn_qk:H=0.7904,top10E=0.23,eRank=213.9,q75/q25=45.64 attn_vo:H=0.8295,top10E=0.06,eRank=391.1,q75/q25=inf mlp_w1:H=0.9084,top10E=0.13,eRank=423.8,q75/q25=4.39 mlp_w2:H=0.9662,top10E=0.05,eRank=614.0,q75/q25=3.06 vo_prod:H=0.6840,top10E=0.10,eRank=213.0,q75/q25=inf train_time:405730ms step_avg:88.20ms +[2025-08-22 17:51:56] [Rank 0] PRINT: step:4600/10000 val_loss:3.8452 svd_entropy: attn_qk:H=0.7904,top10E=0.23,eRank=213.9,q75/q25=45.64 attn_vo:H=0.8295,top10E=0.06,eRank=391.1,q75/q25=inf mlp_w1:H=0.9084,top10E=0.13,eRank=423.8,q75/q25=4.39 mlp_w2:H=0.9662,top10E=0.05,eRank=614.0,q75/q25=3.06 vo_prod:H=0.6840,top10E=0.10,eRank=213.0,q75/q25=inf train_time:405730ms step_avg:88.20ms +[2025-08-22 17:51:56] [Rank 0] step:4601/10000 train_time:405748ms step_avg:88.19ms +[2025-08-22 17:51:56] [Rank 0] step:4601/10000 train_time:405748ms step_avg:88.19ms +[2025-08-22 17:51:58] [Rank 0] step:4621/10000 train_time:407573ms step_avg:88.20ms +[2025-08-22 17:51:58] [Rank 0] step:4621/10000 train_time:407573ms step_avg:88.20ms +[2025-08-22 17:52:00] [Rank 0] step:4641/10000 train_time:409404ms step_avg:88.21ms +[2025-08-22 
17:52:00] [Rank 0] step:4641/10000 train_time:409404ms step_avg:88.21ms +[2025-08-22 17:52:01] [Rank 0] step:4661/10000 train_time:411232ms step_avg:88.23ms +[2025-08-22 17:52:01] [Rank 0] step:4661/10000 train_time:411232ms step_avg:88.23ms +[2025-08-22 17:52:03] [Rank 0] step:4681/10000 train_time:413059ms step_avg:88.24ms +[2025-08-22 17:52:03] [Rank 0] step:4681/10000 train_time:413059ms step_avg:88.24ms +[2025-08-22 17:52:05] [Rank 0] step:4701/10000 train_time:414886ms step_avg:88.25ms +[2025-08-22 17:52:05] [Rank 0] step:4701/10000 train_time:414886ms step_avg:88.25ms +[2025-08-22 17:52:07] [Rank 0] step:4721/10000 train_time:416714ms step_avg:88.27ms +[2025-08-22 17:52:07] [Rank 0] step:4721/10000 train_time:416714ms step_avg:88.27ms +[2025-08-22 17:52:09] [Rank 0] step:4741/10000 train_time:418542ms step_avg:88.28ms +[2025-08-22 17:52:09] [Rank 0] step:4741/10000 train_time:418542ms step_avg:88.28ms +[2025-08-22 17:52:11] [Rank 0] step:4761/10000 train_time:420373ms step_avg:88.30ms +[2025-08-22 17:52:11] [Rank 0] step:4761/10000 train_time:420373ms step_avg:88.30ms +[2025-08-22 17:52:12] [Rank 0] step:4781/10000 train_time:422201ms step_avg:88.31ms +[2025-08-22 17:52:12] [Rank 0] step:4781/10000 train_time:422201ms step_avg:88.31ms +[2025-08-22 17:52:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:52:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:52:28] [Rank 0] PRINT: step:4800/10000 val_loss:3.8357 svd_entropy: attn_qk:H=0.7913,top10E=0.23,eRank=214.8,q75/q25=45.62 attn_vo:H=0.8297,top10E=0.06,eRank=391.6,q75/q25=inf mlp_w1:H=0.9093,top10E=0.13,eRank=426.1,q75/q25=4.38 mlp_w2:H=0.9661,top10E=0.05,eRank=613.5,q75/q25=3.07 vo_prod:H=0.6845,top10E=0.10,eRank=213.9,q75/q25=inf train_time:424044ms step_avg:88.34ms +[2025-08-22 17:52:28] [Rank 0] PRINT: step:4800/10000 val_loss:3.8357 svd_entropy: attn_qk:H=0.7913,top10E=0.23,eRank=214.8,q75/q25=45.62 attn_vo:H=0.8297,top10E=0.06,eRank=391.6,q75/q25=inf mlp_w1:H=0.9093,top10E=0.13,eRank=426.1,q75/q25=4.38 mlp_w2:H=0.9661,top10E=0.05,eRank=613.5,q75/q25=3.07 vo_prod:H=0.6845,top10E=0.10,eRank=213.9,q75/q25=inf train_time:424044ms step_avg:88.34ms +[2025-08-22 17:52:28] [Rank 0] step:4801/10000 train_time:424062ms step_avg:88.33ms +[2025-08-22 17:52:28] [Rank 0] step:4801/10000 train_time:424062ms step_avg:88.33ms +[2025-08-22 17:52:30] [Rank 0] step:4821/10000 train_time:425897ms step_avg:88.34ms +[2025-08-22 17:52:30] [Rank 0] step:4821/10000 train_time:425897ms step_avg:88.34ms +[2025-08-22 17:52:32] [Rank 0] step:4841/10000 train_time:427720ms step_avg:88.35ms +[2025-08-22 17:52:32] [Rank 0] step:4841/10000 train_time:427720ms step_avg:88.35ms +[2025-08-22 17:52:34] [Rank 0] step:4861/10000 train_time:429548ms step_avg:88.37ms +[2025-08-22 17:52:34] [Rank 0] step:4861/10000 train_time:429548ms step_avg:88.37ms +[2025-08-22 17:52:36] [Rank 0] step:4881/10000 train_time:431372ms step_avg:88.38ms +[2025-08-22 17:52:36] [Rank 0] step:4881/10000 train_time:431372ms step_avg:88.38ms +[2025-08-22 17:52:37] [Rank 0] step:4901/10000 train_time:433197ms step_avg:88.39ms +[2025-08-22 17:52:37] [Rank 0] step:4901/10000 train_time:433197ms step_avg:88.39ms +[2025-08-22 17:52:39] [Rank 0] step:4921/10000 train_time:435025ms step_avg:88.40ms +[2025-08-22 17:52:39] [Rank 0] step:4921/10000 train_time:435025ms step_avg:88.40ms +[2025-08-22 17:52:41] [Rank 0] 
step:4941/10000 train_time:436853ms step_avg:88.41ms +[2025-08-22 17:52:41] [Rank 0] step:4941/10000 train_time:436853ms step_avg:88.41ms +[2025-08-22 17:52:43] [Rank 0] step:4961/10000 train_time:438679ms step_avg:88.43ms +[2025-08-22 17:52:43] [Rank 0] step:4961/10000 train_time:438679ms step_avg:88.43ms +[2025-08-22 17:52:45] [Rank 0] step:4981/10000 train_time:440507ms step_avg:88.44ms +[2025-08-22 17:52:45] [Rank 0] step:4981/10000 train_time:440507ms step_avg:88.44ms +[2025-08-22 17:52:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:52:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:53:00] [Rank 0] PRINT: step:5000/10000 val_loss:3.8225 svd_entropy: attn_qk:H=0.7921,top10E=0.23,eRank=215.7,q75/q25=45.36 attn_vo:H=0.8298,top10E=0.06,eRank=392.0,q75/q25=inf mlp_w1:H=0.9100,top10E=0.13,eRank=428.2,q75/q25=4.36 mlp_w2:H=0.9660,top10E=0.05,eRank=613.0,q75/q25=3.07 vo_prod:H=0.6850,top10E=0.10,eRank=214.8,q75/q25=inf train_time:442350ms step_avg:88.47ms +[2025-08-22 17:53:00] [Rank 0] PRINT: step:5000/10000 val_loss:3.8225 svd_entropy: attn_qk:H=0.7921,top10E=0.23,eRank=215.7,q75/q25=45.36 attn_vo:H=0.8298,top10E=0.06,eRank=392.0,q75/q25=inf mlp_w1:H=0.9100,top10E=0.13,eRank=428.2,q75/q25=4.36 mlp_w2:H=0.9660,top10E=0.05,eRank=613.0,q75/q25=3.07 vo_prod:H=0.6850,top10E=0.10,eRank=214.8,q75/q25=inf train_time:442350ms step_avg:88.47ms +[2025-08-22 17:53:00] [Rank 0] step:5001/10000 train_time:442369ms step_avg:88.46ms +[2025-08-22 17:53:00] [Rank 0] step:5001/10000 train_time:442369ms step_avg:88.46ms +[2025-08-22 17:53:02] [Rank 0] step:5021/10000 train_time:444197ms step_avg:88.47ms +[2025-08-22 17:53:02] [Rank 0] step:5021/10000 train_time:444197ms step_avg:88.47ms +[2025-08-22 17:53:04] [Rank 0] step:5041/10000 train_time:446023ms step_avg:88.48ms +[2025-08-22 
17:53:04] [Rank 0] step:5041/10000 train_time:446023ms step_avg:88.48ms +[2025-08-22 17:53:06] [Rank 0] step:5061/10000 train_time:447847ms step_avg:88.49ms +[2025-08-22 17:53:06] [Rank 0] step:5061/10000 train_time:447847ms step_avg:88.49ms +[2025-08-22 17:53:08] [Rank 0] step:5081/10000 train_time:449674ms step_avg:88.50ms +[2025-08-22 17:53:08] [Rank 0] step:5081/10000 train_time:449674ms step_avg:88.50ms +[2025-08-22 17:53:09] [Rank 0] step:5101/10000 train_time:451501ms step_avg:88.51ms +[2025-08-22 17:53:09] [Rank 0] step:5101/10000 train_time:451501ms step_avg:88.51ms +[2025-08-22 17:53:11] [Rank 0] step:5121/10000 train_time:453330ms step_avg:88.52ms +[2025-08-22 17:53:11] [Rank 0] step:5121/10000 train_time:453330ms step_avg:88.52ms +[2025-08-22 17:53:13] [Rank 0] step:5141/10000 train_time:455162ms step_avg:88.54ms +[2025-08-22 17:53:13] [Rank 0] step:5141/10000 train_time:455162ms step_avg:88.54ms +[2025-08-22 17:53:15] [Rank 0] step:5161/10000 train_time:456991ms step_avg:88.55ms +[2025-08-22 17:53:15] [Rank 0] step:5161/10000 train_time:456991ms step_avg:88.55ms +[2025-08-22 17:53:17] [Rank 0] step:5181/10000 train_time:458822ms step_avg:88.56ms +[2025-08-22 17:53:17] [Rank 0] step:5181/10000 train_time:458822ms step_avg:88.56ms +[2025-08-22 17:53:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:53:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:53:32] [Rank 0] PRINT: step:5200/10000 val_loss:3.8113 svd_entropy: attn_qk:H=0.7928,top10E=0.23,eRank=216.5,q75/q25=45.15 attn_vo:H=0.8300,top10E=0.06,eRank=392.5,q75/q25=inf mlp_w1:H=0.9107,top10E=0.13,eRank=430.3,q75/q25=4.35 mlp_w2:H=0.9658,top10E=0.05,eRank=612.4,q75/q25=3.08 vo_prod:H=0.6855,top10E=0.10,eRank=215.7,q75/q25=inf train_time:460689ms step_avg:88.59ms +[2025-08-22 17:53:32] [Rank 0] PRINT: step:5200/10000 val_loss:3.8113 svd_entropy: attn_qk:H=0.7928,top10E=0.23,eRank=216.5,q75/q25=45.15 attn_vo:H=0.8300,top10E=0.06,eRank=392.5,q75/q25=inf mlp_w1:H=0.9107,top10E=0.13,eRank=430.3,q75/q25=4.35 mlp_w2:H=0.9658,top10E=0.05,eRank=612.4,q75/q25=3.08 vo_prod:H=0.6855,top10E=0.10,eRank=215.7,q75/q25=inf train_time:460689ms step_avg:88.59ms +[2025-08-22 17:53:32] [Rank 0] step:5201/10000 train_time:460708ms step_avg:88.58ms +[2025-08-22 17:53:32] [Rank 0] step:5201/10000 train_time:460708ms step_avg:88.58ms +[2025-08-22 17:53:34] [Rank 0] step:5221/10000 train_time:462549ms step_avg:88.59ms +[2025-08-22 17:53:34] [Rank 0] step:5221/10000 train_time:462549ms step_avg:88.59ms +[2025-08-22 17:53:36] [Rank 0] step:5241/10000 train_time:464408ms step_avg:88.61ms +[2025-08-22 17:53:36] [Rank 0] step:5241/10000 train_time:464408ms step_avg:88.61ms +[2025-08-22 17:53:38] [Rank 0] step:5261/10000 train_time:466266ms step_avg:88.63ms +[2025-08-22 17:53:38] [Rank 0] step:5261/10000 train_time:466266ms step_avg:88.63ms +[2025-08-22 17:53:40] [Rank 0] step:5281/10000 train_time:468126ms step_avg:88.64ms +[2025-08-22 17:53:40] [Rank 0] step:5281/10000 train_time:468126ms step_avg:88.64ms +[2025-08-22 17:53:42] [Rank 0] step:5301/10000 train_time:469998ms step_avg:88.66ms +[2025-08-22 17:53:42] [Rank 0] step:5301/10000 train_time:469998ms step_avg:88.66ms +[2025-08-22 17:53:44] [Rank 0] step:5321/10000 train_time:471861ms step_avg:88.68ms +[2025-08-22 17:53:44] [Rank 0] step:5321/10000 train_time:471861ms step_avg:88.68ms +[2025-08-22 17:53:46] [Rank 0] 
step:5341/10000 train_time:473723ms step_avg:88.70ms +[2025-08-22 17:53:46] [Rank 0] step:5341/10000 train_time:473723ms step_avg:88.70ms +[2025-08-22 17:53:47] [Rank 0] step:5361/10000 train_time:475588ms step_avg:88.71ms +[2025-08-22 17:53:47] [Rank 0] step:5361/10000 train_time:475588ms step_avg:88.71ms +[2025-08-22 17:53:49] [Rank 0] step:5381/10000 train_time:477455ms step_avg:88.73ms +[2025-08-22 17:53:49] [Rank 0] step:5381/10000 train_time:477455ms step_avg:88.73ms +[2025-08-22 17:53:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:53:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:54:05] [Rank 0] PRINT: step:5400/10000 val_loss:3.7966 svd_entropy: attn_qk:H=0.7935,top10E=0.23,eRank=217.3,q75/q25=45.09 attn_vo:H=0.8301,top10E=0.06,eRank=392.9,q75/q25=inf mlp_w1:H=0.9115,top10E=0.13,eRank=432.4,q75/q25=4.34 mlp_w2:H=0.9657,top10E=0.05,eRank=611.8,q75/q25=3.09 vo_prod:H=0.6860,top10E=0.10,eRank=216.5,q75/q25=inf train_time:479331ms step_avg:88.76ms +[2025-08-22 17:54:05] [Rank 0] PRINT: step:5400/10000 val_loss:3.7966 svd_entropy: attn_qk:H=0.7935,top10E=0.23,eRank=217.3,q75/q25=45.09 attn_vo:H=0.8301,top10E=0.06,eRank=392.9,q75/q25=inf mlp_w1:H=0.9115,top10E=0.13,eRank=432.4,q75/q25=4.34 mlp_w2:H=0.9657,top10E=0.05,eRank=611.8,q75/q25=3.09 vo_prod:H=0.6860,top10E=0.10,eRank=216.5,q75/q25=inf train_time:479331ms step_avg:88.76ms +[2025-08-22 17:54:05] [Rank 0] step:5401/10000 train_time:479349ms step_avg:88.75ms +[2025-08-22 17:54:05] [Rank 0] step:5401/10000 train_time:479349ms step_avg:88.75ms +[2025-08-22 17:54:07] [Rank 0] step:5421/10000 train_time:481197ms step_avg:88.77ms +[2025-08-22 17:54:07] [Rank 0] step:5421/10000 train_time:481197ms step_avg:88.77ms +[2025-08-22 17:54:09] [Rank 0] step:5441/10000 train_time:483050ms step_avg:88.78ms +[2025-08-22 
17:54:09] [Rank 0] step:5441/10000 train_time:483050ms step_avg:88.78ms +[2025-08-22 17:54:11] [Rank 0] step:5461/10000 train_time:484911ms step_avg:88.80ms +[2025-08-22 17:54:11] [Rank 0] step:5461/10000 train_time:484911ms step_avg:88.80ms +[2025-08-22 17:54:12] [Rank 0] step:5481/10000 train_time:486770ms step_avg:88.81ms +[2025-08-22 17:54:12] [Rank 0] step:5481/10000 train_time:486770ms step_avg:88.81ms +[2025-08-22 17:54:14] [Rank 0] step:5501/10000 train_time:488635ms step_avg:88.83ms +[2025-08-22 17:54:14] [Rank 0] step:5501/10000 train_time:488635ms step_avg:88.83ms +[2025-08-22 17:54:16] [Rank 0] step:5521/10000 train_time:490500ms step_avg:88.84ms +[2025-08-22 17:54:16] [Rank 0] step:5521/10000 train_time:490500ms step_avg:88.84ms +[2025-08-22 17:54:18] [Rank 0] step:5541/10000 train_time:492361ms step_avg:88.86ms +[2025-08-22 17:54:18] [Rank 0] step:5541/10000 train_time:492361ms step_avg:88.86ms +[2025-08-22 17:54:20] [Rank 0] step:5561/10000 train_time:494223ms step_avg:88.87ms +[2025-08-22 17:54:20] [Rank 0] step:5561/10000 train_time:494223ms step_avg:88.87ms +[2025-08-22 17:54:22] [Rank 0] step:5581/10000 train_time:496085ms step_avg:88.89ms +[2025-08-22 17:54:22] [Rank 0] step:5581/10000 train_time:496085ms step_avg:88.89ms +[2025-08-22 17:54:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:54:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:54:37] [Rank 0] PRINT: step:5600/10000 val_loss:3.7887 svd_entropy: attn_qk:H=0.7943,top10E=0.23,eRank=218.1,q75/q25=44.94 attn_vo:H=0.8303,top10E=0.06,eRank=393.2,q75/q25=inf mlp_w1:H=0.9121,top10E=0.13,eRank=434.2,q75/q25=4.33 mlp_w2:H=0.9655,top10E=0.05,eRank=611.4,q75/q25=3.10 vo_prod:H=0.6865,top10E=0.10,eRank=217.3,q75/q25=inf train_time:497961ms step_avg:88.92ms +[2025-08-22 17:54:37] [Rank 0] PRINT: step:5600/10000 val_loss:3.7887 svd_entropy: attn_qk:H=0.7943,top10E=0.23,eRank=218.1,q75/q25=44.94 attn_vo:H=0.8303,top10E=0.06,eRank=393.2,q75/q25=inf mlp_w1:H=0.9121,top10E=0.13,eRank=434.2,q75/q25=4.33 mlp_w2:H=0.9655,top10E=0.05,eRank=611.4,q75/q25=3.10 vo_prod:H=0.6865,top10E=0.10,eRank=217.3,q75/q25=inf train_time:497961ms step_avg:88.92ms +[2025-08-22 17:54:37] [Rank 0] step:5601/10000 train_time:497980ms step_avg:88.91ms +[2025-08-22 17:54:37] [Rank 0] step:5601/10000 train_time:497980ms step_avg:88.91ms +[2025-08-22 17:54:39] [Rank 0] step:5621/10000 train_time:499831ms step_avg:88.92ms +[2025-08-22 17:54:39] [Rank 0] step:5621/10000 train_time:499831ms step_avg:88.92ms +[2025-08-22 17:54:41] [Rank 0] step:5641/10000 train_time:501688ms step_avg:88.94ms +[2025-08-22 17:54:41] [Rank 0] step:5641/10000 train_time:501688ms step_avg:88.94ms +[2025-08-22 17:54:43] [Rank 0] step:5661/10000 train_time:503543ms step_avg:88.95ms +[2025-08-22 17:54:43] [Rank 0] step:5661/10000 train_time:503543ms step_avg:88.95ms +[2025-08-22 17:54:45] [Rank 0] step:5681/10000 train_time:505404ms step_avg:88.96ms +[2025-08-22 17:54:45] [Rank 0] step:5681/10000 train_time:505404ms step_avg:88.96ms +[2025-08-22 17:54:47] [Rank 0] step:5701/10000 train_time:507263ms step_avg:88.98ms +[2025-08-22 17:54:47] [Rank 0] step:5701/10000 train_time:507263ms step_avg:88.98ms +[2025-08-22 17:54:49] [Rank 0] step:5721/10000 train_time:509127ms step_avg:88.99ms +[2025-08-22 17:54:49] [Rank 0] step:5721/10000 train_time:509127ms step_avg:88.99ms +[2025-08-22 17:54:50] [Rank 0] 
step:5741/10000 train_time:510985ms step_avg:89.01ms +[2025-08-22 17:54:50] [Rank 0] step:5741/10000 train_time:510985ms step_avg:89.01ms +[2025-08-22 17:54:52] [Rank 0] step:5761/10000 train_time:512846ms step_avg:89.02ms +[2025-08-22 17:54:52] [Rank 0] step:5761/10000 train_time:512846ms step_avg:89.02ms +[2025-08-22 17:54:54] [Rank 0] step:5781/10000 train_time:514709ms step_avg:89.03ms +[2025-08-22 17:54:54] [Rank 0] step:5781/10000 train_time:514709ms step_avg:89.03ms +[2025-08-22 17:54:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:54:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:55:10] [Rank 0] PRINT: step:5800/10000 val_loss:3.7825 svd_entropy: attn_qk:H=0.7951,top10E=0.23,eRank=219.0,q75/q25=44.77 attn_vo:H=0.8304,top10E=0.06,eRank=393.6,q75/q25=inf mlp_w1:H=0.9128,top10E=0.13,eRank=436.1,q75/q25=4.31 mlp_w2:H=0.9654,top10E=0.05,eRank=610.9,q75/q25=3.10 vo_prod:H=0.6869,top10E=0.10,eRank=218.1,q75/q25=inf train_time:516584ms step_avg:89.07ms +[2025-08-22 17:55:10] [Rank 0] PRINT: step:5800/10000 val_loss:3.7825 svd_entropy: attn_qk:H=0.7951,top10E=0.23,eRank=219.0,q75/q25=44.77 attn_vo:H=0.8304,top10E=0.06,eRank=393.6,q75/q25=inf mlp_w1:H=0.9128,top10E=0.13,eRank=436.1,q75/q25=4.31 mlp_w2:H=0.9654,top10E=0.05,eRank=610.9,q75/q25=3.10 vo_prod:H=0.6869,top10E=0.10,eRank=218.1,q75/q25=inf train_time:516584ms step_avg:89.07ms +[2025-08-22 17:55:10] [Rank 0] step:5801/10000 train_time:516602ms step_avg:89.05ms +[2025-08-22 17:55:10] [Rank 0] step:5801/10000 train_time:516602ms step_avg:89.05ms +[2025-08-22 17:55:12] [Rank 0] step:5821/10000 train_time:518452ms step_avg:89.07ms +[2025-08-22 17:55:12] [Rank 0] step:5821/10000 train_time:518452ms step_avg:89.07ms +[2025-08-22 17:55:14] [Rank 0] step:5841/10000 train_time:520307ms step_avg:89.08ms +[2025-08-22 
17:55:14] [Rank 0] step:5841/10000 train_time:520307ms step_avg:89.08ms +[2025-08-22 17:55:16] [Rank 0] step:5861/10000 train_time:522173ms step_avg:89.09ms +[2025-08-22 17:55:16] [Rank 0] step:5861/10000 train_time:522173ms step_avg:89.09ms +[2025-08-22 17:55:17] [Rank 0] step:5881/10000 train_time:524032ms step_avg:89.11ms +[2025-08-22 17:55:17] [Rank 0] step:5881/10000 train_time:524032ms step_avg:89.11ms +[2025-08-22 17:55:19] [Rank 0] step:5901/10000 train_time:525891ms step_avg:89.12ms +[2025-08-22 17:55:19] [Rank 0] step:5901/10000 train_time:525891ms step_avg:89.12ms +[2025-08-22 17:55:21] [Rank 0] step:5921/10000 train_time:527753ms step_avg:89.13ms +[2025-08-22 17:55:21] [Rank 0] step:5921/10000 train_time:527753ms step_avg:89.13ms +[2025-08-22 17:55:23] [Rank 0] step:5941/10000 train_time:529619ms step_avg:89.15ms +[2025-08-22 17:55:23] [Rank 0] step:5941/10000 train_time:529619ms step_avg:89.15ms +[2025-08-22 17:55:25] [Rank 0] step:5961/10000 train_time:531483ms step_avg:89.16ms +[2025-08-22 17:55:25] [Rank 0] step:5961/10000 train_time:531483ms step_avg:89.16ms +[2025-08-22 17:55:27] [Rank 0] step:5981/10000 train_time:533347ms step_avg:89.17ms +[2025-08-22 17:55:27] [Rank 0] step:5981/10000 train_time:533347ms step_avg:89.17ms +[2025-08-22 17:55:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:55:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:55:42] [Rank 0] PRINT: step:6000/10000 val_loss:3.7609 svd_entropy: attn_qk:H=0.7957,top10E=0.23,eRank=219.6,q75/q25=44.61 attn_vo:H=0.8305,top10E=0.06,eRank=394.0,q75/q25=inf mlp_w1:H=0.9135,top10E=0.13,eRank=438.0,q75/q25=4.31 mlp_w2:H=0.9653,top10E=0.05,eRank=610.5,q75/q25=3.11 vo_prod:H=0.6873,top10E=0.10,eRank=218.8,q75/q25=inf train_time:535222ms step_avg:89.20ms +[2025-08-22 17:55:42] [Rank 0] PRINT: step:6000/10000 val_loss:3.7609 svd_entropy: attn_qk:H=0.7957,top10E=0.23,eRank=219.6,q75/q25=44.61 attn_vo:H=0.8305,top10E=0.06,eRank=394.0,q75/q25=inf mlp_w1:H=0.9135,top10E=0.13,eRank=438.0,q75/q25=4.31 mlp_w2:H=0.9653,top10E=0.05,eRank=610.5,q75/q25=3.11 vo_prod:H=0.6873,top10E=0.10,eRank=218.8,q75/q25=inf train_time:535222ms step_avg:89.20ms +[2025-08-22 17:55:42] [Rank 0] step:6001/10000 train_time:535240ms step_avg:89.19ms +[2025-08-22 17:55:42] [Rank 0] step:6001/10000 train_time:535240ms step_avg:89.19ms +[2025-08-22 17:55:44] [Rank 0] step:6021/10000 train_time:537090ms step_avg:89.20ms +[2025-08-22 17:55:44] [Rank 0] step:6021/10000 train_time:537090ms step_avg:89.20ms +[2025-08-22 17:55:46] [Rank 0] step:6041/10000 train_time:538956ms step_avg:89.22ms +[2025-08-22 17:55:46] [Rank 0] step:6041/10000 train_time:538956ms step_avg:89.22ms +[2025-08-22 17:55:48] [Rank 0] step:6061/10000 train_time:540824ms step_avg:89.23ms +[2025-08-22 17:55:48] [Rank 0] step:6061/10000 train_time:540824ms step_avg:89.23ms +[2025-08-22 17:55:50] [Rank 0] step:6081/10000 train_time:542689ms step_avg:89.24ms +[2025-08-22 17:55:50] [Rank 0] step:6081/10000 train_time:542689ms step_avg:89.24ms +[2025-08-22 17:55:52] [Rank 0] step:6101/10000 train_time:544561ms step_avg:89.26ms +[2025-08-22 17:55:52] [Rank 0] step:6101/10000 train_time:544561ms step_avg:89.26ms +[2025-08-22 17:55:54] [Rank 0] step:6121/10000 train_time:546696ms step_avg:89.31ms +[2025-08-22 17:55:54] [Rank 0] step:6121/10000 train_time:546696ms step_avg:89.31ms +[2025-08-22 17:55:56] [Rank 0] 
step:6141/10000 train_time:548574ms step_avg:89.33ms +[2025-08-22 17:55:56] [Rank 0] step:6141/10000 train_time:548574ms step_avg:89.33ms +[2025-08-22 17:55:58] [Rank 0] step:6161/10000 train_time:550443ms step_avg:89.34ms +[2025-08-22 17:55:58] [Rank 0] step:6161/10000 train_time:550443ms step_avg:89.34ms +[2025-08-22 17:56:00] [Rank 0] step:6181/10000 train_time:552312ms step_avg:89.36ms +[2025-08-22 17:56:00] [Rank 0] step:6181/10000 train_time:552312ms step_avg:89.36ms +[2025-08-22 17:56:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:56:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:56:15] [Rank 0] PRINT: step:6200/10000 val_loss:3.7479 svd_entropy: attn_qk:H=0.7963,top10E=0.23,eRank=220.4,q75/q25=44.57 attn_vo:H=0.8306,top10E=0.06,eRank=394.2,q75/q25=inf mlp_w1:H=0.9141,top10E=0.13,eRank=439.7,q75/q25=4.31 mlp_w2:H=0.9652,top10E=0.05,eRank=610.1,q75/q25=3.11 vo_prod:H=0.6877,top10E=0.10,eRank=219.5,q75/q25=inf train_time:554195ms step_avg:89.39ms +[2025-08-22 17:56:15] [Rank 0] PRINT: step:6200/10000 val_loss:3.7479 svd_entropy: attn_qk:H=0.7963,top10E=0.23,eRank=220.4,q75/q25=44.57 attn_vo:H=0.8306,top10E=0.06,eRank=394.2,q75/q25=inf mlp_w1:H=0.9141,top10E=0.13,eRank=439.7,q75/q25=4.31 mlp_w2:H=0.9652,top10E=0.05,eRank=610.1,q75/q25=3.11 vo_prod:H=0.6877,top10E=0.10,eRank=219.5,q75/q25=inf train_time:554195ms step_avg:89.39ms +[2025-08-22 17:56:15] [Rank 0] step:6201/10000 train_time:554213ms step_avg:89.37ms +[2025-08-22 17:56:15] [Rank 0] step:6201/10000 train_time:554213ms step_avg:89.37ms +[2025-08-22 17:56:17] [Rank 0] step:6221/10000 train_time:556070ms step_avg:89.39ms +[2025-08-22 17:56:17] [Rank 0] step:6221/10000 train_time:556070ms step_avg:89.39ms +[2025-08-22 17:56:19] [Rank 0] step:6241/10000 train_time:557930ms step_avg:89.40ms +[2025-08-22 
17:56:19] [Rank 0] step:6241/10000 train_time:557930ms step_avg:89.40ms +[2025-08-22 17:56:21] [Rank 0] step:6261/10000 train_time:559795ms step_avg:89.41ms +[2025-08-22 17:56:21] [Rank 0] step:6261/10000 train_time:559795ms step_avg:89.41ms +[2025-08-22 17:56:23] [Rank 0] step:6281/10000 train_time:561665ms step_avg:89.42ms +[2025-08-22 17:56:23] [Rank 0] step:6281/10000 train_time:561665ms step_avg:89.42ms +[2025-08-22 17:56:25] [Rank 0] step:6301/10000 train_time:563529ms step_avg:89.43ms +[2025-08-22 17:56:25] [Rank 0] step:6301/10000 train_time:563529ms step_avg:89.43ms +[2025-08-22 17:56:27] [Rank 0] step:6321/10000 train_time:565395ms step_avg:89.45ms +[2025-08-22 17:56:27] [Rank 0] step:6321/10000 train_time:565395ms step_avg:89.45ms +[2025-08-22 17:56:28] [Rank 0] step:6341/10000 train_time:567263ms step_avg:89.46ms +[2025-08-22 17:56:28] [Rank 0] step:6341/10000 train_time:567263ms step_avg:89.46ms +[2025-08-22 17:56:30] [Rank 0] step:6361/10000 train_time:569136ms step_avg:89.47ms +[2025-08-22 17:56:30] [Rank 0] step:6361/10000 train_time:569136ms step_avg:89.47ms +[2025-08-22 17:56:32] [Rank 0] step:6381/10000 train_time:571005ms step_avg:89.49ms +[2025-08-22 17:56:32] [Rank 0] step:6381/10000 train_time:571005ms step_avg:89.49ms +[2025-08-22 17:56:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:56:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:56:48] [Rank 0] PRINT: step:6400/10000 val_loss:3.7340 svd_entropy: attn_qk:H=0.7969,top10E=0.22,eRank=221.0,q75/q25=44.42 attn_vo:H=0.8307,top10E=0.06,eRank=394.5,q75/q25=inf mlp_w1:H=0.9146,top10E=0.13,eRank=441.2,q75/q25=4.29 mlp_w2:H=0.9651,top10E=0.05,eRank=609.6,q75/q25=3.12 vo_prod:H=0.6881,top10E=0.10,eRank=220.1,q75/q25=inf train_time:572882ms step_avg:89.51ms +[2025-08-22 17:56:48] [Rank 0] PRINT: step:6400/10000 val_loss:3.7340 svd_entropy: attn_qk:H=0.7969,top10E=0.22,eRank=221.0,q75/q25=44.42 attn_vo:H=0.8307,top10E=0.06,eRank=394.5,q75/q25=inf mlp_w1:H=0.9146,top10E=0.13,eRank=441.2,q75/q25=4.29 mlp_w2:H=0.9651,top10E=0.05,eRank=609.6,q75/q25=3.12 vo_prod:H=0.6881,top10E=0.10,eRank=220.1,q75/q25=inf train_time:572882ms step_avg:89.51ms +[2025-08-22 17:56:48] [Rank 0] step:6401/10000 train_time:572900ms step_avg:89.50ms +[2025-08-22 17:56:48] [Rank 0] step:6401/10000 train_time:572900ms step_avg:89.50ms +[2025-08-22 17:56:50] [Rank 0] step:6421/10000 train_time:574771ms step_avg:89.51ms +[2025-08-22 17:56:50] [Rank 0] step:6421/10000 train_time:574771ms step_avg:89.51ms +[2025-08-22 17:56:52] [Rank 0] step:6441/10000 train_time:576634ms step_avg:89.53ms +[2025-08-22 17:56:52] [Rank 0] step:6441/10000 train_time:576634ms step_avg:89.53ms +[2025-08-22 17:56:53] [Rank 0] step:6461/10000 train_time:578501ms step_avg:89.54ms +[2025-08-22 17:56:53] [Rank 0] step:6461/10000 train_time:578501ms step_avg:89.54ms +[2025-08-22 17:56:55] [Rank 0] step:6481/10000 train_time:580371ms step_avg:89.55ms +[2025-08-22 17:56:55] [Rank 0] step:6481/10000 train_time:580371ms step_avg:89.55ms +[2025-08-22 17:56:57] [Rank 0] step:6501/10000 train_time:582231ms step_avg:89.56ms +[2025-08-22 17:56:57] [Rank 0] step:6501/10000 train_time:582231ms step_avg:89.56ms +[2025-08-22 17:56:59] [Rank 0] step:6521/10000 train_time:584093ms step_avg:89.57ms +[2025-08-22 17:56:59] [Rank 0] step:6521/10000 train_time:584093ms step_avg:89.57ms +[2025-08-22 17:57:01] [Rank 0] 
step:6541/10000 train_time:585962ms step_avg:89.58ms +[2025-08-22 17:57:01] [Rank 0] step:6541/10000 train_time:585962ms step_avg:89.58ms +[2025-08-22 17:57:03] [Rank 0] step:6561/10000 train_time:587831ms step_avg:89.59ms +[2025-08-22 17:57:03] [Rank 0] step:6561/10000 train_time:587831ms step_avg:89.59ms +[2025-08-22 17:57:05] [Rank 0] step:6581/10000 train_time:589699ms step_avg:89.61ms +[2025-08-22 17:57:05] [Rank 0] step:6581/10000 train_time:589699ms step_avg:89.61ms +[2025-08-22 17:57:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:57:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:57:20] [Rank 0] PRINT: step:6600/10000 val_loss:3.7199 svd_entropy: attn_qk:H=0.7974,top10E=0.22,eRank=221.7,q75/q25=44.21 attn_vo:H=0.8308,top10E=0.06,eRank=394.8,q75/q25=inf mlp_w1:H=0.9152,top10E=0.13,eRank=442.9,q75/q25=4.28 mlp_w2:H=0.9650,top10E=0.05,eRank=609.2,q75/q25=3.13 vo_prod:H=0.6884,top10E=0.10,eRank=220.8,q75/q25=inf train_time:591581ms step_avg:89.63ms +[2025-08-22 17:57:20] [Rank 0] PRINT: step:6600/10000 val_loss:3.7199 svd_entropy: attn_qk:H=0.7974,top10E=0.22,eRank=221.7,q75/q25=44.21 attn_vo:H=0.8308,top10E=0.06,eRank=394.8,q75/q25=inf mlp_w1:H=0.9152,top10E=0.13,eRank=442.9,q75/q25=4.28 mlp_w2:H=0.9650,top10E=0.05,eRank=609.2,q75/q25=3.13 vo_prod:H=0.6884,top10E=0.10,eRank=220.8,q75/q25=inf train_time:591581ms step_avg:89.63ms +[2025-08-22 17:57:20] [Rank 0] step:6601/10000 train_time:591602ms step_avg:89.62ms +[2025-08-22 17:57:20] [Rank 0] step:6601/10000 train_time:591602ms step_avg:89.62ms +[2025-08-22 17:57:22] [Rank 0] step:6621/10000 train_time:593450ms step_avg:89.63ms +[2025-08-22 17:57:22] [Rank 0] step:6621/10000 train_time:593450ms step_avg:89.63ms +[2025-08-22 17:57:24] [Rank 0] step:6641/10000 train_time:595326ms step_avg:89.64ms +[2025-08-22 
17:57:24] [Rank 0] step:6641/10000 train_time:595326ms step_avg:89.64ms +[2025-08-22 17:57:26] [Rank 0] step:6661/10000 train_time:597195ms step_avg:89.66ms +[2025-08-22 17:57:26] [Rank 0] step:6661/10000 train_time:597195ms step_avg:89.66ms +[2025-08-22 17:57:28] [Rank 0] step:6681/10000 train_time:599081ms step_avg:89.67ms +[2025-08-22 17:57:28] [Rank 0] step:6681/10000 train_time:599081ms step_avg:89.67ms +[2025-08-22 17:57:30] [Rank 0] step:6701/10000 train_time:600988ms step_avg:89.69ms +[2025-08-22 17:57:30] [Rank 0] step:6701/10000 train_time:600988ms step_avg:89.69ms +[2025-08-22 17:57:32] [Rank 0] step:6721/10000 train_time:602887ms step_avg:89.70ms +[2025-08-22 17:57:32] [Rank 0] step:6721/10000 train_time:602887ms step_avg:89.70ms +[2025-08-22 17:57:34] [Rank 0] step:6741/10000 train_time:604785ms step_avg:89.72ms +[2025-08-22 17:57:34] [Rank 0] step:6741/10000 train_time:604785ms step_avg:89.72ms +[2025-08-22 17:57:35] [Rank 0] step:6761/10000 train_time:606680ms step_avg:89.73ms +[2025-08-22 17:57:35] [Rank 0] step:6761/10000 train_time:606680ms step_avg:89.73ms +[2025-08-22 17:57:37] [Rank 0] step:6781/10000 train_time:608580ms step_avg:89.75ms +[2025-08-22 17:57:37] [Rank 0] step:6781/10000 train_time:608580ms step_avg:89.75ms +[2025-08-22 17:57:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:57:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:57:53] [Rank 0] PRINT: step:6800/10000 val_loss:3.7060 svd_entropy: attn_qk:H=0.7978,top10E=0.22,eRank=222.1,q75/q25=44.10 attn_vo:H=0.8309,top10E=0.06,eRank=395.1,q75/q25=inf mlp_w1:H=0.9157,top10E=0.12,eRank=444.4,q75/q25=4.28 mlp_w2:H=0.9649,top10E=0.05,eRank=608.9,q75/q25=3.13 vo_prod:H=0.6888,top10E=0.09,eRank=221.4,q75/q25=inf train_time:610496ms step_avg:89.78ms +[2025-08-22 17:57:53] [Rank 0] PRINT: step:6800/10000 val_loss:3.7060 svd_entropy: attn_qk:H=0.7978,top10E=0.22,eRank=222.1,q75/q25=44.10 attn_vo:H=0.8309,top10E=0.06,eRank=395.1,q75/q25=inf mlp_w1:H=0.9157,top10E=0.12,eRank=444.4,q75/q25=4.28 mlp_w2:H=0.9649,top10E=0.05,eRank=608.9,q75/q25=3.13 vo_prod:H=0.6888,top10E=0.09,eRank=221.4,q75/q25=inf train_time:610496ms step_avg:89.78ms +[2025-08-22 17:57:53] [Rank 0] step:6801/10000 train_time:610515ms step_avg:89.77ms +[2025-08-22 17:57:53] [Rank 0] step:6801/10000 train_time:610515ms step_avg:89.77ms +[2025-08-22 17:57:55] [Rank 0] step:6821/10000 train_time:612389ms step_avg:89.78ms +[2025-08-22 17:57:55] [Rank 0] step:6821/10000 train_time:612389ms step_avg:89.78ms +[2025-08-22 17:57:57] [Rank 0] step:6841/10000 train_time:614282ms step_avg:89.79ms +[2025-08-22 17:57:57] [Rank 0] step:6841/10000 train_time:614282ms step_avg:89.79ms +[2025-08-22 17:57:59] [Rank 0] step:6861/10000 train_time:616171ms step_avg:89.81ms +[2025-08-22 17:57:59] [Rank 0] step:6861/10000 train_time:616171ms step_avg:89.81ms +[2025-08-22 17:58:01] [Rank 0] step:6881/10000 train_time:618065ms step_avg:89.82ms +[2025-08-22 17:58:01] [Rank 0] step:6881/10000 train_time:618065ms step_avg:89.82ms +[2025-08-22 17:58:02] [Rank 0] step:6901/10000 train_time:619958ms step_avg:89.84ms +[2025-08-22 17:58:02] [Rank 0] step:6901/10000 train_time:619958ms step_avg:89.84ms +[2025-08-22 17:58:04] [Rank 0] step:6921/10000 train_time:621855ms step_avg:89.85ms +[2025-08-22 17:58:04] [Rank 0] step:6921/10000 train_time:621855ms step_avg:89.85ms +[2025-08-22 17:58:06] [Rank 0] 
step:6941/10000 train_time:623754ms step_avg:89.87ms +[2025-08-22 17:58:06] [Rank 0] step:6941/10000 train_time:623754ms step_avg:89.87ms +[2025-08-22 17:58:08] [Rank 0] step:6961/10000 train_time:625660ms step_avg:89.88ms +[2025-08-22 17:58:08] [Rank 0] step:6961/10000 train_time:625660ms step_avg:89.88ms +[2025-08-22 17:58:10] [Rank 0] step:6981/10000 train_time:627557ms step_avg:89.90ms +[2025-08-22 17:58:10] [Rank 0] step:6981/10000 train_time:627557ms step_avg:89.90ms +[2025-08-22 17:58:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:58:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:58:26] [Rank 0] PRINT: step:7000/10000 val_loss:3.6888 svd_entropy: attn_qk:H=0.7983,top10E=0.22,eRank=222.6,q75/q25=43.95 attn_vo:H=0.8310,top10E=0.06,eRank=395.4,q75/q25=inf mlp_w1:H=0.9162,top10E=0.12,eRank=445.7,q75/q25=4.27 mlp_w2:H=0.9648,top10E=0.05,eRank=608.6,q75/q25=3.14 vo_prod:H=0.6891,top10E=0.09,eRank=222.0,q75/q25=inf train_time:629527ms step_avg:89.93ms +[2025-08-22 17:58:26] [Rank 0] PRINT: step:7000/10000 val_loss:3.6888 svd_entropy: attn_qk:H=0.7983,top10E=0.22,eRank=222.6,q75/q25=43.95 attn_vo:H=0.8310,top10E=0.06,eRank=395.4,q75/q25=inf mlp_w1:H=0.9162,top10E=0.12,eRank=445.7,q75/q25=4.27 mlp_w2:H=0.9648,top10E=0.05,eRank=608.6,q75/q25=3.14 vo_prod:H=0.6891,top10E=0.09,eRank=222.0,q75/q25=inf train_time:629527ms step_avg:89.93ms +[2025-08-22 17:58:26] [Rank 0] step:7001/10000 train_time:629546ms step_avg:89.92ms +[2025-08-22 17:58:26] [Rank 0] step:7001/10000 train_time:629546ms step_avg:89.92ms +[2025-08-22 17:58:28] [Rank 0] step:7021/10000 train_time:631444ms step_avg:89.94ms +[2025-08-22 17:58:28] [Rank 0] step:7021/10000 train_time:631444ms step_avg:89.94ms +[2025-08-22 17:58:30] [Rank 0] step:7041/10000 train_time:633335ms step_avg:89.95ms +[2025-08-22 
17:58:30] [Rank 0] step:7041/10000 train_time:633335ms step_avg:89.95ms +[2025-08-22 17:58:31] [Rank 0] step:7061/10000 train_time:635226ms step_avg:89.96ms +[2025-08-22 17:58:31] [Rank 0] step:7061/10000 train_time:635226ms step_avg:89.96ms +[2025-08-22 17:58:33] [Rank 0] step:7081/10000 train_time:637116ms step_avg:89.98ms +[2025-08-22 17:58:33] [Rank 0] step:7081/10000 train_time:637116ms step_avg:89.98ms +[2025-08-22 17:58:35] [Rank 0] step:7101/10000 train_time:639015ms step_avg:89.99ms +[2025-08-22 17:58:35] [Rank 0] step:7101/10000 train_time:639015ms step_avg:89.99ms +[2025-08-22 17:58:37] [Rank 0] step:7121/10000 train_time:640903ms step_avg:90.00ms +[2025-08-22 17:58:37] [Rank 0] step:7121/10000 train_time:640903ms step_avg:90.00ms +[2025-08-22 17:58:39] [Rank 0] step:7141/10000 train_time:642797ms step_avg:90.02ms +[2025-08-22 17:58:39] [Rank 0] step:7141/10000 train_time:642797ms step_avg:90.02ms +[2025-08-22 17:58:41] [Rank 0] step:7161/10000 train_time:644694ms step_avg:90.03ms +[2025-08-22 17:58:41] [Rank 0] step:7161/10000 train_time:644694ms step_avg:90.03ms +[2025-08-22 17:58:43] [Rank 0] step:7181/10000 train_time:646591ms step_avg:90.04ms +[2025-08-22 17:58:43] [Rank 0] step:7181/10000 train_time:646591ms step_avg:90.04ms +[2025-08-22 17:58:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:58:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 17:58:58] [Rank 0] PRINT: step:7200/10000 val_loss:3.6766 svd_entropy: attn_qk:H=0.7987,top10E=0.22,eRank=223.1,q75/q25=43.77 attn_vo:H=0.8311,top10E=0.06,eRank=395.6,q75/q25=inf mlp_w1:H=0.9166,top10E=0.12,eRank=447.0,q75/q25=4.26 mlp_w2:H=0.9647,top10E=0.05,eRank=608.3,q75/q25=3.14 vo_prod:H=0.6894,top10E=0.09,eRank=222.5,q75/q25=inf train_time:648501ms step_avg:90.07ms +[2025-08-22 17:58:58] [Rank 0] PRINT: step:7200/10000 val_loss:3.6766 svd_entropy: attn_qk:H=0.7987,top10E=0.22,eRank=223.1,q75/q25=43.77 attn_vo:H=0.8311,top10E=0.06,eRank=395.6,q75/q25=inf mlp_w1:H=0.9166,top10E=0.12,eRank=447.0,q75/q25=4.26 mlp_w2:H=0.9647,top10E=0.05,eRank=608.3,q75/q25=3.14 vo_prod:H=0.6894,top10E=0.09,eRank=222.5,q75/q25=inf train_time:648501ms step_avg:90.07ms +[2025-08-22 17:58:58] [Rank 0] step:7201/10000 train_time:648519ms step_avg:90.06ms +[2025-08-22 17:58:58] [Rank 0] step:7201/10000 train_time:648519ms step_avg:90.06ms +[2025-08-22 17:59:00] [Rank 0] step:7221/10000 train_time:650420ms step_avg:90.07ms +[2025-08-22 17:59:00] [Rank 0] step:7221/10000 train_time:650420ms step_avg:90.07ms +[2025-08-22 17:59:02] [Rank 0] step:7241/10000 train_time:652312ms step_avg:90.09ms +[2025-08-22 17:59:02] [Rank 0] step:7241/10000 train_time:652312ms step_avg:90.09ms +[2025-08-22 17:59:04] [Rank 0] step:7261/10000 train_time:654199ms step_avg:90.10ms +[2025-08-22 17:59:04] [Rank 0] step:7261/10000 train_time:654199ms step_avg:90.10ms +[2025-08-22 17:59:06] [Rank 0] step:7281/10000 train_time:656102ms step_avg:90.11ms +[2025-08-22 17:59:06] [Rank 0] step:7281/10000 train_time:656102ms step_avg:90.11ms +[2025-08-22 17:59:08] [Rank 0] step:7301/10000 train_time:657994ms step_avg:90.12ms +[2025-08-22 17:59:08] [Rank 0] step:7301/10000 train_time:657994ms step_avg:90.12ms +[2025-08-22 17:59:10] [Rank 0] step:7321/10000 train_time:659902ms step_avg:90.14ms +[2025-08-22 17:59:10] [Rank 0] step:7321/10000 train_time:659902ms step_avg:90.14ms +[2025-08-22 17:59:12] [Rank 0] 
step:7341/10000 train_time:661796ms step_avg:90.15ms +[2025-08-22 17:59:12] [Rank 0] step:7341/10000 train_time:661796ms step_avg:90.15ms +[2025-08-22 17:59:14] [Rank 0] step:7361/10000 train_time:663701ms step_avg:90.16ms +[2025-08-22 17:59:14] [Rank 0] step:7361/10000 train_time:663701ms step_avg:90.16ms +[2025-08-22 17:59:16] [Rank 0] step:7381/10000 train_time:665656ms step_avg:90.19ms +[2025-08-22 17:59:16] [Rank 0] step:7381/10000 train_time:665656ms step_avg:90.19ms +[2025-08-22 17:59:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:59:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:59:31] [Rank 0] PRINT: step:7400/10000 val_loss:3.6559 svd_entropy: attn_qk:H=0.7991,top10E=0.22,eRank=223.5,q75/q25=43.70 attn_vo:H=0.8312,top10E=0.06,eRank=395.8,q75/q25=inf mlp_w1:H=0.9171,top10E=0.12,eRank=448.2,q75/q25=4.25 mlp_w2:H=0.9647,top10E=0.05,eRank=608.1,q75/q25=3.14 vo_prod:H=0.6897,top10E=0.09,eRank=223.0,q75/q25=inf train_time:667603ms step_avg:90.22ms +[2025-08-22 17:59:31] [Rank 0] PRINT: step:7400/10000 val_loss:3.6559 svd_entropy: attn_qk:H=0.7991,top10E=0.22,eRank=223.5,q75/q25=43.70 attn_vo:H=0.8312,top10E=0.06,eRank=395.8,q75/q25=inf mlp_w1:H=0.9171,top10E=0.12,eRank=448.2,q75/q25=4.25 mlp_w2:H=0.9647,top10E=0.05,eRank=608.1,q75/q25=3.14 vo_prod:H=0.6897,top10E=0.09,eRank=223.0,q75/q25=inf train_time:667603ms step_avg:90.22ms +[2025-08-22 17:59:31] [Rank 0] step:7401/10000 train_time:667621ms step_avg:90.21ms +[2025-08-22 17:59:31] [Rank 0] step:7401/10000 train_time:667621ms step_avg:90.21ms +[2025-08-22 17:59:33] [Rank 0] step:7421/10000 train_time:669504ms step_avg:90.22ms +[2025-08-22 17:59:33] [Rank 0] step:7421/10000 train_time:669504ms step_avg:90.22ms +[2025-08-22 17:59:35] [Rank 0] step:7441/10000 train_time:671398ms step_avg:90.23ms +[2025-08-22 
17:59:35] [Rank 0] step:7441/10000 train_time:671398ms step_avg:90.23ms +[2025-08-22 17:59:37] [Rank 0] step:7461/10000 train_time:673297ms step_avg:90.24ms +[2025-08-22 17:59:37] [Rank 0] step:7461/10000 train_time:673297ms step_avg:90.24ms +[2025-08-22 17:59:39] [Rank 0] step:7481/10000 train_time:675202ms step_avg:90.26ms +[2025-08-22 17:59:39] [Rank 0] step:7481/10000 train_time:675202ms step_avg:90.26ms +[2025-08-22 17:59:41] [Rank 0] step:7501/10000 train_time:677106ms step_avg:90.27ms +[2025-08-22 17:59:41] [Rank 0] step:7501/10000 train_time:677106ms step_avg:90.27ms +[2025-08-22 17:59:43] [Rank 0] step:7521/10000 train_time:679012ms step_avg:90.28ms +[2025-08-22 17:59:43] [Rank 0] step:7521/10000 train_time:679012ms step_avg:90.28ms +[2025-08-22 17:59:45] [Rank 0] step:7541/10000 train_time:680926ms step_avg:90.30ms +[2025-08-22 17:59:45] [Rank 0] step:7541/10000 train_time:680926ms step_avg:90.30ms +[2025-08-22 17:59:46] [Rank 0] step:7561/10000 train_time:682821ms step_avg:90.31ms +[2025-08-22 17:59:46] [Rank 0] step:7561/10000 train_time:682821ms step_avg:90.31ms +[2025-08-22 17:59:48] [Rank 0] step:7581/10000 train_time:684734ms step_avg:90.32ms +[2025-08-22 17:59:48] [Rank 0] step:7581/10000 train_time:684734ms step_avg:90.32ms +[2025-08-22 17:59:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 17:59:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:00:04] [Rank 0] PRINT: step:7600/10000 val_loss:3.6473 svd_entropy: attn_qk:H=0.7994,top10E=0.22,eRank=223.9,q75/q25=43.55 attn_vo:H=0.8312,top10E=0.06,eRank=396.1,q75/q25=inf mlp_w1:H=0.9174,top10E=0.12,eRank=449.3,q75/q25=4.25 mlp_w2:H=0.9646,top10E=0.05,eRank=607.9,q75/q25=3.15 vo_prod:H=0.6900,top10E=0.09,eRank=223.5,q75/q25=inf train_time:686658ms step_avg:90.35ms +[2025-08-22 18:00:04] [Rank 0] PRINT: step:7600/10000 val_loss:3.6473 svd_entropy: attn_qk:H=0.7994,top10E=0.22,eRank=223.9,q75/q25=43.55 attn_vo:H=0.8312,top10E=0.06,eRank=396.1,q75/q25=inf mlp_w1:H=0.9174,top10E=0.12,eRank=449.3,q75/q25=4.25 mlp_w2:H=0.9646,top10E=0.05,eRank=607.9,q75/q25=3.15 vo_prod:H=0.6900,top10E=0.09,eRank=223.5,q75/q25=inf train_time:686658ms step_avg:90.35ms +[2025-08-22 18:00:04] [Rank 0] step:7601/10000 train_time:686676ms step_avg:90.34ms +[2025-08-22 18:00:04] [Rank 0] step:7601/10000 train_time:686676ms step_avg:90.34ms +[2025-08-22 18:00:06] [Rank 0] step:7621/10000 train_time:688573ms step_avg:90.35ms +[2025-08-22 18:00:06] [Rank 0] step:7621/10000 train_time:688573ms step_avg:90.35ms +[2025-08-22 18:00:08] [Rank 0] step:7641/10000 train_time:690466ms step_avg:90.36ms +[2025-08-22 18:00:08] [Rank 0] step:7641/10000 train_time:690466ms step_avg:90.36ms +[2025-08-22 18:00:10] [Rank 0] step:7661/10000 train_time:692364ms step_avg:90.38ms +[2025-08-22 18:00:10] [Rank 0] step:7661/10000 train_time:692364ms step_avg:90.38ms +[2025-08-22 18:00:12] [Rank 0] step:7681/10000 train_time:694257ms step_avg:90.39ms +[2025-08-22 18:00:12] [Rank 0] step:7681/10000 train_time:694257ms step_avg:90.39ms +[2025-08-22 18:00:14] [Rank 0] step:7701/10000 train_time:696153ms step_avg:90.40ms +[2025-08-22 18:00:14] [Rank 0] step:7701/10000 train_time:696153ms step_avg:90.40ms +[2025-08-22 18:00:15] [Rank 0] step:7721/10000 train_time:698063ms step_avg:90.41ms +[2025-08-22 18:00:15] [Rank 0] step:7721/10000 train_time:698063ms step_avg:90.41ms +[2025-08-22 18:00:17] [Rank 0] 
step:7741/10000 train_time:699961ms step_avg:90.42ms +[2025-08-22 18:00:17] [Rank 0] step:7741/10000 train_time:699961ms step_avg:90.42ms +[2025-08-22 18:00:19] [Rank 0] step:7761/10000 train_time:701970ms step_avg:90.45ms +[2025-08-22 18:00:19] [Rank 0] step:7761/10000 train_time:701970ms step_avg:90.45ms +[2025-08-22 18:00:21] [Rank 0] step:7781/10000 train_time:703917ms step_avg:90.47ms +[2025-08-22 18:00:21] [Rank 0] step:7781/10000 train_time:703917ms step_avg:90.47ms +[2025-08-22 18:00:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:00:23] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:00:37] [Rank 0] PRINT: step:7800/10000 val_loss:3.6333 svd_entropy: attn_qk:H=0.7997,top10E=0.22,eRank=224.2,q75/q25=43.38 attn_vo:H=0.8313,top10E=0.06,eRank=396.2,q75/q25=inf mlp_w1:H=0.9178,top10E=0.12,eRank=450.3,q75/q25=4.24 mlp_w2:H=0.9646,top10E=0.05,eRank=607.7,q75/q25=3.15 vo_prod:H=0.6902,top10E=0.09,eRank=223.9,q75/q25=inf train_time:705841ms step_avg:90.49ms +[2025-08-22 18:00:37] [Rank 0] PRINT: step:7800/10000 val_loss:3.6333 svd_entropy: attn_qk:H=0.7997,top10E=0.22,eRank=224.2,q75/q25=43.38 attn_vo:H=0.8313,top10E=0.06,eRank=396.2,q75/q25=inf mlp_w1:H=0.9178,top10E=0.12,eRank=450.3,q75/q25=4.24 mlp_w2:H=0.9646,top10E=0.05,eRank=607.7,q75/q25=3.15 vo_prod:H=0.6902,top10E=0.09,eRank=223.9,q75/q25=inf train_time:705841ms step_avg:90.49ms +[2025-08-22 18:00:37] [Rank 0] step:7801/10000 train_time:705859ms step_avg:90.48ms +[2025-08-22 18:00:37] [Rank 0] step:7801/10000 train_time:705859ms step_avg:90.48ms +[2025-08-22 18:00:39] [Rank 0] step:7821/10000 train_time:707756ms step_avg:90.49ms +[2025-08-22 18:00:39] [Rank 0] step:7821/10000 train_time:707756ms step_avg:90.49ms +[2025-08-22 18:00:41] [Rank 0] step:7841/10000 train_time:709647ms step_avg:90.50ms +[2025-08-22 
18:00:41] [Rank 0] step:7841/10000 train_time:709647ms step_avg:90.50ms +[2025-08-22 18:00:43] [Rank 0] step:7861/10000 train_time:711549ms step_avg:90.52ms +[2025-08-22 18:00:43] [Rank 0] step:7861/10000 train_time:711549ms step_avg:90.52ms +[2025-08-22 18:00:45] [Rank 0] step:7881/10000 train_time:713455ms step_avg:90.53ms +[2025-08-22 18:00:45] [Rank 0] step:7881/10000 train_time:713455ms step_avg:90.53ms +[2025-08-22 18:00:46] [Rank 0] step:7901/10000 train_time:715347ms step_avg:90.54ms +[2025-08-22 18:00:46] [Rank 0] step:7901/10000 train_time:715347ms step_avg:90.54ms +[2025-08-22 18:00:48] [Rank 0] step:7921/10000 train_time:717251ms step_avg:90.55ms +[2025-08-22 18:00:48] [Rank 0] step:7921/10000 train_time:717251ms step_avg:90.55ms +[2025-08-22 18:00:50] [Rank 0] step:7941/10000 train_time:719157ms step_avg:90.56ms +[2025-08-22 18:00:50] [Rank 0] step:7941/10000 train_time:719157ms step_avg:90.56ms +[2025-08-22 18:00:52] [Rank 0] step:7961/10000 train_time:721059ms step_avg:90.57ms +[2025-08-22 18:00:52] [Rank 0] step:7961/10000 train_time:721059ms step_avg:90.57ms +[2025-08-22 18:00:54] [Rank 0] step:7981/10000 train_time:722951ms step_avg:90.58ms +[2025-08-22 18:00:54] [Rank 0] step:7981/10000 train_time:722951ms step_avg:90.58ms +[2025-08-22 18:00:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:00:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:01:10] [Rank 0] PRINT: step:8000/10000 val_loss:3.6155 svd_entropy: attn_qk:H=0.8000,top10E=0.22,eRank=224.5,q75/q25=43.31 attn_vo:H=0.8314,top10E=0.06,eRank=396.4,q75/q25=inf mlp_w1:H=0.9181,top10E=0.12,eRank=451.1,q75/q25=4.23 mlp_w2:H=0.9645,top10E=0.05,eRank=607.6,q75/q25=3.16 vo_prod:H=0.6905,top10E=0.09,eRank=224.4,q75/q25=inf train_time:724868ms step_avg:90.61ms +[2025-08-22 18:01:10] [Rank 0] PRINT: step:8000/10000 val_loss:3.6155 svd_entropy: attn_qk:H=0.8000,top10E=0.22,eRank=224.5,q75/q25=43.31 attn_vo:H=0.8314,top10E=0.06,eRank=396.4,q75/q25=inf mlp_w1:H=0.9181,top10E=0.12,eRank=451.1,q75/q25=4.23 mlp_w2:H=0.9645,top10E=0.05,eRank=607.6,q75/q25=3.16 vo_prod:H=0.6905,top10E=0.09,eRank=224.4,q75/q25=inf train_time:724868ms step_avg:90.61ms +[2025-08-22 18:01:10] [Rank 0] step:8001/10000 train_time:724887ms step_avg:90.60ms +[2025-08-22 18:01:10] [Rank 0] step:8001/10000 train_time:724887ms step_avg:90.60ms +[2025-08-22 18:01:12] [Rank 0] step:8021/10000 train_time:726777ms step_avg:90.61ms +[2025-08-22 18:01:12] [Rank 0] step:8021/10000 train_time:726777ms step_avg:90.61ms +[2025-08-22 18:01:13] [Rank 0] step:8041/10000 train_time:728681ms step_avg:90.62ms +[2025-08-22 18:01:13] [Rank 0] step:8041/10000 train_time:728681ms step_avg:90.62ms +[2025-08-22 18:01:15] [Rank 0] step:8061/10000 train_time:730582ms step_avg:90.63ms +[2025-08-22 18:01:15] [Rank 0] step:8061/10000 train_time:730582ms step_avg:90.63ms +[2025-08-22 18:01:17] [Rank 0] step:8081/10000 train_time:732470ms step_avg:90.64ms +[2025-08-22 18:01:17] [Rank 0] step:8081/10000 train_time:732470ms step_avg:90.64ms +[2025-08-22 18:01:19] [Rank 0] step:8101/10000 train_time:734376ms step_avg:90.65ms +[2025-08-22 18:01:19] [Rank 0] step:8101/10000 train_time:734376ms step_avg:90.65ms +[2025-08-22 18:01:21] [Rank 0] step:8121/10000 train_time:736343ms step_avg:90.67ms +[2025-08-22 18:01:21] [Rank 0] step:8121/10000 train_time:736343ms step_avg:90.67ms +[2025-08-22 18:01:23] [Rank 0] 
step:8141/10000 train_time:738266ms step_avg:90.68ms +[2025-08-22 18:01:23] [Rank 0] step:8141/10000 train_time:738266ms step_avg:90.68ms +[2025-08-22 18:01:25] [Rank 0] step:8161/10000 train_time:740243ms step_avg:90.70ms +[2025-08-22 18:01:25] [Rank 0] step:8161/10000 train_time:740243ms step_avg:90.70ms +[2025-08-22 18:01:27] [Rank 0] step:8181/10000 train_time:742169ms step_avg:90.72ms +[2025-08-22 18:01:27] [Rank 0] step:8181/10000 train_time:742169ms step_avg:90.72ms +[2025-08-22 18:01:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:01:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:01:42] [Rank 0] PRINT: step:8200/10000 val_loss:3.6028 svd_entropy: attn_qk:H=0.8002,top10E=0.22,eRank=224.8,q75/q25=43.35 attn_vo:H=0.8314,top10E=0.06,eRank=396.6,q75/q25=inf mlp_w1:H=0.9184,top10E=0.12,eRank=452.0,q75/q25=4.22 mlp_w2:H=0.9645,top10E=0.05,eRank=607.5,q75/q25=3.16 vo_prod:H=0.6907,top10E=0.09,eRank=224.8,q75/q25=inf train_time:744136ms step_avg:90.75ms +[2025-08-22 18:01:42] [Rank 0] PRINT: step:8200/10000 val_loss:3.6028 svd_entropy: attn_qk:H=0.8002,top10E=0.22,eRank=224.8,q75/q25=43.35 attn_vo:H=0.8314,top10E=0.06,eRank=396.6,q75/q25=inf mlp_w1:H=0.9184,top10E=0.12,eRank=452.0,q75/q25=4.22 mlp_w2:H=0.9645,top10E=0.05,eRank=607.5,q75/q25=3.16 vo_prod:H=0.6907,top10E=0.09,eRank=224.8,q75/q25=inf train_time:744136ms step_avg:90.75ms +[2025-08-22 18:01:43] [Rank 0] step:8201/10000 train_time:744153ms step_avg:90.74ms +[2025-08-22 18:01:43] [Rank 0] step:8201/10000 train_time:744153ms step_avg:90.74ms +[2025-08-22 18:01:45] [Rank 0] step:8221/10000 train_time:746076ms step_avg:90.75ms +[2025-08-22 18:01:45] [Rank 0] step:8221/10000 train_time:746076ms step_avg:90.75ms +[2025-08-22 18:01:46] [Rank 0] step:8241/10000 train_time:748011ms step_avg:90.77ms +[2025-08-22 
18:01:46] [Rank 0] step:8241/10000 train_time:748011ms step_avg:90.77ms +[2025-08-22 18:01:48] [Rank 0] step:8261/10000 train_time:749947ms step_avg:90.78ms +[2025-08-22 18:01:48] [Rank 0] step:8261/10000 train_time:749947ms step_avg:90.78ms +[2025-08-22 18:01:50] [Rank 0] step:8281/10000 train_time:751875ms step_avg:90.80ms +[2025-08-22 18:01:50] [Rank 0] step:8281/10000 train_time:751875ms step_avg:90.80ms +[2025-08-22 18:01:52] [Rank 0] step:8301/10000 train_time:753804ms step_avg:90.81ms +[2025-08-22 18:01:52] [Rank 0] step:8301/10000 train_time:753804ms step_avg:90.81ms +[2025-08-22 18:01:54] [Rank 0] step:8321/10000 train_time:755732ms step_avg:90.82ms +[2025-08-22 18:01:54] [Rank 0] step:8321/10000 train_time:755732ms step_avg:90.82ms +[2025-08-22 18:01:56] [Rank 0] step:8341/10000 train_time:757671ms step_avg:90.84ms +[2025-08-22 18:01:56] [Rank 0] step:8341/10000 train_time:757671ms step_avg:90.84ms +[2025-08-22 18:01:58] [Rank 0] step:8361/10000 train_time:759602ms step_avg:90.85ms +[2025-08-22 18:01:58] [Rank 0] step:8361/10000 train_time:759602ms step_avg:90.85ms +[2025-08-22 18:02:00] [Rank 0] step:8381/10000 train_time:761530ms step_avg:90.86ms +[2025-08-22 18:02:00] [Rank 0] step:8381/10000 train_time:761530ms step_avg:90.86ms +[2025-08-22 18:02:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:02:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:02:16] [Rank 0] PRINT: step:8400/10000 val_loss:3.5893 svd_entropy: attn_qk:H=0.8004,top10E=0.22,eRank=225.0,q75/q25=43.10 attn_vo:H=0.8315,top10E=0.06,eRank=396.7,q75/q25=inf mlp_w1:H=0.9187,top10E=0.12,eRank=452.8,q75/q25=4.22 mlp_w2:H=0.9645,top10E=0.05,eRank=607.3,q75/q25=3.16 vo_prod:H=0.6908,top10E=0.09,eRank=225.1,q75/q25=inf train_time:763470ms step_avg:90.89ms +[2025-08-22 18:02:16] [Rank 0] PRINT: step:8400/10000 val_loss:3.5893 svd_entropy: attn_qk:H=0.8004,top10E=0.22,eRank=225.0,q75/q25=43.10 attn_vo:H=0.8315,top10E=0.06,eRank=396.7,q75/q25=inf mlp_w1:H=0.9187,top10E=0.12,eRank=452.8,q75/q25=4.22 mlp_w2:H=0.9645,top10E=0.05,eRank=607.3,q75/q25=3.16 vo_prod:H=0.6908,top10E=0.09,eRank=225.1,q75/q25=inf train_time:763470ms step_avg:90.89ms +[2025-08-22 18:02:16] [Rank 0] step:8401/10000 train_time:763488ms step_avg:90.88ms +[2025-08-22 18:02:16] [Rank 0] step:8401/10000 train_time:763488ms step_avg:90.88ms +[2025-08-22 18:02:18] [Rank 0] step:8421/10000 train_time:765401ms step_avg:90.89ms +[2025-08-22 18:02:18] [Rank 0] step:8421/10000 train_time:765401ms step_avg:90.89ms +[2025-08-22 18:02:20] [Rank 0] step:8441/10000 train_time:767325ms step_avg:90.90ms +[2025-08-22 18:02:20] [Rank 0] step:8441/10000 train_time:767325ms step_avg:90.90ms +[2025-08-22 18:02:21] [Rank 0] step:8461/10000 train_time:769245ms step_avg:90.92ms +[2025-08-22 18:02:21] [Rank 0] step:8461/10000 train_time:769245ms step_avg:90.92ms +[2025-08-22 18:02:23] [Rank 0] step:8481/10000 train_time:771176ms step_avg:90.93ms +[2025-08-22 18:02:23] [Rank 0] step:8481/10000 train_time:771176ms step_avg:90.93ms +[2025-08-22 18:02:25] [Rank 0] step:8501/10000 train_time:773197ms step_avg:90.95ms +[2025-08-22 18:02:25] [Rank 0] step:8501/10000 train_time:773197ms step_avg:90.95ms +[2025-08-22 18:02:27] [Rank 0] step:8521/10000 train_time:775125ms step_avg:90.97ms +[2025-08-22 18:02:27] [Rank 0] step:8521/10000 train_time:775125ms step_avg:90.97ms +[2025-08-22 18:02:29] [Rank 0] 
step:8541/10000 train_time:777066ms step_avg:90.98ms +[2025-08-22 18:02:29] [Rank 0] step:8541/10000 train_time:777066ms step_avg:90.98ms +[2025-08-22 18:02:31] [Rank 0] step:8561/10000 train_time:778999ms step_avg:90.99ms +[2025-08-22 18:02:31] [Rank 0] step:8561/10000 train_time:778999ms step_avg:90.99ms +[2025-08-22 18:02:33] [Rank 0] step:8581/10000 train_time:780929ms step_avg:91.01ms +[2025-08-22 18:02:33] [Rank 0] step:8581/10000 train_time:780929ms step_avg:91.01ms +[2025-08-22 18:02:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:02:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:02:49] [Rank 0] PRINT: step:8600/10000 val_loss:3.5788 svd_entropy: attn_qk:H=0.8006,top10E=0.22,eRank=225.2,q75/q25=42.98 attn_vo:H=0.8315,top10E=0.06,eRank=396.9,q75/q25=inf mlp_w1:H=0.9189,top10E=0.12,eRank=453.5,q75/q25=4.21 mlp_w2:H=0.9644,top10E=0.05,eRank=607.2,q75/q25=3.16 vo_prod:H=0.6910,top10E=0.09,eRank=225.4,q75/q25=inf train_time:782862ms step_avg:91.03ms +[2025-08-22 18:02:49] [Rank 0] PRINT: step:8600/10000 val_loss:3.5788 svd_entropy: attn_qk:H=0.8006,top10E=0.22,eRank=225.2,q75/q25=42.98 attn_vo:H=0.8315,top10E=0.06,eRank=396.9,q75/q25=inf mlp_w1:H=0.9189,top10E=0.12,eRank=453.5,q75/q25=4.21 mlp_w2:H=0.9644,top10E=0.05,eRank=607.2,q75/q25=3.16 vo_prod:H=0.6910,top10E=0.09,eRank=225.4,q75/q25=inf train_time:782862ms step_avg:91.03ms +[2025-08-22 18:02:49] [Rank 0] step:8601/10000 train_time:782879ms step_avg:91.02ms +[2025-08-22 18:02:49] [Rank 0] step:8601/10000 train_time:782879ms step_avg:91.02ms +[2025-08-22 18:02:51] [Rank 0] step:8621/10000 train_time:784795ms step_avg:91.03ms +[2025-08-22 18:02:51] [Rank 0] step:8621/10000 train_time:784795ms step_avg:91.03ms +[2025-08-22 18:02:53] [Rank 0] step:8641/10000 train_time:786717ms step_avg:91.04ms +[2025-08-22 
18:02:53] [Rank 0] step:8641/10000 train_time:786717ms step_avg:91.04ms +[2025-08-22 18:02:55] [Rank 0] step:8661/10000 train_time:788641ms step_avg:91.06ms +[2025-08-22 18:02:55] [Rank 0] step:8661/10000 train_time:788641ms step_avg:91.06ms +[2025-08-22 18:02:56] [Rank 0] step:8681/10000 train_time:790570ms step_avg:91.07ms +[2025-08-22 18:02:56] [Rank 0] step:8681/10000 train_time:790570ms step_avg:91.07ms +[2025-08-22 18:02:58] [Rank 0] step:8701/10000 train_time:792489ms step_avg:91.08ms +[2025-08-22 18:02:58] [Rank 0] step:8701/10000 train_time:792489ms step_avg:91.08ms +[2025-08-22 18:03:00] [Rank 0] step:8721/10000 train_time:794419ms step_avg:91.09ms +[2025-08-22 18:03:00] [Rank 0] step:8721/10000 train_time:794419ms step_avg:91.09ms +[2025-08-22 18:03:02] [Rank 0] step:8741/10000 train_time:796334ms step_avg:91.10ms +[2025-08-22 18:03:02] [Rank 0] step:8741/10000 train_time:796334ms step_avg:91.10ms +[2025-08-22 18:03:04] [Rank 0] step:8761/10000 train_time:798260ms step_avg:91.12ms +[2025-08-22 18:03:04] [Rank 0] step:8761/10000 train_time:798260ms step_avg:91.12ms +[2025-08-22 18:03:06] [Rank 0] step:8781/10000 train_time:800189ms step_avg:91.13ms +[2025-08-22 18:03:06] [Rank 0] step:8781/10000 train_time:800189ms step_avg:91.13ms +[2025-08-22 18:03:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:03:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:03:22] [Rank 0] PRINT: step:8800/10000 val_loss:3.5664 svd_entropy: attn_qk:H=0.8008,top10E=0.22,eRank=225.4,q75/q25=42.89 attn_vo:H=0.8316,top10E=0.06,eRank=397.0,q75/q25=inf mlp_w1:H=0.9191,top10E=0.12,eRank=454.1,q75/q25=4.20 mlp_w2:H=0.9644,top10E=0.05,eRank=607.2,q75/q25=3.17 vo_prod:H=0.6912,top10E=0.09,eRank=225.7,q75/q25=inf train_time:802127ms step_avg:91.15ms +[2025-08-22 18:03:22] [Rank 0] PRINT: step:8800/10000 val_loss:3.5664 svd_entropy: attn_qk:H=0.8008,top10E=0.22,eRank=225.4,q75/q25=42.89 attn_vo:H=0.8316,top10E=0.06,eRank=397.0,q75/q25=inf mlp_w1:H=0.9191,top10E=0.12,eRank=454.1,q75/q25=4.20 mlp_w2:H=0.9644,top10E=0.05,eRank=607.2,q75/q25=3.17 vo_prod:H=0.6912,top10E=0.09,eRank=225.7,q75/q25=inf train_time:802127ms step_avg:91.15ms +[2025-08-22 18:03:22] [Rank 0] step:8801/10000 train_time:802145ms step_avg:91.14ms +[2025-08-22 18:03:22] [Rank 0] step:8801/10000 train_time:802145ms step_avg:91.14ms +[2025-08-22 18:03:24] [Rank 0] step:8821/10000 train_time:804065ms step_avg:91.15ms +[2025-08-22 18:03:24] [Rank 0] step:8821/10000 train_time:804065ms step_avg:91.15ms +[2025-08-22 18:03:26] [Rank 0] step:8841/10000 train_time:806007ms step_avg:91.17ms +[2025-08-22 18:03:26] [Rank 0] step:8841/10000 train_time:806007ms step_avg:91.17ms +[2025-08-22 18:03:28] [Rank 0] step:8861/10000 train_time:807982ms step_avg:91.18ms +[2025-08-22 18:03:28] [Rank 0] step:8861/10000 train_time:807982ms step_avg:91.18ms +[2025-08-22 18:03:30] [Rank 0] step:8881/10000 train_time:809909ms step_avg:91.20ms +[2025-08-22 18:03:30] [Rank 0] step:8881/10000 train_time:809909ms step_avg:91.20ms +[2025-08-22 18:03:32] [Rank 0] step:8901/10000 train_time:811838ms step_avg:91.21ms +[2025-08-22 18:03:32] [Rank 0] step:8901/10000 train_time:811838ms step_avg:91.21ms +[2025-08-22 18:03:34] [Rank 0] step:8921/10000 train_time:813781ms step_avg:91.22ms +[2025-08-22 18:03:34] [Rank 0] step:8921/10000 train_time:813781ms step_avg:91.22ms +[2025-08-22 18:03:36] [Rank 0] 
step:8941/10000 train_time:815713ms step_avg:91.23ms +[2025-08-22 18:03:36] [Rank 0] step:8941/10000 train_time:815713ms step_avg:91.23ms +[2025-08-22 18:03:37] [Rank 0] step:8961/10000 train_time:817640ms step_avg:91.24ms +[2025-08-22 18:03:37] [Rank 0] step:8961/10000 train_time:817640ms step_avg:91.24ms +[2025-08-22 18:03:39] [Rank 0] step:8981/10000 train_time:819565ms step_avg:91.26ms +[2025-08-22 18:03:39] [Rank 0] step:8981/10000 train_time:819565ms step_avg:91.26ms +[2025-08-22 18:03:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:03:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:03:55] [Rank 0] PRINT: step:9000/10000 val_loss:3.5554 svd_entropy: attn_qk:H=0.8009,top10E=0.22,eRank=225.5,q75/q25=42.81 attn_vo:H=0.8316,top10E=0.06,eRank=397.1,q75/q25=inf mlp_w1:H=0.9193,top10E=0.12,eRank=454.6,q75/q25=4.20 mlp_w2:H=0.9644,top10E=0.05,eRank=607.1,q75/q25=3.17 vo_prod:H=0.6913,top10E=0.09,eRank=225.9,q75/q25=inf train_time:821504ms step_avg:91.28ms +[2025-08-22 18:03:55] [Rank 0] PRINT: step:9000/10000 val_loss:3.5554 svd_entropy: attn_qk:H=0.8009,top10E=0.22,eRank=225.5,q75/q25=42.81 attn_vo:H=0.8316,top10E=0.06,eRank=397.1,q75/q25=inf mlp_w1:H=0.9193,top10E=0.12,eRank=454.6,q75/q25=4.20 mlp_w2:H=0.9644,top10E=0.05,eRank=607.1,q75/q25=3.17 vo_prod:H=0.6913,top10E=0.09,eRank=225.9,q75/q25=inf train_time:821504ms step_avg:91.28ms +[2025-08-22 18:03:55] [Rank 0] step:9001/10000 train_time:821522ms step_avg:91.27ms +[2025-08-22 18:03:55] [Rank 0] step:9001/10000 train_time:821522ms step_avg:91.27ms +[2025-08-22 18:03:57] [Rank 0] step:9021/10000 train_time:823449ms step_avg:91.28ms +[2025-08-22 18:03:57] [Rank 0] step:9021/10000 train_time:823449ms step_avg:91.28ms +[2025-08-22 18:03:59] [Rank 0] step:9041/10000 train_time:825380ms step_avg:91.29ms +[2025-08-22 
18:03:59] [Rank 0] step:9041/10000 train_time:825380ms step_avg:91.29ms +[2025-08-22 18:04:01] [Rank 0] step:9061/10000 train_time:827316ms step_avg:91.31ms +[2025-08-22 18:04:01] [Rank 0] step:9061/10000 train_time:827316ms step_avg:91.31ms +[2025-08-22 18:04:03] [Rank 0] step:9081/10000 train_time:829255ms step_avg:91.32ms +[2025-08-22 18:04:03] [Rank 0] step:9081/10000 train_time:829255ms step_avg:91.32ms +[2025-08-22 18:04:05] [Rank 0] step:9101/10000 train_time:831202ms step_avg:91.33ms +[2025-08-22 18:04:05] [Rank 0] step:9101/10000 train_time:831202ms step_avg:91.33ms +[2025-08-22 18:04:07] [Rank 0] step:9121/10000 train_time:833139ms step_avg:91.34ms +[2025-08-22 18:04:07] [Rank 0] step:9121/10000 train_time:833139ms step_avg:91.34ms +[2025-08-22 18:04:09] [Rank 0] step:9141/10000 train_time:835062ms step_avg:91.35ms +[2025-08-22 18:04:09] [Rank 0] step:9141/10000 train_time:835062ms step_avg:91.35ms +[2025-08-22 18:04:11] [Rank 0] step:9161/10000 train_time:836989ms step_avg:91.36ms +[2025-08-22 18:04:11] [Rank 0] step:9161/10000 train_time:836989ms step_avg:91.36ms +[2025-08-22 18:04:13] [Rank 0] step:9181/10000 train_time:838955ms step_avg:91.38ms +[2025-08-22 18:04:13] [Rank 0] step:9181/10000 train_time:838955ms step_avg:91.38ms +[2025-08-22 18:04:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:04:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:04:28] [Rank 0] PRINT: step:9200/10000 val_loss:3.5455 svd_entropy: attn_qk:H=0.8010,top10E=0.22,eRank=225.7,q75/q25=42.95 attn_vo:H=0.8316,top10E=0.06,eRank=397.2,q75/q25=inf mlp_w1:H=0.9195,top10E=0.12,eRank=455.1,q75/q25=4.19 mlp_w2:H=0.9644,top10E=0.05,eRank=607.1,q75/q25=3.17 vo_prod:H=0.6914,top10E=0.09,eRank=226.1,q75/q25=inf train_time:840895ms step_avg:91.40ms +[2025-08-22 18:04:28] [Rank 0] PRINT: step:9200/10000 val_loss:3.5455 svd_entropy: attn_qk:H=0.8010,top10E=0.22,eRank=225.7,q75/q25=42.95 attn_vo:H=0.8316,top10E=0.06,eRank=397.2,q75/q25=inf mlp_w1:H=0.9195,top10E=0.12,eRank=455.1,q75/q25=4.19 mlp_w2:H=0.9644,top10E=0.05,eRank=607.1,q75/q25=3.17 vo_prod:H=0.6914,top10E=0.09,eRank=226.1,q75/q25=inf train_time:840895ms step_avg:91.40ms +[2025-08-22 18:04:28] [Rank 0] step:9201/10000 train_time:840911ms step_avg:91.39ms +[2025-08-22 18:04:28] [Rank 0] step:9201/10000 train_time:840911ms step_avg:91.39ms +[2025-08-22 18:04:30] [Rank 0] step:9221/10000 train_time:843011ms step_avg:91.42ms +[2025-08-22 18:04:30] [Rank 0] step:9221/10000 train_time:843011ms step_avg:91.42ms +[2025-08-22 18:04:32] [Rank 0] step:9241/10000 train_time:844860ms step_avg:91.43ms +[2025-08-22 18:04:32] [Rank 0] step:9241/10000 train_time:844860ms step_avg:91.43ms +[2025-08-22 18:04:34] [Rank 0] step:9261/10000 train_time:846800ms step_avg:91.44ms +[2025-08-22 18:04:34] [Rank 0] step:9261/10000 train_time:846800ms step_avg:91.44ms +[2025-08-22 18:04:36] [Rank 0] step:9281/10000 train_time:848723ms step_avg:91.45ms +[2025-08-22 18:04:36] [Rank 0] step:9281/10000 train_time:848723ms step_avg:91.45ms +[2025-08-22 18:04:38] [Rank 0] step:9301/10000 train_time:850650ms step_avg:91.46ms +[2025-08-22 18:04:38] [Rank 0] step:9301/10000 train_time:850650ms step_avg:91.46ms +[2025-08-22 18:04:40] [Rank 0] step:9321/10000 train_time:852585ms step_avg:91.47ms +[2025-08-22 18:04:40] [Rank 0] step:9321/10000 train_time:852585ms step_avg:91.47ms +[2025-08-22 18:04:42] [Rank 0] 
step:9341/10000 train_time:854518ms step_avg:91.48ms +[2025-08-22 18:04:42] [Rank 0] step:9341/10000 train_time:854518ms step_avg:91.48ms +[2025-08-22 18:04:44] [Rank 0] step:9361/10000 train_time:856457ms step_avg:91.49ms +[2025-08-22 18:04:44] [Rank 0] step:9361/10000 train_time:856457ms step_avg:91.49ms +[2025-08-22 18:04:46] [Rank 0] step:9381/10000 train_time:858402ms step_avg:91.50ms +[2025-08-22 18:04:46] [Rank 0] step:9381/10000 train_time:858402ms step_avg:91.50ms +[2025-08-22 18:04:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:04:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:05:01] [Rank 0] PRINT: step:9400/10000 val_loss:3.5359 svd_entropy: attn_qk:H=0.8011,top10E=0.22,eRank=225.8,q75/q25=42.79 attn_vo:H=0.8317,top10E=0.06,eRank=397.3,q75/q25=inf mlp_w1:H=0.9196,top10E=0.12,eRank=455.5,q75/q25=4.19 mlp_w2:H=0.9644,top10E=0.05,eRank=607.1,q75/q25=3.17 vo_prod:H=0.6915,top10E=0.09,eRank=226.3,q75/q25=inf train_time:860351ms step_avg:91.53ms +[2025-08-22 18:05:01] [Rank 0] PRINT: step:9400/10000 val_loss:3.5359 svd_entropy: attn_qk:H=0.8011,top10E=0.22,eRank=225.8,q75/q25=42.79 attn_vo:H=0.8317,top10E=0.06,eRank=397.3,q75/q25=inf mlp_w1:H=0.9196,top10E=0.12,eRank=455.5,q75/q25=4.19 mlp_w2:H=0.9644,top10E=0.05,eRank=607.1,q75/q25=3.17 vo_prod:H=0.6915,top10E=0.09,eRank=226.3,q75/q25=inf train_time:860351ms step_avg:91.53ms +[2025-08-22 18:05:01] [Rank 0] step:9401/10000 train_time:860370ms step_avg:91.52ms +[2025-08-22 18:05:01] [Rank 0] step:9401/10000 train_time:860370ms step_avg:91.52ms +[2025-08-22 18:05:03] [Rank 0] step:9421/10000 train_time:862289ms step_avg:91.53ms +[2025-08-22 18:05:03] [Rank 0] step:9421/10000 train_time:862289ms step_avg:91.53ms +[2025-08-22 18:05:05] [Rank 0] step:9441/10000 train_time:864220ms step_avg:91.54ms +[2025-08-22 
18:05:05] [Rank 0] step:9441/10000 train_time:864220ms step_avg:91.54ms +[2025-08-22 18:05:07] [Rank 0] step:9461/10000 train_time:866154ms step_avg:91.55ms +[2025-08-22 18:05:07] [Rank 0] step:9461/10000 train_time:866154ms step_avg:91.55ms +[2025-08-22 18:05:09] [Rank 0] step:9481/10000 train_time:868090ms step_avg:91.56ms +[2025-08-22 18:05:09] [Rank 0] step:9481/10000 train_time:868090ms step_avg:91.56ms +[2025-08-22 18:05:11] [Rank 0] step:9501/10000 train_time:870033ms step_avg:91.57ms +[2025-08-22 18:05:11] [Rank 0] step:9501/10000 train_time:870033ms step_avg:91.57ms +[2025-08-22 18:05:13] [Rank 0] step:9521/10000 train_time:871955ms step_avg:91.58ms +[2025-08-22 18:05:13] [Rank 0] step:9521/10000 train_time:871955ms step_avg:91.58ms +[2025-08-22 18:05:15] [Rank 0] step:9541/10000 train_time:873886ms step_avg:91.59ms +[2025-08-22 18:05:15] [Rank 0] step:9541/10000 train_time:873886ms step_avg:91.59ms +[2025-08-22 18:05:17] [Rank 0] step:9561/10000 train_time:875809ms step_avg:91.60ms +[2025-08-22 18:05:17] [Rank 0] step:9561/10000 train_time:875809ms step_avg:91.60ms +[2025-08-22 18:05:19] [Rank 0] step:9581/10000 train_time:877739ms step_avg:91.61ms +[2025-08-22 18:05:19] [Rank 0] step:9581/10000 train_time:877739ms step_avg:91.61ms +[2025-08-22 18:05:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:05:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:05:34] [Rank 0] PRINT: step:9600/10000 val_loss:3.5266 svd_entropy: attn_qk:H=0.8012,top10E=0.22,eRank=225.9,q75/q25=42.76 attn_vo:H=0.8317,top10E=0.06,eRank=397.3,q75/q25=inf mlp_w1:H=0.9197,top10E=0.12,eRank=455.9,q75/q25=4.19 mlp_w2:H=0.9644,top10E=0.05,eRank=607.0,q75/q25=3.17 vo_prod:H=0.6916,top10E=0.09,eRank=226.5,q75/q25=inf train_time:879694ms step_avg:91.63ms +[2025-08-22 18:05:34] [Rank 0] PRINT: step:9600/10000 val_loss:3.5266 svd_entropy: attn_qk:H=0.8012,top10E=0.22,eRank=225.9,q75/q25=42.76 attn_vo:H=0.8317,top10E=0.06,eRank=397.3,q75/q25=inf mlp_w1:H=0.9197,top10E=0.12,eRank=455.9,q75/q25=4.19 mlp_w2:H=0.9644,top10E=0.05,eRank=607.0,q75/q25=3.17 vo_prod:H=0.6916,top10E=0.09,eRank=226.5,q75/q25=inf train_time:879694ms step_avg:91.63ms +[2025-08-22 18:05:34] [Rank 0] step:9601/10000 train_time:879712ms step_avg:91.63ms +[2025-08-22 18:05:34] [Rank 0] step:9601/10000 train_time:879712ms step_avg:91.63ms +[2025-08-22 18:05:36] [Rank 0] step:9621/10000 train_time:881699ms step_avg:91.64ms +[2025-08-22 18:05:36] [Rank 0] step:9621/10000 train_time:881699ms step_avg:91.64ms +[2025-08-22 18:05:38] [Rank 0] step:9641/10000 train_time:883629ms step_avg:91.65ms +[2025-08-22 18:05:38] [Rank 0] step:9641/10000 train_time:883629ms step_avg:91.65ms +[2025-08-22 18:05:40] [Rank 0] step:9661/10000 train_time:885589ms step_avg:91.67ms +[2025-08-22 18:05:40] [Rank 0] step:9661/10000 train_time:885589ms step_avg:91.67ms +[2025-08-22 18:05:42] [Rank 0] step:9681/10000 train_time:887544ms step_avg:91.68ms +[2025-08-22 18:05:42] [Rank 0] step:9681/10000 train_time:887544ms step_avg:91.68ms +[2025-08-22 18:05:44] [Rank 0] step:9701/10000 train_time:889513ms step_avg:91.69ms +[2025-08-22 18:05:44] [Rank 0] step:9701/10000 train_time:889513ms step_avg:91.69ms +[2025-08-22 18:05:46] [Rank 0] step:9721/10000 train_time:891467ms step_avg:91.71ms +[2025-08-22 18:05:46] [Rank 0] step:9721/10000 train_time:891467ms step_avg:91.71ms +[2025-08-22 18:05:48] [Rank 0] 
step:9741/10000 train_time:893441ms step_avg:91.72ms +[2025-08-22 18:05:48] [Rank 0] step:9741/10000 train_time:893441ms step_avg:91.72ms +[2025-08-22 18:05:50] [Rank 0] step:9761/10000 train_time:895404ms step_avg:91.73ms +[2025-08-22 18:05:50] [Rank 0] step:9761/10000 train_time:895404ms step_avg:91.73ms +[2025-08-22 18:05:52] [Rank 0] step:9781/10000 train_time:897371ms step_avg:91.75ms +[2025-08-22 18:05:52] [Rank 0] step:9781/10000 train_time:897371ms step_avg:91.75ms +[2025-08-22 18:05:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:05:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:06:08] [Rank 0] PRINT: step:9800/10000 val_loss:3.5186 svd_entropy: attn_qk:H=0.8013,top10E=0.22,eRank=225.9,q75/q25=42.74 attn_vo:H=0.8317,top10E=0.06,eRank=397.4,q75/q25=inf mlp_w1:H=0.9198,top10E=0.12,eRank=456.1,q75/q25=4.19 mlp_w2:H=0.9644,top10E=0.05,eRank=607.0,q75/q25=3.17 vo_prod:H=0.6917,top10E=0.09,eRank=226.6,q75/q25=inf train_time:899358ms step_avg:91.77ms +[2025-08-22 18:06:08] [Rank 0] PRINT: step:9800/10000 val_loss:3.5186 svd_entropy: attn_qk:H=0.8013,top10E=0.22,eRank=225.9,q75/q25=42.74 attn_vo:H=0.8317,top10E=0.06,eRank=397.4,q75/q25=inf mlp_w1:H=0.9198,top10E=0.12,eRank=456.1,q75/q25=4.19 mlp_w2:H=0.9644,top10E=0.05,eRank=607.0,q75/q25=3.17 vo_prod:H=0.6917,top10E=0.09,eRank=226.6,q75/q25=inf train_time:899358ms step_avg:91.77ms +[2025-08-22 18:06:08] [Rank 0] step:9801/10000 train_time:899378ms step_avg:91.76ms +[2025-08-22 18:06:08] [Rank 0] step:9801/10000 train_time:899378ms step_avg:91.76ms +[2025-08-22 18:06:10] [Rank 0] step:9821/10000 train_time:901333ms step_avg:91.78ms +[2025-08-22 18:06:10] [Rank 0] step:9821/10000 train_time:901333ms step_avg:91.78ms +[2025-08-22 18:06:12] [Rank 0] step:9841/10000 train_time:903297ms step_avg:91.79ms +[2025-08-22 
18:06:12] [Rank 0] step:9841/10000 train_time:903297ms step_avg:91.79ms +[2025-08-22 18:06:14] [Rank 0] step:9861/10000 train_time:905240ms step_avg:91.80ms +[2025-08-22 18:06:14] [Rank 0] step:9861/10000 train_time:905240ms step_avg:91.80ms +[2025-08-22 18:06:16] [Rank 0] step:9881/10000 train_time:907189ms step_avg:91.81ms +[2025-08-22 18:06:16] [Rank 0] step:9881/10000 train_time:907189ms step_avg:91.81ms +[2025-08-22 18:06:18] [Rank 0] step:9901/10000 train_time:909157ms step_avg:91.82ms +[2025-08-22 18:06:18] [Rank 0] step:9901/10000 train_time:909157ms step_avg:91.82ms +[2025-08-22 18:06:20] [Rank 0] step:9921/10000 train_time:911110ms step_avg:91.84ms +[2025-08-22 18:06:20] [Rank 0] step:9921/10000 train_time:911110ms step_avg:91.84ms +[2025-08-22 18:06:22] [Rank 0] step:9941/10000 train_time:913079ms step_avg:91.85ms +[2025-08-22 18:06:22] [Rank 0] step:9941/10000 train_time:913079ms step_avg:91.85ms +[2025-08-22 18:06:23] [Rank 0] step:9961/10000 train_time:915029ms step_avg:91.86ms +[2025-08-22 18:06:23] [Rank 0] step:9961/10000 train_time:915029ms step_avg:91.86ms +[2025-08-22 18:06:25] [Rank 0] step:9981/10000 train_time:916995ms step_avg:91.87ms +[2025-08-22 18:06:25] [Rank 0] step:9981/10000 train_time:916995ms step_avg:91.87ms +[2025-08-22 18:06:27] [Rank 0] step:10000/10000 train_time:918866ms step_avg:91.89ms +[2025-08-22 18:06:27] [Rank 0] step:10000/10000 train_time:918866ms step_avg:91.89ms +[2025-08-22 18:06:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:06:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:06:41] [Rank 0] PRINT: step:10000/10000 val_loss:3.5108 svd_entropy: attn_qk:H=0.8013,top10E=0.22,eRank=225.9,q75/q25=42.71 attn_vo:H=0.8317,top10E=0.06,eRank=397.4,q75/q25=inf mlp_w1:H=0.9199,top10E=0.12,eRank=456.3,q75/q25=4.18 mlp_w2:H=0.9644,top10E=0.05,eRank=607.0,q75/q25=3.17 vo_prod:H=0.6918,top10E=0.09,eRank=226.7,q75/q25=inf train_time:918980ms step_avg:91.90ms +[2025-08-22 18:06:41] [Rank 0] PRINT: step:10000/10000 val_loss:3.5108 svd_entropy: attn_qk:H=0.8013,top10E=0.22,eRank=225.9,q75/q25=42.71 attn_vo:H=0.8317,top10E=0.06,eRank=397.4,q75/q25=inf mlp_w1:H=0.9199,top10E=0.12,eRank=456.3,q75/q25=4.18 mlp_w2:H=0.9644,top10E=0.05,eRank=607.0,q75/q25=3.17 vo_prod:H=0.6918,top10E=0.09,eRank=226.7,q75/q25=inf train_time:918980ms step_avg:91.90ms +[2025-08-22 18:06:41] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 18:06:41 2025 --- +[2025-08-22 18:06:41] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 18:06:41 2025 --- +[2025-08-22 18:06:41] [Rank 0] PRINT: Peak memory allocated: 11449 MiB reserved: 16336 MiB +[2025-08-22 18:06:41] [Rank 0] PRINT: Peak memory allocated: 11449 MiB reserved: 16336 MiB diff --git a/logs_svd_gated/mode_8_param_gated_seed_43/config.json b/logs_svd_gated/mode_8_param_gated_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..0fa6ce32658830559011ca4babf9641615b6ca36 --- /dev/null +++ b/logs_svd_gated/mode_8_param_gated_seed_43/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 8, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + "cooldown_frac": 
0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "6f7d2017-3a40-4e2f-a90b-4f4d659f1e3e", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_8_param_gated_seed_43/training_log_6f7d2017-3a40-4e2f-a90b-4f4d659f1e3e.txt b/logs_svd_gated/mode_8_param_gated_seed_43/training_log_6f7d2017-3a40-4e2f-a90b-4f4d659f1e3e.txt new file mode 100644 index 0000000000000000000000000000000000000000..cd8b83b9433f971067d41e2f0e4af57be38a7fb2 --- /dev/null +++ b/logs_svd_gated/mode_8_param_gated_seed_43/training_log_6f7d2017-3a40-4e2f-a90b-4f4d659f1e3e.txt @@ -0,0 +1,2926 @@ +[2025-08-22 22:52:31] [Rank 0] PRINT: --- Script Start: Fri Aug 22 22:52:31 2025 --- +[2025-08-22 22:52:31] [Rank 0] PRINT: --- Script Start: Fri Aug 22 22:52:31 2025 --- +[2025-08-22 22:52:31] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=8, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 22:52:31] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=8, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 22:52:31] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 22:52:31] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 22:52:31] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 22:52:31] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 22:52:31] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_8_param_gated_seed_43 +[2025-08-22 22:52:31] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_8_param_gated_seed_43 +[2025-08-22 22:52:31] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import 
argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k 
= min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 22:52:31] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 22:52:31] [Rank 0] PRINT: Constructing model... +[2025-08-22 22:52:31] [Rank 0] PRINT: Constructing model... +[2025-08-22 22:52:33] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 22:52:33] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 22:52:33] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 22:52:33] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 22:52:33] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 22:52:33] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 22:52:33] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 8 +[2025-08-22 22:52:33] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 8 +[2025-08-22 22:52:33] [Rank 0] PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: 0.05). +[2025-08-22 22:52:33] [Rank 0] PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: 0.05). +[2025-08-22 22:52:33] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 22:52:33] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 22:52:33] [Rank 0] PRINT: Muon optimizer is active with 34 parameters. +[2025-08-22 22:52:33] [Rank 0] PRINT: Muon optimizer is active with 34 parameters. +[2025-08-22 22:52:33] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 22:52:33] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 22:52:33] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 22:52:33] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 22:52:33] [Rank 0] PRINT: Starting warmup... +[2025-08-22 22:52:33] [Rank 0] PRINT: Starting warmup... +[2025-08-22 22:53:18] [Rank 0] PRINT: Warmup complete. +[2025-08-22 22:53:18] [Rank 0] PRINT: Warmup complete. +[2025-08-22 22:53:18] [Rank 0] PRINT: Starting training... +[2025-08-22 22:53:18] [Rank 0] PRINT: Starting training... 
+[2025-08-22 22:53:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:53:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:53:36] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 22:53:36] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 22:53:38] [Rank 0] step:21/10000 train_time:1757ms step_avg:83.65ms +[2025-08-22 22:53:38] [Rank 0] step:21/10000 train_time:1757ms step_avg:83.65ms +[2025-08-22 22:53:40] [Rank 0] step:41/10000 train_time:3473ms step_avg:84.71ms +[2025-08-22 22:53:40] [Rank 0] step:41/10000 train_time:3473ms step_avg:84.71ms +[2025-08-22 22:53:42] [Rank 0] step:61/10000 train_time:5189ms step_avg:85.07ms +[2025-08-22 22:53:42] [Rank 0] step:61/10000 train_time:5189ms step_avg:85.07ms +[2025-08-22 22:53:43] [Rank 0] step:81/10000 train_time:6906ms step_avg:85.26ms +[2025-08-22 22:53:43] [Rank 0] step:81/10000 train_time:6906ms step_avg:85.26ms +[2025-08-22 22:53:45] [Rank 0] step:101/10000 train_time:8624ms step_avg:85.39ms +[2025-08-22 22:53:45] [Rank 0] step:101/10000 train_time:8624ms step_avg:85.39ms +[2025-08-22 22:53:47] [Rank 0] step:121/10000 train_time:10343ms step_avg:85.48ms +[2025-08-22 22:53:47] [Rank 0] step:121/10000 
train_time:10343ms step_avg:85.48ms +[2025-08-22 22:53:48] [Rank 0] step:141/10000 train_time:12065ms step_avg:85.57ms +[2025-08-22 22:53:48] [Rank 0] step:141/10000 train_time:12065ms step_avg:85.57ms +[2025-08-22 22:53:50] [Rank 0] step:161/10000 train_time:13786ms step_avg:85.63ms +[2025-08-22 22:53:50] [Rank 0] step:161/10000 train_time:13786ms step_avg:85.63ms +[2025-08-22 22:53:52] [Rank 0] step:181/10000 train_time:15508ms step_avg:85.68ms +[2025-08-22 22:53:52] [Rank 0] step:181/10000 train_time:15508ms step_avg:85.68ms +[2025-08-22 22:53:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:53:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:54:07] [Rank 0] PRINT: step:200/10000 val_loss:5.8025 svd_entropy: attn_qk:H=0.7128,top10E=0.38,eRank=154.0,q75/q25=18.55 attn_vo:H=0.7092,top10E=0.19,eRank=185.6,q75/q25=inf mlp_w1:H=0.6456,top10E=0.50,eRank=86.5,q75/q25=8.04 mlp_w2:H=0.8795,top10E=0.11,eRank=355.6,q75/q25=12.18 vo_prod:H=0.4777,top10E=0.42,eRank=50.8,q75/q25=inf train_time:17240ms step_avg:86.20ms +[2025-08-22 22:54:07] [Rank 0] PRINT: step:200/10000 val_loss:5.8025 svd_entropy: attn_qk:H=0.7128,top10E=0.38,eRank=154.0,q75/q25=18.55 attn_vo:H=0.7092,top10E=0.19,eRank=185.6,q75/q25=inf mlp_w1:H=0.6456,top10E=0.50,eRank=86.5,q75/q25=8.04 mlp_w2:H=0.8795,top10E=0.11,eRank=355.6,q75/q25=12.18 vo_prod:H=0.4777,top10E=0.42,eRank=50.8,q75/q25=inf train_time:17240ms step_avg:86.20ms +[2025-08-22 22:54:07] [Rank 0] step:201/10000 train_time:17260ms step_avg:85.87ms +[2025-08-22 22:54:07] [Rank 0] step:201/10000 train_time:17260ms step_avg:85.87ms +[2025-08-22 22:54:09] [Rank 0] step:221/10000 train_time:18972ms step_avg:85.85ms +[2025-08-22 22:54:09] [Rank 0] step:221/10000 train_time:18972ms step_avg:85.85ms +[2025-08-22 22:54:10] [Rank 0] step:241/10000 
train_time:20687ms step_avg:85.84ms +[2025-08-22 22:54:10] [Rank 0] step:241/10000 train_time:20687ms step_avg:85.84ms +[2025-08-22 22:54:12] [Rank 0] step:261/10000 train_time:22399ms step_avg:85.82ms +[2025-08-22 22:54:12] [Rank 0] step:261/10000 train_time:22399ms step_avg:85.82ms +[2025-08-22 22:54:14] [Rank 0] step:281/10000 train_time:24112ms step_avg:85.81ms +[2025-08-22 22:54:14] [Rank 0] step:281/10000 train_time:24112ms step_avg:85.81ms +[2025-08-22 22:54:16] [Rank 0] step:301/10000 train_time:25825ms step_avg:85.80ms +[2025-08-22 22:54:16] [Rank 0] step:301/10000 train_time:25825ms step_avg:85.80ms +[2025-08-22 22:54:17] [Rank 0] step:321/10000 train_time:27539ms step_avg:85.79ms +[2025-08-22 22:54:17] [Rank 0] step:321/10000 train_time:27539ms step_avg:85.79ms +[2025-08-22 22:54:19] [Rank 0] step:341/10000 train_time:29252ms step_avg:85.78ms +[2025-08-22 22:54:19] [Rank 0] step:341/10000 train_time:29252ms step_avg:85.78ms +[2025-08-22 22:54:21] [Rank 0] step:361/10000 train_time:30966ms step_avg:85.78ms +[2025-08-22 22:54:21] [Rank 0] step:361/10000 train_time:30966ms step_avg:85.78ms +[2025-08-22 22:54:23] [Rank 0] step:381/10000 train_time:32743ms step_avg:85.94ms +[2025-08-22 22:54:23] [Rank 0] step:381/10000 train_time:32743ms step_avg:85.94ms +[2025-08-22 22:54:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:54:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:54:38] [Rank 0] PRINT: step:400/10000 val_loss:5.3119 svd_entropy: attn_qk:H=0.7241,top10E=0.34,eRank=159.8,q75/q25=28.14 attn_vo:H=0.7440,top10E=0.13,eRank=225.5,q75/q25=inf mlp_w1:H=0.7983,top10E=0.28,eRank=204.9,q75/q25=6.67 mlp_w2:H=0.9574,top10E=0.05,eRank=581.0,q75/q25=3.62 vo_prod:H=0.5470,top10E=0.27,eRank=79.0,q75/q25=inf train_time:34517ms step_avg:86.29ms +[2025-08-22 22:54:38] [Rank 0] PRINT: step:400/10000 val_loss:5.3119 svd_entropy: attn_qk:H=0.7241,top10E=0.34,eRank=159.8,q75/q25=28.14 attn_vo:H=0.7440,top10E=0.13,eRank=225.5,q75/q25=inf mlp_w1:H=0.7983,top10E=0.28,eRank=204.9,q75/q25=6.67 mlp_w2:H=0.9574,top10E=0.05,eRank=581.0,q75/q25=3.62 vo_prod:H=0.5470,top10E=0.27,eRank=79.0,q75/q25=inf train_time:34517ms step_avg:86.29ms +[2025-08-22 22:54:38] [Rank 0] step:401/10000 train_time:34536ms step_avg:86.13ms +[2025-08-22 22:54:38] [Rank 0] step:401/10000 train_time:34536ms step_avg:86.13ms +[2025-08-22 22:54:40] [Rank 0] step:421/10000 train_time:36245ms step_avg:86.09ms +[2025-08-22 22:54:40] [Rank 0] step:421/10000 train_time:36245ms step_avg:86.09ms +[2025-08-22 22:54:41] [Rank 0] step:441/10000 train_time:37955ms step_avg:86.07ms +[2025-08-22 22:54:41] [Rank 0] step:441/10000 train_time:37955ms step_avg:86.07ms +[2025-08-22 22:54:43] [Rank 0] step:461/10000 train_time:39665ms step_avg:86.04ms +[2025-08-22 22:54:43] [Rank 0] step:461/10000 train_time:39665ms step_avg:86.04ms +[2025-08-22 22:54:45] [Rank 0] step:481/10000 train_time:41376ms step_avg:86.02ms +[2025-08-22 22:54:45] [Rank 0] step:481/10000 train_time:41376ms step_avg:86.02ms +[2025-08-22 22:54:46] [Rank 0] step:501/10000 train_time:43087ms step_avg:86.00ms +[2025-08-22 22:54:46] [Rank 0] step:501/10000 train_time:43087ms step_avg:86.00ms +[2025-08-22 22:54:48] [Rank 0] step:521/10000 train_time:44800ms step_avg:85.99ms +[2025-08-22 22:54:48] [Rank 0] step:521/10000 train_time:44800ms step_avg:85.99ms +[2025-08-22 22:54:50] [Rank 0] step:541/10000 train_time:46515ms 
step_avg:85.98ms +[2025-08-22 22:54:50] [Rank 0] step:541/10000 train_time:46515ms step_avg:85.98ms +[2025-08-22 22:54:52] [Rank 0] step:561/10000 train_time:48230ms step_avg:85.97ms +[2025-08-22 22:54:52] [Rank 0] step:561/10000 train_time:48230ms step_avg:85.97ms +[2025-08-22 22:54:53] [Rank 0] step:581/10000 train_time:49944ms step_avg:85.96ms +[2025-08-22 22:54:53] [Rank 0] step:581/10000 train_time:49944ms step_avg:85.96ms +[2025-08-22 22:54:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:54:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:55:09] [Rank 0] PRINT: step:600/10000 val_loss:5.0669 svd_entropy: attn_qk:H=0.7354,top10E=0.31,eRank=167.0,q75/q25=34.35 attn_vo:H=0.7768,top10E=0.10,eRank=274.9,q75/q25=inf mlp_w1:H=0.8408,top10E=0.22,eRank=269.3,q75/q25=5.55 mlp_w2:H=0.9668,top10E=0.05,eRank=616.7,q75/q25=3.05 vo_prod:H=0.5932,top10E=0.20,eRank=107.9,q75/q25=inf train_time:51671ms step_avg:86.12ms +[2025-08-22 22:55:09] [Rank 0] PRINT: step:600/10000 val_loss:5.0669 svd_entropy: attn_qk:H=0.7354,top10E=0.31,eRank=167.0,q75/q25=34.35 attn_vo:H=0.7768,top10E=0.10,eRank=274.9,q75/q25=inf mlp_w1:H=0.8408,top10E=0.22,eRank=269.3,q75/q25=5.55 mlp_w2:H=0.9668,top10E=0.05,eRank=616.7,q75/q25=3.05 vo_prod:H=0.5932,top10E=0.20,eRank=107.9,q75/q25=inf train_time:51671ms step_avg:86.12ms +[2025-08-22 22:55:09] [Rank 0] step:601/10000 train_time:51689ms step_avg:86.01ms +[2025-08-22 22:55:09] [Rank 0] step:601/10000 train_time:51689ms step_avg:86.01ms +[2025-08-22 22:55:10] [Rank 0] step:621/10000 train_time:53388ms step_avg:85.97ms +[2025-08-22 22:55:10] [Rank 0] step:621/10000 train_time:53388ms step_avg:85.97ms +[2025-08-22 22:55:12] [Rank 0] step:641/10000 train_time:55100ms step_avg:85.96ms +[2025-08-22 22:55:12] [Rank 0] step:641/10000 train_time:55100ms 
step_avg:85.96ms +[2025-08-22 22:55:14] [Rank 0] step:661/10000 train_time:56814ms step_avg:85.95ms +[2025-08-22 22:55:14] [Rank 0] step:661/10000 train_time:56814ms step_avg:85.95ms +[2025-08-22 22:55:16] [Rank 0] step:681/10000 train_time:58531ms step_avg:85.95ms +[2025-08-22 22:55:16] [Rank 0] step:681/10000 train_time:58531ms step_avg:85.95ms +[2025-08-22 22:55:17] [Rank 0] step:701/10000 train_time:60246ms step_avg:85.94ms +[2025-08-22 22:55:17] [Rank 0] step:701/10000 train_time:60246ms step_avg:85.94ms +[2025-08-22 22:55:19] [Rank 0] step:721/10000 train_time:61962ms step_avg:85.94ms +[2025-08-22 22:55:19] [Rank 0] step:721/10000 train_time:61962ms step_avg:85.94ms +[2025-08-22 22:55:21] [Rank 0] step:741/10000 train_time:63678ms step_avg:85.94ms +[2025-08-22 22:55:21] [Rank 0] step:741/10000 train_time:63678ms step_avg:85.94ms +[2025-08-22 22:55:22] [Rank 0] step:761/10000 train_time:65409ms step_avg:85.95ms +[2025-08-22 22:55:22] [Rank 0] step:761/10000 train_time:65409ms step_avg:85.95ms +[2025-08-22 22:55:24] [Rank 0] step:781/10000 train_time:67140ms step_avg:85.97ms +[2025-08-22 22:55:24] [Rank 0] step:781/10000 train_time:67140ms step_avg:85.97ms +[2025-08-22 22:55:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:55:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:55:39] [Rank 0] PRINT: step:800/10000 val_loss:4.8446 svd_entropy: attn_qk:H=0.7441,top10E=0.30,eRank=172.9,q75/q25=38.28 attn_vo:H=0.7934,top10E=0.08,eRank=305.9,q75/q25=inf mlp_w1:H=0.8582,top10E=0.20,eRank=301.8,q75/q25=5.21 mlp_w2:H=0.9683,top10E=0.04,eRank=622.6,q75/q25=2.97 vo_prod:H=0.6191,top10E=0.17,eRank=130.4,q75/q25=inf train_time:68958ms step_avg:86.20ms +[2025-08-22 22:55:39] [Rank 0] PRINT: step:800/10000 val_loss:4.8446 svd_entropy: attn_qk:H=0.7441,top10E=0.30,eRank=172.9,q75/q25=38.28 attn_vo:H=0.7934,top10E=0.08,eRank=305.9,q75/q25=inf mlp_w1:H=0.8582,top10E=0.20,eRank=301.8,q75/q25=5.21 mlp_w2:H=0.9683,top10E=0.04,eRank=622.6,q75/q25=2.97 vo_prod:H=0.6191,top10E=0.17,eRank=130.4,q75/q25=inf train_time:68958ms step_avg:86.20ms +[2025-08-22 22:55:39] [Rank 0] step:801/10000 train_time:68977ms step_avg:86.11ms +[2025-08-22 22:55:39] [Rank 0] step:801/10000 train_time:68977ms step_avg:86.11ms +[2025-08-22 22:55:41] [Rank 0] step:821/10000 train_time:70700ms step_avg:86.11ms +[2025-08-22 22:55:41] [Rank 0] step:821/10000 train_time:70700ms step_avg:86.11ms +[2025-08-22 22:55:43] [Rank 0] step:841/10000 train_time:72423ms step_avg:86.12ms +[2025-08-22 22:55:43] [Rank 0] step:841/10000 train_time:72423ms step_avg:86.12ms +[2025-08-22 22:55:45] [Rank 0] step:861/10000 train_time:74149ms step_avg:86.12ms +[2025-08-22 22:55:45] [Rank 0] step:861/10000 train_time:74149ms step_avg:86.12ms +[2025-08-22 22:55:46] [Rank 0] step:881/10000 train_time:75873ms step_avg:86.12ms +[2025-08-22 22:55:46] [Rank 0] step:881/10000 train_time:75873ms step_avg:86.12ms +[2025-08-22 22:55:48] [Rank 0] step:901/10000 train_time:77598ms step_avg:86.12ms +[2025-08-22 22:55:48] [Rank 0] step:901/10000 train_time:77598ms step_avg:86.12ms +[2025-08-22 22:55:50] [Rank 0] step:921/10000 train_time:79323ms step_avg:86.13ms +[2025-08-22 22:55:50] [Rank 0] step:921/10000 train_time:79323ms step_avg:86.13ms +[2025-08-22 22:55:52] [Rank 0] step:941/10000 train_time:81048ms 
step_avg:86.13ms +[2025-08-22 22:55:52] [Rank 0] step:941/10000 train_time:81048ms step_avg:86.13ms +[2025-08-22 22:55:53] [Rank 0] step:961/10000 train_time:82774ms step_avg:86.13ms +[2025-08-22 22:55:53] [Rank 0] step:961/10000 train_time:82774ms step_avg:86.13ms +[2025-08-22 22:55:55] [Rank 0] step:981/10000 train_time:84500ms step_avg:86.14ms +[2025-08-22 22:55:55] [Rank 0] step:981/10000 train_time:84500ms step_avg:86.14ms +[2025-08-22 22:55:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:55:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:56:11] [Rank 0] PRINT: step:1000/10000 val_loss:4.6969 svd_entropy: attn_qk:H=0.7517,top10E=0.28,eRank=178.6,q75/q25=40.88 attn_vo:H=0.8034,top10E=0.08,eRank=326.8,q75/q25=inf mlp_w1:H=0.8692,top10E=0.18,eRank=324.4,q75/q25=5.04 mlp_w2:H=0.9690,top10E=0.04,eRank=625.3,q75/q25=2.93 vo_prod:H=0.6360,top10E=0.15,eRank=147.7,q75/q25=inf train_time:86240ms step_avg:86.24ms +[2025-08-22 22:56:11] [Rank 0] PRINT: step:1000/10000 val_loss:4.6969 svd_entropy: attn_qk:H=0.7517,top10E=0.28,eRank=178.6,q75/q25=40.88 attn_vo:H=0.8034,top10E=0.08,eRank=326.8,q75/q25=inf mlp_w1:H=0.8692,top10E=0.18,eRank=324.4,q75/q25=5.04 mlp_w2:H=0.9690,top10E=0.04,eRank=625.3,q75/q25=2.93 vo_prod:H=0.6360,top10E=0.15,eRank=147.7,q75/q25=inf train_time:86240ms step_avg:86.24ms +[2025-08-22 22:56:11] [Rank 0] step:1001/10000 train_time:86258ms step_avg:86.17ms +[2025-08-22 22:56:11] [Rank 0] step:1001/10000 train_time:86258ms step_avg:86.17ms +[2025-08-22 22:56:12] [Rank 0] step:1021/10000 train_time:87967ms step_avg:86.16ms +[2025-08-22 22:56:12] [Rank 0] step:1021/10000 train_time:87967ms step_avg:86.16ms +[2025-08-22 22:56:14] [Rank 0] step:1041/10000 train_time:89689ms step_avg:86.16ms +[2025-08-22 22:56:14] [Rank 0] step:1041/10000 train_time:89689ms 
step_avg:86.16ms +[2025-08-22 22:56:16] [Rank 0] step:1061/10000 train_time:91411ms step_avg:86.16ms +[2025-08-22 22:56:16] [Rank 0] step:1061/10000 train_time:91411ms step_avg:86.16ms +[2025-08-22 22:56:18] [Rank 0] step:1081/10000 train_time:93136ms step_avg:86.16ms +[2025-08-22 22:56:18] [Rank 0] step:1081/10000 train_time:93136ms step_avg:86.16ms +[2025-08-22 22:56:19] [Rank 0] step:1101/10000 train_time:94859ms step_avg:86.16ms +[2025-08-22 22:56:19] [Rank 0] step:1101/10000 train_time:94859ms step_avg:86.16ms +[2025-08-22 22:56:21] [Rank 0] step:1121/10000 train_time:96585ms step_avg:86.16ms +[2025-08-22 22:56:21] [Rank 0] step:1121/10000 train_time:96585ms step_avg:86.16ms +[2025-08-22 22:56:23] [Rank 0] step:1141/10000 train_time:98310ms step_avg:86.16ms +[2025-08-22 22:56:23] [Rank 0] step:1141/10000 train_time:98310ms step_avg:86.16ms +[2025-08-22 22:56:24] [Rank 0] step:1161/10000 train_time:100038ms step_avg:86.17ms +[2025-08-22 22:56:24] [Rank 0] step:1161/10000 train_time:100038ms step_avg:86.17ms +[2025-08-22 22:56:26] [Rank 0] step:1181/10000 train_time:101767ms step_avg:86.17ms +[2025-08-22 22:56:26] [Rank 0] step:1181/10000 train_time:101767ms step_avg:86.17ms +[2025-08-22 22:56:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:56:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:56:41] [Rank 0] PRINT: step:1200/10000 val_loss:4.5313 svd_entropy: attn_qk:H=0.7570,top10E=0.28,eRank=182.8,q75/q25=42.77 attn_vo:H=0.8104,top10E=0.07,eRank=342.3,q75/q25=inf mlp_w1:H=0.8771,top10E=0.17,eRank=341.9,q75/q25=4.94 mlp_w2:H=0.9693,top10E=0.05,eRank=626.6,q75/q25=2.90 vo_prod:H=0.6480,top10E=0.14,eRank=161.6,q75/q25=inf train_time:103509ms step_avg:86.26ms +[2025-08-22 22:56:41] [Rank 0] PRINT: step:1200/10000 val_loss:4.5313 svd_entropy: attn_qk:H=0.7570,top10E=0.28,eRank=182.8,q75/q25=42.77 attn_vo:H=0.8104,top10E=0.07,eRank=342.3,q75/q25=inf mlp_w1:H=0.8771,top10E=0.17,eRank=341.9,q75/q25=4.94 mlp_w2:H=0.9693,top10E=0.05,eRank=626.6,q75/q25=2.90 vo_prod:H=0.6480,top10E=0.14,eRank=161.6,q75/q25=inf train_time:103509ms step_avg:86.26ms +[2025-08-22 22:56:42] [Rank 0] step:1201/10000 train_time:103527ms step_avg:86.20ms +[2025-08-22 22:56:42] [Rank 0] step:1201/10000 train_time:103527ms step_avg:86.20ms +[2025-08-22 22:56:43] [Rank 0] step:1221/10000 train_time:105256ms step_avg:86.20ms +[2025-08-22 22:56:43] [Rank 0] step:1221/10000 train_time:105256ms step_avg:86.20ms +[2025-08-22 22:56:45] [Rank 0] step:1241/10000 train_time:106981ms step_avg:86.21ms +[2025-08-22 22:56:45] [Rank 0] step:1241/10000 train_time:106981ms step_avg:86.21ms +[2025-08-22 22:56:47] [Rank 0] step:1261/10000 train_time:108706ms step_avg:86.21ms +[2025-08-22 22:56:47] [Rank 0] step:1261/10000 train_time:108706ms step_avg:86.21ms +[2025-08-22 22:56:48] [Rank 0] step:1281/10000 train_time:110433ms step_avg:86.21ms +[2025-08-22 22:56:48] [Rank 0] step:1281/10000 train_time:110433ms step_avg:86.21ms +[2025-08-22 22:56:50] [Rank 0] step:1301/10000 train_time:112160ms step_avg:86.21ms +[2025-08-22 22:56:50] [Rank 0] step:1301/10000 train_time:112160ms step_avg:86.21ms +[2025-08-22 22:56:52] [Rank 0] step:1321/10000 train_time:113888ms step_avg:86.21ms +[2025-08-22 22:56:52] [Rank 0] step:1321/10000 train_time:113888ms step_avg:86.21ms +[2025-08-22 22:56:54] [Rank 0] 
step:1341/10000 train_time:115618ms step_avg:86.22ms +[2025-08-22 22:56:54] [Rank 0] step:1341/10000 train_time:115618ms step_avg:86.22ms +[2025-08-22 22:56:55] [Rank 0] step:1361/10000 train_time:117348ms step_avg:86.22ms +[2025-08-22 22:56:55] [Rank 0] step:1361/10000 train_time:117348ms step_avg:86.22ms +[2025-08-22 22:56:57] [Rank 0] step:1381/10000 train_time:119078ms step_avg:86.23ms +[2025-08-22 22:56:57] [Rank 0] step:1381/10000 train_time:119078ms step_avg:86.23ms +[2025-08-22 22:56:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:56:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:57:12] [Rank 0] PRINT: step:1400/10000 val_loss:4.4417 svd_entropy: attn_qk:H=0.7611,top10E=0.27,eRank=186.2,q75/q25=44.35 attn_vo:H=0.8145,top10E=0.07,eRank=352.2,q75/q25=inf mlp_w1:H=0.8831,top10E=0.16,eRank=355.7,q75/q25=4.87 mlp_w2:H=0.9696,top10E=0.05,eRank=627.9,q75/q25=2.87 vo_prod:H=0.6549,top10E=0.13,eRank=170.2,q75/q25=inf train_time:120821ms step_avg:86.30ms +[2025-08-22 22:57:12] [Rank 0] PRINT: step:1400/10000 val_loss:4.4417 svd_entropy: attn_qk:H=0.7611,top10E=0.27,eRank=186.2,q75/q25=44.35 attn_vo:H=0.8145,top10E=0.07,eRank=352.2,q75/q25=inf mlp_w1:H=0.8831,top10E=0.16,eRank=355.7,q75/q25=4.87 mlp_w2:H=0.9696,top10E=0.05,eRank=627.9,q75/q25=2.87 vo_prod:H=0.6549,top10E=0.13,eRank=170.2,q75/q25=inf train_time:120821ms step_avg:86.30ms +[2025-08-22 22:57:13] [Rank 0] step:1401/10000 train_time:120839ms step_avg:86.25ms +[2025-08-22 22:57:13] [Rank 0] step:1401/10000 train_time:120839ms step_avg:86.25ms +[2025-08-22 22:57:14] [Rank 0] step:1421/10000 train_time:122556ms step_avg:86.25ms +[2025-08-22 22:57:14] [Rank 0] step:1421/10000 train_time:122556ms step_avg:86.25ms +[2025-08-22 22:57:16] [Rank 0] step:1441/10000 train_time:124284ms step_avg:86.25ms +[2025-08-22 
22:57:16] [Rank 0] step:1441/10000 train_time:124284ms step_avg:86.25ms +[2025-08-22 22:57:18] [Rank 0] step:1461/10000 train_time:126012ms step_avg:86.25ms +[2025-08-22 22:57:18] [Rank 0] step:1461/10000 train_time:126012ms step_avg:86.25ms +[2025-08-22 22:57:19] [Rank 0] step:1481/10000 train_time:127742ms step_avg:86.25ms +[2025-08-22 22:57:19] [Rank 0] step:1481/10000 train_time:127742ms step_avg:86.25ms +[2025-08-22 22:57:21] [Rank 0] step:1501/10000 train_time:129483ms step_avg:86.26ms +[2025-08-22 22:57:21] [Rank 0] step:1501/10000 train_time:129483ms step_avg:86.26ms +[2025-08-22 22:57:23] [Rank 0] step:1521/10000 train_time:131225ms step_avg:86.28ms +[2025-08-22 22:57:23] [Rank 0] step:1521/10000 train_time:131225ms step_avg:86.28ms +[2025-08-22 22:57:25] [Rank 0] step:1541/10000 train_time:132969ms step_avg:86.29ms +[2025-08-22 22:57:25] [Rank 0] step:1541/10000 train_time:132969ms step_avg:86.29ms +[2025-08-22 22:57:26] [Rank 0] step:1561/10000 train_time:134713ms step_avg:86.30ms +[2025-08-22 22:57:26] [Rank 0] step:1561/10000 train_time:134713ms step_avg:86.30ms +[2025-08-22 22:57:28] [Rank 0] step:1581/10000 train_time:136458ms step_avg:86.31ms +[2025-08-22 22:57:28] [Rank 0] step:1581/10000 train_time:136458ms step_avg:86.31ms +[2025-08-22 22:57:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:57:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:57:43] [Rank 0] PRINT: step:1600/10000 val_loss:4.3378 svd_entropy: attn_qk:H=0.7644,top10E=0.27,eRank=189.1,q75/q25=45.36 attn_vo:H=0.8174,top10E=0.07,eRank=359.2,q75/q25=inf mlp_w1:H=0.8879,top10E=0.16,eRank=367.1,q75/q25=4.82 mlp_w2:H=0.9699,top10E=0.05,eRank=628.8,q75/q25=2.85 vo_prod:H=0.6595,top10E=0.13,eRank=176.3,q75/q25=inf train_time:138216ms step_avg:86.39ms +[2025-08-22 22:57:43] [Rank 0] PRINT: step:1600/10000 val_loss:4.3378 svd_entropy: attn_qk:H=0.7644,top10E=0.27,eRank=189.1,q75/q25=45.36 attn_vo:H=0.8174,top10E=0.07,eRank=359.2,q75/q25=inf mlp_w1:H=0.8879,top10E=0.16,eRank=367.1,q75/q25=4.82 mlp_w2:H=0.9699,top10E=0.05,eRank=628.8,q75/q25=2.85 vo_prod:H=0.6595,top10E=0.13,eRank=176.3,q75/q25=inf train_time:138216ms step_avg:86.39ms +[2025-08-22 22:57:44] [Rank 0] step:1601/10000 train_time:138234ms step_avg:86.34ms +[2025-08-22 22:57:44] [Rank 0] step:1601/10000 train_time:138234ms step_avg:86.34ms +[2025-08-22 22:57:45] [Rank 0] step:1621/10000 train_time:139964ms step_avg:86.34ms +[2025-08-22 22:57:45] [Rank 0] step:1621/10000 train_time:139964ms step_avg:86.34ms +[2025-08-22 22:57:47] [Rank 0] step:1641/10000 train_time:141700ms step_avg:86.35ms +[2025-08-22 22:57:47] [Rank 0] step:1641/10000 train_time:141700ms step_avg:86.35ms +[2025-08-22 22:57:49] [Rank 0] step:1661/10000 train_time:143441ms step_avg:86.36ms +[2025-08-22 22:57:49] [Rank 0] step:1661/10000 train_time:143441ms step_avg:86.36ms +[2025-08-22 22:57:51] [Rank 0] step:1681/10000 train_time:145178ms step_avg:86.36ms +[2025-08-22 22:57:51] [Rank 0] step:1681/10000 train_time:145178ms step_avg:86.36ms +[2025-08-22 22:57:52] [Rank 0] step:1701/10000 train_time:146916ms step_avg:86.37ms +[2025-08-22 22:57:52] [Rank 0] step:1701/10000 train_time:146916ms step_avg:86.37ms +[2025-08-22 22:57:54] [Rank 0] step:1721/10000 train_time:148657ms step_avg:86.38ms +[2025-08-22 22:57:54] [Rank 0] step:1721/10000 train_time:148657ms step_avg:86.38ms +[2025-08-22 22:57:56] [Rank 0] 
step:1741/10000 train_time:150397ms step_avg:86.39ms +[2025-08-22 22:57:56] [Rank 0] step:1741/10000 train_time:150397ms step_avg:86.39ms +[2025-08-22 22:57:58] [Rank 0] step:1761/10000 train_time:152136ms step_avg:86.39ms +[2025-08-22 22:57:58] [Rank 0] step:1761/10000 train_time:152136ms step_avg:86.39ms +[2025-08-22 22:57:59] [Rank 0] step:1781/10000 train_time:153876ms step_avg:86.40ms +[2025-08-22 22:57:59] [Rank 0] step:1781/10000 train_time:153876ms step_avg:86.40ms +[2025-08-22 22:58:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:58:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:58:15] [Rank 0] PRINT: step:1800/10000 val_loss:4.2650 svd_entropy: attn_qk:H=0.7674,top10E=0.26,eRank=191.7,q75/q25=45.88 attn_vo:H=0.8196,top10E=0.07,eRank=364.6,q75/q25=inf mlp_w1:H=0.8918,top10E=0.15,eRank=376.7,q75/q25=4.77 mlp_w2:H=0.9700,top10E=0.05,eRank=629.4,q75/q25=2.84 vo_prod:H=0.6633,top10E=0.12,eRank=181.3,q75/q25=inf train_time:155628ms step_avg:86.46ms +[2025-08-22 22:58:15] [Rank 0] PRINT: step:1800/10000 val_loss:4.2650 svd_entropy: attn_qk:H=0.7674,top10E=0.26,eRank=191.7,q75/q25=45.88 attn_vo:H=0.8196,top10E=0.07,eRank=364.6,q75/q25=inf mlp_w1:H=0.8918,top10E=0.15,eRank=376.7,q75/q25=4.77 mlp_w2:H=0.9700,top10E=0.05,eRank=629.4,q75/q25=2.84 vo_prod:H=0.6633,top10E=0.12,eRank=181.3,q75/q25=inf train_time:155628ms step_avg:86.46ms +[2025-08-22 22:58:15] [Rank 0] step:1801/10000 train_time:155647ms step_avg:86.42ms +[2025-08-22 22:58:15] [Rank 0] step:1801/10000 train_time:155647ms step_avg:86.42ms +[2025-08-22 22:58:16] [Rank 0] step:1821/10000 train_time:157375ms step_avg:86.42ms +[2025-08-22 22:58:16] [Rank 0] step:1821/10000 train_time:157375ms step_avg:86.42ms +[2025-08-22 22:58:18] [Rank 0] step:1841/10000 train_time:159109ms step_avg:86.43ms +[2025-08-22 
22:58:18] [Rank 0] step:1841/10000 train_time:159109ms step_avg:86.43ms +[2025-08-22 22:58:20] [Rank 0] step:1861/10000 train_time:160843ms step_avg:86.43ms +[2025-08-22 22:58:20] [Rank 0] step:1861/10000 train_time:160843ms step_avg:86.43ms +[2025-08-22 22:58:22] [Rank 0] step:1881/10000 train_time:162578ms step_avg:86.43ms +[2025-08-22 22:58:22] [Rank 0] step:1881/10000 train_time:162578ms step_avg:86.43ms +[2025-08-22 22:58:23] [Rank 0] step:1901/10000 train_time:164318ms step_avg:86.44ms +[2025-08-22 22:58:23] [Rank 0] step:1901/10000 train_time:164318ms step_avg:86.44ms +[2025-08-22 22:58:25] [Rank 0] step:1921/10000 train_time:166057ms step_avg:86.44ms +[2025-08-22 22:58:25] [Rank 0] step:1921/10000 train_time:166057ms step_avg:86.44ms +[2025-08-22 22:58:27] [Rank 0] step:1941/10000 train_time:167796ms step_avg:86.45ms +[2025-08-22 22:58:27] [Rank 0] step:1941/10000 train_time:167796ms step_avg:86.45ms +[2025-08-22 22:58:29] [Rank 0] step:1961/10000 train_time:169535ms step_avg:86.45ms +[2025-08-22 22:58:29] [Rank 0] step:1961/10000 train_time:169535ms step_avg:86.45ms +[2025-08-22 22:58:30] [Rank 0] step:1981/10000 train_time:171277ms step_avg:86.46ms +[2025-08-22 22:58:30] [Rank 0] step:1981/10000 train_time:171277ms step_avg:86.46ms +[2025-08-22 22:58:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:58:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:58:46] [Rank 0] PRINT: step:2000/10000 val_loss:4.2296 svd_entropy: attn_qk:H=0.7700,top10E=0.26,eRank=194.0,q75/q25=46.67 attn_vo:H=0.8213,top10E=0.06,eRank=368.9,q75/q25=inf mlp_w1:H=0.8950,top10E=0.15,eRank=385.0,q75/q25=4.72 mlp_w2:H=0.9701,top10E=0.05,eRank=629.8,q75/q25=2.82 vo_prod:H=0.6662,top10E=0.12,eRank=185.4,q75/q25=inf train_time:173030ms step_avg:86.52ms +[2025-08-22 22:58:46] [Rank 0] PRINT: step:2000/10000 val_loss:4.2296 svd_entropy: attn_qk:H=0.7700,top10E=0.26,eRank=194.0,q75/q25=46.67 attn_vo:H=0.8213,top10E=0.06,eRank=368.9,q75/q25=inf mlp_w1:H=0.8950,top10E=0.15,eRank=385.0,q75/q25=4.72 mlp_w2:H=0.9701,top10E=0.05,eRank=629.8,q75/q25=2.82 vo_prod:H=0.6662,top10E=0.12,eRank=185.4,q75/q25=inf train_time:173030ms step_avg:86.52ms +[2025-08-22 22:58:46] [Rank 0] step:2001/10000 train_time:173048ms step_avg:86.48ms +[2025-08-22 22:58:46] [Rank 0] step:2001/10000 train_time:173048ms step_avg:86.48ms +[2025-08-22 22:58:48] [Rank 0] step:2021/10000 train_time:174775ms step_avg:86.48ms +[2025-08-22 22:58:48] [Rank 0] step:2021/10000 train_time:174775ms step_avg:86.48ms +[2025-08-22 22:58:49] [Rank 0] step:2041/10000 train_time:176549ms step_avg:86.50ms +[2025-08-22 22:58:49] [Rank 0] step:2041/10000 train_time:176549ms step_avg:86.50ms +[2025-08-22 22:58:51] [Rank 0] step:2061/10000 train_time:178288ms step_avg:86.51ms +[2025-08-22 22:58:51] [Rank 0] step:2061/10000 train_time:178288ms step_avg:86.51ms +[2025-08-22 22:58:53] [Rank 0] step:2081/10000 train_time:180027ms step_avg:86.51ms +[2025-08-22 22:58:53] [Rank 0] step:2081/10000 train_time:180027ms step_avg:86.51ms +[2025-08-22 22:58:55] [Rank 0] step:2101/10000 train_time:181768ms step_avg:86.51ms +[2025-08-22 22:58:55] [Rank 0] step:2101/10000 train_time:181768ms step_avg:86.51ms +[2025-08-22 22:58:56] [Rank 0] step:2121/10000 train_time:183511ms step_avg:86.52ms +[2025-08-22 22:58:56] [Rank 0] step:2121/10000 train_time:183511ms step_avg:86.52ms +[2025-08-22 22:58:58] [Rank 0] 
step:2141/10000 train_time:185254ms step_avg:86.53ms +[2025-08-22 22:58:58] [Rank 0] step:2141/10000 train_time:185254ms step_avg:86.53ms +[2025-08-22 22:59:00] [Rank 0] step:2161/10000 train_time:186998ms step_avg:86.53ms +[2025-08-22 22:59:00] [Rank 0] step:2161/10000 train_time:186998ms step_avg:86.53ms +[2025-08-22 22:59:01] [Rank 0] step:2181/10000 train_time:188743ms step_avg:86.54ms +[2025-08-22 22:59:01] [Rank 0] step:2181/10000 train_time:188743ms step_avg:86.54ms +[2025-08-22 22:59:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:59:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:59:17] [Rank 0] PRINT: step:2200/10000 val_loss:4.1758 svd_entropy: attn_qk:H=0.7721,top10E=0.26,eRank=196.0,q75/q25=46.97 attn_vo:H=0.8226,top10E=0.06,eRank=372.3,q75/q25=inf mlp_w1:H=0.8976,top10E=0.14,eRank=391.9,q75/q25=4.67 mlp_w2:H=0.9701,top10E=0.05,eRank=629.9,q75/q25=2.82 vo_prod:H=0.6685,top10E=0.12,eRank=188.7,q75/q25=inf train_time:190500ms step_avg:86.59ms +[2025-08-22 22:59:17] [Rank 0] PRINT: step:2200/10000 val_loss:4.1758 svd_entropy: attn_qk:H=0.7721,top10E=0.26,eRank=196.0,q75/q25=46.97 attn_vo:H=0.8226,top10E=0.06,eRank=372.3,q75/q25=inf mlp_w1:H=0.8976,top10E=0.14,eRank=391.9,q75/q25=4.67 mlp_w2:H=0.9701,top10E=0.05,eRank=629.9,q75/q25=2.82 vo_prod:H=0.6685,top10E=0.12,eRank=188.7,q75/q25=inf train_time:190500ms step_avg:86.59ms +[2025-08-22 22:59:17] [Rank 0] step:2201/10000 train_time:190519ms step_avg:86.56ms +[2025-08-22 22:59:17] [Rank 0] step:2201/10000 train_time:190519ms step_avg:86.56ms +[2025-08-22 22:59:19] [Rank 0] step:2221/10000 train_time:192249ms step_avg:86.56ms +[2025-08-22 22:59:19] [Rank 0] step:2221/10000 train_time:192249ms step_avg:86.56ms +[2025-08-22 22:59:20] [Rank 0] step:2241/10000 train_time:194023ms step_avg:86.58ms +[2025-08-22 
22:59:20] [Rank 0] step:2241/10000 train_time:194023ms step_avg:86.58ms +[2025-08-22 22:59:22] [Rank 0] step:2261/10000 train_time:195803ms step_avg:86.60ms +[2025-08-22 22:59:22] [Rank 0] step:2261/10000 train_time:195803ms step_avg:86.60ms +[2025-08-22 22:59:24] [Rank 0] step:2281/10000 train_time:197586ms step_avg:86.62ms +[2025-08-22 22:59:24] [Rank 0] step:2281/10000 train_time:197586ms step_avg:86.62ms +[2025-08-22 22:59:26] [Rank 0] step:2301/10000 train_time:199366ms step_avg:86.64ms +[2025-08-22 22:59:26] [Rank 0] step:2301/10000 train_time:199366ms step_avg:86.64ms +[2025-08-22 22:59:28] [Rank 0] step:2321/10000 train_time:201148ms step_avg:86.66ms +[2025-08-22 22:59:28] [Rank 0] step:2321/10000 train_time:201148ms step_avg:86.66ms +[2025-08-22 22:59:29] [Rank 0] step:2341/10000 train_time:202930ms step_avg:86.69ms +[2025-08-22 22:59:29] [Rank 0] step:2341/10000 train_time:202930ms step_avg:86.69ms +[2025-08-22 22:59:31] [Rank 0] step:2361/10000 train_time:204713ms step_avg:86.71ms +[2025-08-22 22:59:31] [Rank 0] step:2361/10000 train_time:204713ms step_avg:86.71ms +[2025-08-22 22:59:33] [Rank 0] step:2381/10000 train_time:206497ms step_avg:86.73ms +[2025-08-22 22:59:33] [Rank 0] step:2381/10000 train_time:206497ms step_avg:86.73ms +[2025-08-22 22:59:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 22:59:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 22:59:48] [Rank 0] PRINT: step:2400/10000 val_loss:4.1099 svd_entropy: attn_qk:H=0.7736,top10E=0.26,eRank=197.4,q75/q25=46.87 attn_vo:H=0.8236,top10E=0.06,eRank=374.9,q75/q25=inf mlp_w1:H=0.9000,top10E=0.14,eRank=398.1,q75/q25=4.61 mlp_w2:H=0.9702,top10E=0.05,eRank=629.9,q75/q25=2.81 vo_prod:H=0.6702,top10E=0.12,eRank=191.1,q75/q25=inf train_time:208292ms step_avg:86.79ms +[2025-08-22 22:59:48] [Rank 0] PRINT: step:2400/10000 val_loss:4.1099 svd_entropy: attn_qk:H=0.7736,top10E=0.26,eRank=197.4,q75/q25=46.87 attn_vo:H=0.8236,top10E=0.06,eRank=374.9,q75/q25=inf mlp_w1:H=0.9000,top10E=0.14,eRank=398.1,q75/q25=4.61 mlp_w2:H=0.9702,top10E=0.05,eRank=629.9,q75/q25=2.81 vo_prod:H=0.6702,top10E=0.12,eRank=191.1,q75/q25=inf train_time:208292ms step_avg:86.79ms +[2025-08-22 22:59:48] [Rank 0] step:2401/10000 train_time:208310ms step_avg:86.76ms +[2025-08-22 22:59:48] [Rank 0] step:2401/10000 train_time:208310ms step_avg:86.76ms +[2025-08-22 22:59:50] [Rank 0] step:2421/10000 train_time:210104ms step_avg:86.78ms +[2025-08-22 22:59:50] [Rank 0] step:2421/10000 train_time:210104ms step_avg:86.78ms +[2025-08-22 22:59:52] [Rank 0] step:2441/10000 train_time:211883ms step_avg:86.80ms +[2025-08-22 22:59:52] [Rank 0] step:2441/10000 train_time:211883ms step_avg:86.80ms +[2025-08-22 22:59:54] [Rank 0] step:2461/10000 train_time:213663ms step_avg:86.82ms +[2025-08-22 22:59:54] [Rank 0] step:2461/10000 train_time:213663ms step_avg:86.82ms +[2025-08-22 22:59:56] [Rank 0] step:2481/10000 train_time:215448ms step_avg:86.84ms +[2025-08-22 22:59:56] [Rank 0] step:2481/10000 train_time:215448ms step_avg:86.84ms +[2025-08-22 22:59:57] [Rank 0] step:2501/10000 train_time:217229ms step_avg:86.86ms +[2025-08-22 22:59:57] [Rank 0] step:2501/10000 train_time:217229ms step_avg:86.86ms +[2025-08-22 22:59:59] [Rank 0] step:2521/10000 train_time:219011ms step_avg:86.87ms +[2025-08-22 22:59:59] [Rank 0] step:2521/10000 train_time:219011ms step_avg:86.87ms +[2025-08-22 23:00:01] [Rank 0] 
step:2541/10000 train_time:220794ms step_avg:86.89ms +[2025-08-22 23:00:01] [Rank 0] step:2541/10000 train_time:220794ms step_avg:86.89ms +[2025-08-22 23:00:03] [Rank 0] step:2561/10000 train_time:222578ms step_avg:86.91ms +[2025-08-22 23:00:03] [Rank 0] step:2561/10000 train_time:222578ms step_avg:86.91ms +[2025-08-22 23:00:04] [Rank 0] step:2581/10000 train_time:224362ms step_avg:86.93ms +[2025-08-22 23:00:04] [Rank 0] step:2581/10000 train_time:224362ms step_avg:86.93ms +[2025-08-22 23:00:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:00:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:00:20] [Rank 0] PRINT: step:2600/10000 val_loss:4.0780 svd_entropy: attn_qk:H=0.7755,top10E=0.25,eRank=199.2,q75/q25=46.88 attn_vo:H=0.8245,top10E=0.06,eRank=377.2,q75/q25=inf mlp_w1:H=0.9021,top10E=0.14,eRank=403.7,q75/q25=4.58 mlp_w2:H=0.9701,top10E=0.05,eRank=629.9,q75/q25=2.81 vo_prod:H=0.6719,top10E=0.12,eRank=193.5,q75/q25=inf train_time:226158ms step_avg:86.98ms +[2025-08-22 23:00:20] [Rank 0] PRINT: step:2600/10000 val_loss:4.0780 svd_entropy: attn_qk:H=0.7755,top10E=0.25,eRank=199.2,q75/q25=46.88 attn_vo:H=0.8245,top10E=0.06,eRank=377.2,q75/q25=inf mlp_w1:H=0.9021,top10E=0.14,eRank=403.7,q75/q25=4.58 mlp_w2:H=0.9701,top10E=0.05,eRank=629.9,q75/q25=2.81 vo_prod:H=0.6719,top10E=0.12,eRank=193.5,q75/q25=inf train_time:226158ms step_avg:86.98ms +[2025-08-22 23:00:20] [Rank 0] step:2601/10000 train_time:226176ms step_avg:86.96ms +[2025-08-22 23:00:20] [Rank 0] step:2601/10000 train_time:226176ms step_avg:86.96ms +[2025-08-22 23:00:22] [Rank 0] step:2621/10000 train_time:227950ms step_avg:86.97ms +[2025-08-22 23:00:22] [Rank 0] step:2621/10000 train_time:227950ms step_avg:86.97ms +[2025-08-22 23:00:24] [Rank 0] step:2641/10000 train_time:229730ms step_avg:86.99ms +[2025-08-22 
23:00:24] [Rank 0] step:2641/10000 train_time:229730ms step_avg:86.99ms +[2025-08-22 23:00:25] [Rank 0] step:2661/10000 train_time:231513ms step_avg:87.00ms +[2025-08-22 23:00:25] [Rank 0] step:2661/10000 train_time:231513ms step_avg:87.00ms +[2025-08-22 23:00:27] [Rank 0] step:2681/10000 train_time:233294ms step_avg:87.02ms +[2025-08-22 23:00:27] [Rank 0] step:2681/10000 train_time:233294ms step_avg:87.02ms +[2025-08-22 23:00:29] [Rank 0] step:2701/10000 train_time:235077ms step_avg:87.03ms +[2025-08-22 23:00:29] [Rank 0] step:2701/10000 train_time:235077ms step_avg:87.03ms +[2025-08-22 23:00:31] [Rank 0] step:2721/10000 train_time:236861ms step_avg:87.05ms +[2025-08-22 23:00:31] [Rank 0] step:2721/10000 train_time:236861ms step_avg:87.05ms +[2025-08-22 23:00:32] [Rank 0] step:2741/10000 train_time:238644ms step_avg:87.06ms +[2025-08-22 23:00:32] [Rank 0] step:2741/10000 train_time:238644ms step_avg:87.06ms +[2025-08-22 23:00:34] [Rank 0] step:2761/10000 train_time:240429ms step_avg:87.08ms +[2025-08-22 23:00:34] [Rank 0] step:2761/10000 train_time:240429ms step_avg:87.08ms +[2025-08-22 23:00:36] [Rank 0] step:2781/10000 train_time:242214ms step_avg:87.10ms +[2025-08-22 23:00:36] [Rank 0] step:2781/10000 train_time:242214ms step_avg:87.10ms +[2025-08-22 23:00:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:00:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:00:51] [Rank 0] PRINT: step:2800/10000 val_loss:4.0526 svd_entropy: attn_qk:H=0.7773,top10E=0.25,eRank=200.9,q75/q25=46.93 attn_vo:H=0.8252,top10E=0.06,eRank=379.2,q75/q25=inf mlp_w1:H=0.9040,top10E=0.14,eRank=408.9,q75/q25=4.54 mlp_w2:H=0.9701,top10E=0.05,eRank=629.7,q75/q25=2.80 vo_prod:H=0.6733,top10E=0.12,eRank=195.6,q75/q25=inf train_time:244013ms step_avg:87.15ms +[2025-08-22 23:00:51] [Rank 0] PRINT: step:2800/10000 val_loss:4.0526 svd_entropy: attn_qk:H=0.7773,top10E=0.25,eRank=200.9,q75/q25=46.93 attn_vo:H=0.8252,top10E=0.06,eRank=379.2,q75/q25=inf mlp_w1:H=0.9040,top10E=0.14,eRank=408.9,q75/q25=4.54 mlp_w2:H=0.9701,top10E=0.05,eRank=629.7,q75/q25=2.80 vo_prod:H=0.6733,top10E=0.12,eRank=195.6,q75/q25=inf train_time:244013ms step_avg:87.15ms +[2025-08-22 23:00:52] [Rank 0] step:2801/10000 train_time:244031ms step_avg:87.12ms +[2025-08-22 23:00:52] [Rank 0] step:2801/10000 train_time:244031ms step_avg:87.12ms +[2025-08-22 23:00:53] [Rank 0] step:2821/10000 train_time:245804ms step_avg:87.13ms +[2025-08-22 23:00:53] [Rank 0] step:2821/10000 train_time:245804ms step_avg:87.13ms +[2025-08-22 23:00:55] [Rank 0] step:2841/10000 train_time:247587ms step_avg:87.15ms +[2025-08-22 23:00:55] [Rank 0] step:2841/10000 train_time:247587ms step_avg:87.15ms +[2025-08-22 23:00:57] [Rank 0] step:2861/10000 train_time:249369ms step_avg:87.16ms +[2025-08-22 23:00:57] [Rank 0] step:2861/10000 train_time:249369ms step_avg:87.16ms +[2025-08-22 23:00:59] [Rank 0] step:2881/10000 train_time:251153ms step_avg:87.18ms +[2025-08-22 23:00:59] [Rank 0] step:2881/10000 train_time:251153ms step_avg:87.18ms +[2025-08-22 23:01:00] [Rank 0] step:2901/10000 train_time:252938ms step_avg:87.19ms +[2025-08-22 23:01:00] [Rank 0] step:2901/10000 train_time:252938ms step_avg:87.19ms +[2025-08-22 23:01:02] [Rank 0] step:2921/10000 train_time:254726ms step_avg:87.21ms +[2025-08-22 23:01:02] [Rank 0] step:2921/10000 train_time:254726ms step_avg:87.21ms +[2025-08-22 23:01:04] [Rank 0] 
step:2941/10000 train_time:256513ms step_avg:87.22ms +[2025-08-22 23:01:04] [Rank 0] step:2941/10000 train_time:256513ms step_avg:87.22ms +[2025-08-22 23:01:06] [Rank 0] step:2961/10000 train_time:258300ms step_avg:87.23ms +[2025-08-22 23:01:06] [Rank 0] step:2961/10000 train_time:258300ms step_avg:87.23ms +[2025-08-22 23:01:08] [Rank 0] step:2981/10000 train_time:260093ms step_avg:87.25ms +[2025-08-22 23:01:08] [Rank 0] step:2981/10000 train_time:260093ms step_avg:87.25ms +[2025-08-22 23:01:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:01:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:01:23] [Rank 0] PRINT: step:3000/10000 val_loss:4.0163 svd_entropy: attn_qk:H=0.7788,top10E=0.25,eRank=202.4,q75/q25=46.77 attn_vo:H=0.8258,top10E=0.06,eRank=380.8,q75/q25=inf mlp_w1:H=0.9057,top10E=0.14,eRank=413.6,q75/q25=4.50 mlp_w2:H=0.9701,top10E=0.05,eRank=629.6,q75/q25=2.81 vo_prod:H=0.6746,top10E=0.12,eRank=197.6,q75/q25=inf train_time:261901ms step_avg:87.30ms +[2025-08-22 23:01:23] [Rank 0] PRINT: step:3000/10000 val_loss:4.0163 svd_entropy: attn_qk:H=0.7788,top10E=0.25,eRank=202.4,q75/q25=46.77 attn_vo:H=0.8258,top10E=0.06,eRank=380.8,q75/q25=inf mlp_w1:H=0.9057,top10E=0.14,eRank=413.6,q75/q25=4.50 mlp_w2:H=0.9701,top10E=0.05,eRank=629.6,q75/q25=2.81 vo_prod:H=0.6746,top10E=0.12,eRank=197.6,q75/q25=inf train_time:261901ms step_avg:87.30ms +[2025-08-22 23:01:23] [Rank 0] step:3001/10000 train_time:261919ms step_avg:87.28ms +[2025-08-22 23:01:23] [Rank 0] step:3001/10000 train_time:261919ms step_avg:87.28ms +[2025-08-22 23:01:25] [Rank 0] step:3021/10000 train_time:263704ms step_avg:87.29ms +[2025-08-22 23:01:25] [Rank 0] step:3021/10000 train_time:263704ms step_avg:87.29ms +[2025-08-22 23:01:27] [Rank 0] step:3041/10000 train_time:265491ms step_avg:87.30ms +[2025-08-22 
23:01:27] [Rank 0] step:3041/10000 train_time:265491ms step_avg:87.30ms +[2025-08-22 23:01:29] [Rank 0] step:3061/10000 train_time:267279ms step_avg:87.32ms +[2025-08-22 23:01:29] [Rank 0] step:3061/10000 train_time:267279ms step_avg:87.32ms +[2025-08-22 23:01:30] [Rank 0] step:3081/10000 train_time:269068ms step_avg:87.33ms +[2025-08-22 23:01:30] [Rank 0] step:3081/10000 train_time:269068ms step_avg:87.33ms +[2025-08-22 23:01:32] [Rank 0] step:3101/10000 train_time:270857ms step_avg:87.34ms +[2025-08-22 23:01:32] [Rank 0] step:3101/10000 train_time:270857ms step_avg:87.34ms +[2025-08-22 23:01:34] [Rank 0] step:3121/10000 train_time:272645ms step_avg:87.36ms +[2025-08-22 23:01:34] [Rank 0] step:3121/10000 train_time:272645ms step_avg:87.36ms +[2025-08-22 23:01:36] [Rank 0] step:3141/10000 train_time:274435ms step_avg:87.37ms +[2025-08-22 23:01:36] [Rank 0] step:3141/10000 train_time:274435ms step_avg:87.37ms +[2025-08-22 23:01:37] [Rank 0] step:3161/10000 train_time:276226ms step_avg:87.39ms +[2025-08-22 23:01:37] [Rank 0] step:3161/10000 train_time:276226ms step_avg:87.39ms +[2025-08-22 23:01:39] [Rank 0] step:3181/10000 train_time:278017ms step_avg:87.40ms +[2025-08-22 23:01:39] [Rank 0] step:3181/10000 train_time:278017ms step_avg:87.40ms +[2025-08-22 23:01:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:01:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:01:55] [Rank 0] PRINT: step:3200/10000 val_loss:3.9904 svd_entropy: attn_qk:H=0.7802,top10E=0.25,eRank=203.8,q75/q25=46.83 attn_vo:H=0.8264,top10E=0.06,eRank=382.4,q75/q25=inf mlp_w1:H=0.9072,top10E=0.13,eRank=417.8,q75/q25=4.46 mlp_w2:H=0.9700,top10E=0.05,eRank=629.3,q75/q25=2.80 vo_prod:H=0.6758,top10E=0.11,eRank=199.4,q75/q25=inf train_time:279821ms step_avg:87.44ms +[2025-08-22 23:01:55] [Rank 0] PRINT: step:3200/10000 val_loss:3.9904 svd_entropy: attn_qk:H=0.7802,top10E=0.25,eRank=203.8,q75/q25=46.83 attn_vo:H=0.8264,top10E=0.06,eRank=382.4,q75/q25=inf mlp_w1:H=0.9072,top10E=0.13,eRank=417.8,q75/q25=4.46 mlp_w2:H=0.9700,top10E=0.05,eRank=629.3,q75/q25=2.80 vo_prod:H=0.6758,top10E=0.11,eRank=199.4,q75/q25=inf train_time:279821ms step_avg:87.44ms +[2025-08-22 23:01:55] [Rank 0] step:3201/10000 train_time:279839ms step_avg:87.42ms +[2025-08-22 23:01:55] [Rank 0] step:3201/10000 train_time:279839ms step_avg:87.42ms +[2025-08-22 23:01:57] [Rank 0] step:3221/10000 train_time:281626ms step_avg:87.43ms +[2025-08-22 23:01:57] [Rank 0] step:3221/10000 train_time:281626ms step_avg:87.43ms +[2025-08-22 23:01:58] [Rank 0] step:3241/10000 train_time:283412ms step_avg:87.45ms +[2025-08-22 23:01:58] [Rank 0] step:3241/10000 train_time:283412ms step_avg:87.45ms +[2025-08-22 23:02:00] [Rank 0] step:3261/10000 train_time:285199ms step_avg:87.46ms +[2025-08-22 23:02:00] [Rank 0] step:3261/10000 train_time:285199ms step_avg:87.46ms +[2025-08-22 23:02:02] [Rank 0] step:3281/10000 train_time:286989ms step_avg:87.47ms +[2025-08-22 23:02:02] [Rank 0] step:3281/10000 train_time:286989ms step_avg:87.47ms +[2025-08-22 23:02:04] [Rank 0] step:3301/10000 train_time:288778ms step_avg:87.48ms +[2025-08-22 23:02:04] [Rank 0] step:3301/10000 train_time:288778ms step_avg:87.48ms +[2025-08-22 23:02:06] [Rank 0] step:3321/10000 train_time:290569ms step_avg:87.49ms +[2025-08-22 23:02:06] [Rank 0] step:3321/10000 train_time:290569ms step_avg:87.49ms +[2025-08-22 23:02:07] [Rank 0] 
step:3341/10000 train_time:292360ms step_avg:87.51ms +[2025-08-22 23:02:07] [Rank 0] step:3341/10000 train_time:292360ms step_avg:87.51ms +[2025-08-22 23:02:09] [Rank 0] step:3361/10000 train_time:294152ms step_avg:87.52ms +[2025-08-22 23:02:09] [Rank 0] step:3361/10000 train_time:294152ms step_avg:87.52ms +[2025-08-22 23:02:11] [Rank 0] step:3381/10000 train_time:295944ms step_avg:87.53ms +[2025-08-22 23:02:11] [Rank 0] step:3381/10000 train_time:295944ms step_avg:87.53ms +[2025-08-22 23:02:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:02:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:02:26] [Rank 0] PRINT: step:3400/10000 val_loss:3.9677 svd_entropy: attn_qk:H=0.7817,top10E=0.25,eRank=205.3,q75/q25=46.84 attn_vo:H=0.8269,top10E=0.06,eRank=383.7,q75/q25=inf mlp_w1:H=0.9087,top10E=0.13,eRank=422.0,q75/q25=4.43 mlp_w2:H=0.9699,top10E=0.05,eRank=629.1,q75/q25=2.81 vo_prod:H=0.6770,top10E=0.11,eRank=201.2,q75/q25=inf train_time:297750ms step_avg:87.57ms +[2025-08-22 23:02:26] [Rank 0] PRINT: step:3400/10000 val_loss:3.9677 svd_entropy: attn_qk:H=0.7817,top10E=0.25,eRank=205.3,q75/q25=46.84 attn_vo:H=0.8269,top10E=0.06,eRank=383.7,q75/q25=inf mlp_w1:H=0.9087,top10E=0.13,eRank=422.0,q75/q25=4.43 mlp_w2:H=0.9699,top10E=0.05,eRank=629.1,q75/q25=2.81 vo_prod:H=0.6770,top10E=0.11,eRank=201.2,q75/q25=inf train_time:297750ms step_avg:87.57ms +[2025-08-22 23:02:26] [Rank 0] step:3401/10000 train_time:297768ms step_avg:87.55ms +[2025-08-22 23:02:26] [Rank 0] step:3401/10000 train_time:297768ms step_avg:87.55ms +[2025-08-22 23:02:28] [Rank 0] step:3421/10000 train_time:299566ms step_avg:87.57ms +[2025-08-22 23:02:28] [Rank 0] step:3421/10000 train_time:299566ms step_avg:87.57ms +[2025-08-22 23:02:30] [Rank 0] step:3441/10000 train_time:301358ms step_avg:87.58ms +[2025-08-22 
23:02:30] [Rank 0] step:3441/10000 train_time:301358ms step_avg:87.58ms +[2025-08-22 23:02:32] [Rank 0] step:3461/10000 train_time:303150ms step_avg:87.59ms +[2025-08-22 23:02:32] [Rank 0] step:3461/10000 train_time:303150ms step_avg:87.59ms +[2025-08-22 23:02:34] [Rank 0] step:3481/10000 train_time:304944ms step_avg:87.60ms +[2025-08-22 23:02:34] [Rank 0] step:3481/10000 train_time:304944ms step_avg:87.60ms +[2025-08-22 23:02:35] [Rank 0] step:3501/10000 train_time:306740ms step_avg:87.61ms +[2025-08-22 23:02:35] [Rank 0] step:3501/10000 train_time:306740ms step_avg:87.61ms +[2025-08-22 23:02:37] [Rank 0] step:3521/10000 train_time:308537ms step_avg:87.63ms +[2025-08-22 23:02:37] [Rank 0] step:3521/10000 train_time:308537ms step_avg:87.63ms +[2025-08-22 23:02:39] [Rank 0] step:3541/10000 train_time:310332ms step_avg:87.64ms +[2025-08-22 23:02:39] [Rank 0] step:3541/10000 train_time:310332ms step_avg:87.64ms +[2025-08-22 23:02:41] [Rank 0] step:3561/10000 train_time:312134ms step_avg:87.65ms +[2025-08-22 23:02:41] [Rank 0] step:3561/10000 train_time:312134ms step_avg:87.65ms +[2025-08-22 23:02:43] [Rank 0] step:3581/10000 train_time:313931ms step_avg:87.67ms +[2025-08-22 23:02:43] [Rank 0] step:3581/10000 train_time:313931ms step_avg:87.67ms +[2025-08-22 23:02:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:02:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:02:58] [Rank 0] PRINT: step:3600/10000 val_loss:3.9591 svd_entropy: attn_qk:H=0.7831,top10E=0.24,eRank=206.6,q75/q25=46.40 attn_vo:H=0.8273,top10E=0.06,eRank=384.9,q75/q25=inf mlp_w1:H=0.9100,top10E=0.13,eRank=425.7,q75/q25=4.40 mlp_w2:H=0.9699,top10E=0.05,eRank=628.8,q75/q25=2.80 vo_prod:H=0.6779,top10E=0.11,eRank=202.7,q75/q25=inf train_time:315743ms step_avg:87.71ms +[2025-08-22 23:02:58] [Rank 0] PRINT: step:3600/10000 val_loss:3.9591 svd_entropy: attn_qk:H=0.7831,top10E=0.24,eRank=206.6,q75/q25=46.40 attn_vo:H=0.8273,top10E=0.06,eRank=384.9,q75/q25=inf mlp_w1:H=0.9100,top10E=0.13,eRank=425.7,q75/q25=4.40 mlp_w2:H=0.9699,top10E=0.05,eRank=628.8,q75/q25=2.80 vo_prod:H=0.6779,top10E=0.11,eRank=202.7,q75/q25=inf train_time:315743ms step_avg:87.71ms +[2025-08-22 23:02:58] [Rank 0] step:3601/10000 train_time:315762ms step_avg:87.69ms +[2025-08-22 23:02:58] [Rank 0] step:3601/10000 train_time:315762ms step_avg:87.69ms +[2025-08-22 23:03:00] [Rank 0] step:3621/10000 train_time:317552ms step_avg:87.70ms +[2025-08-22 23:03:00] [Rank 0] step:3621/10000 train_time:317552ms step_avg:87.70ms +[2025-08-22 23:03:02] [Rank 0] step:3641/10000 train_time:319338ms step_avg:87.71ms +[2025-08-22 23:03:02] [Rank 0] step:3641/10000 train_time:319338ms step_avg:87.71ms +[2025-08-22 23:03:04] [Rank 0] step:3661/10000 train_time:321127ms step_avg:87.72ms +[2025-08-22 23:03:04] [Rank 0] step:3661/10000 train_time:321127ms step_avg:87.72ms +[2025-08-22 23:03:05] [Rank 0] step:3681/10000 train_time:322917ms step_avg:87.73ms +[2025-08-22 23:03:05] [Rank 0] step:3681/10000 train_time:322917ms step_avg:87.73ms +[2025-08-22 23:03:07] [Rank 0] step:3701/10000 train_time:324706ms step_avg:87.73ms +[2025-08-22 23:03:07] [Rank 0] step:3701/10000 train_time:324706ms step_avg:87.73ms +[2025-08-22 23:03:09] [Rank 0] step:3721/10000 train_time:326524ms step_avg:87.75ms +[2025-08-22 23:03:09] [Rank 0] step:3721/10000 train_time:326524ms step_avg:87.75ms +[2025-08-22 23:03:11] [Rank 0] 
step:3741/10000 train_time:328351ms step_avg:87.77ms +[2025-08-22 23:03:11] [Rank 0] step:3741/10000 train_time:328351ms step_avg:87.77ms +[2025-08-22 23:03:13] [Rank 0] step:3761/10000 train_time:330176ms step_avg:87.79ms +[2025-08-22 23:03:13] [Rank 0] step:3761/10000 train_time:330176ms step_avg:87.79ms +[2025-08-22 23:03:14] [Rank 0] step:3781/10000 train_time:332004ms step_avg:87.81ms +[2025-08-22 23:03:14] [Rank 0] step:3781/10000 train_time:332004ms step_avg:87.81ms +[2025-08-22 23:03:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:03:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:03:30] [Rank 0] PRINT: step:3800/10000 val_loss:3.9238 svd_entropy: attn_qk:H=0.7840,top10E=0.24,eRank=207.7,q75/q25=46.30 attn_vo:H=0.8277,top10E=0.06,eRank=385.8,q75/q25=inf mlp_w1:H=0.9112,top10E=0.13,eRank=429.0,q75/q25=4.36 mlp_w2:H=0.9698,top10E=0.05,eRank=628.4,q75/q25=2.81 vo_prod:H=0.6788,top10E=0.11,eRank=204.0,q75/q25=inf train_time:333844ms step_avg:87.85ms +[2025-08-22 23:03:30] [Rank 0] PRINT: step:3800/10000 val_loss:3.9238 svd_entropy: attn_qk:H=0.7840,top10E=0.24,eRank=207.7,q75/q25=46.30 attn_vo:H=0.8277,top10E=0.06,eRank=385.8,q75/q25=inf mlp_w1:H=0.9112,top10E=0.13,eRank=429.0,q75/q25=4.36 mlp_w2:H=0.9698,top10E=0.05,eRank=628.4,q75/q25=2.81 vo_prod:H=0.6788,top10E=0.11,eRank=204.0,q75/q25=inf train_time:333844ms step_avg:87.85ms +[2025-08-22 23:03:30] [Rank 0] step:3801/10000 train_time:333862ms step_avg:87.84ms +[2025-08-22 23:03:30] [Rank 0] step:3801/10000 train_time:333862ms step_avg:87.84ms +[2025-08-22 23:03:32] [Rank 0] step:3821/10000 train_time:335695ms step_avg:87.86ms +[2025-08-22 23:03:32] [Rank 0] step:3821/10000 train_time:335695ms step_avg:87.86ms +[2025-08-22 23:03:34] [Rank 0] step:3841/10000 train_time:337521ms step_avg:87.87ms +[2025-08-22 
23:03:34] [Rank 0] step:3841/10000 train_time:337521ms step_avg:87.87ms +[2025-08-22 23:03:35] [Rank 0] step:3861/10000 train_time:339345ms step_avg:87.89ms +[2025-08-22 23:03:35] [Rank 0] step:3861/10000 train_time:339345ms step_avg:87.89ms +[2025-08-22 23:03:37] [Rank 0] step:3881/10000 train_time:341169ms step_avg:87.91ms +[2025-08-22 23:03:37] [Rank 0] step:3881/10000 train_time:341169ms step_avg:87.91ms +[2025-08-22 23:03:39] [Rank 0] step:3901/10000 train_time:342995ms step_avg:87.92ms +[2025-08-22 23:03:39] [Rank 0] step:3901/10000 train_time:342995ms step_avg:87.92ms +[2025-08-22 23:03:41] [Rank 0] step:3921/10000 train_time:344821ms step_avg:87.94ms +[2025-08-22 23:03:41] [Rank 0] step:3921/10000 train_time:344821ms step_avg:87.94ms +[2025-08-22 23:03:43] [Rank 0] step:3941/10000 train_time:346648ms step_avg:87.96ms +[2025-08-22 23:03:43] [Rank 0] step:3941/10000 train_time:346648ms step_avg:87.96ms +[2025-08-22 23:03:45] [Rank 0] step:3961/10000 train_time:348472ms step_avg:87.98ms +[2025-08-22 23:03:45] [Rank 0] step:3961/10000 train_time:348472ms step_avg:87.98ms +[2025-08-22 23:03:46] [Rank 0] step:3981/10000 train_time:350299ms step_avg:87.99ms +[2025-08-22 23:03:46] [Rank 0] step:3981/10000 train_time:350299ms step_avg:87.99ms +[2025-08-22 23:03:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:03:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:04:02] [Rank 0] PRINT: step:4000/10000 val_loss:3.9029 svd_entropy: attn_qk:H=0.7852,top10E=0.24,eRank=208.8,q75/q25=45.86 attn_vo:H=0.8280,top10E=0.06,eRank=386.7,q75/q25=inf mlp_w1:H=0.9123,top10E=0.13,eRank=432.2,q75/q25=4.33 mlp_w2:H=0.9697,top10E=0.05,eRank=628.2,q75/q25=2.81 vo_prod:H=0.6796,top10E=0.11,eRank=205.3,q75/q25=inf train_time:352139ms step_avg:88.03ms +[2025-08-22 23:04:02] [Rank 0] PRINT: step:4000/10000 val_loss:3.9029 svd_entropy: attn_qk:H=0.7852,top10E=0.24,eRank=208.8,q75/q25=45.86 attn_vo:H=0.8280,top10E=0.06,eRank=386.7,q75/q25=inf mlp_w1:H=0.9123,top10E=0.13,eRank=432.2,q75/q25=4.33 mlp_w2:H=0.9697,top10E=0.05,eRank=628.2,q75/q25=2.81 vo_prod:H=0.6796,top10E=0.11,eRank=205.3,q75/q25=inf train_time:352139ms step_avg:88.03ms +[2025-08-22 23:04:02] [Rank 0] step:4001/10000 train_time:352158ms step_avg:88.02ms +[2025-08-22 23:04:02] [Rank 0] step:4001/10000 train_time:352158ms step_avg:88.02ms +[2025-08-22 23:04:04] [Rank 0] step:4021/10000 train_time:353981ms step_avg:88.03ms +[2025-08-22 23:04:04] [Rank 0] step:4021/10000 train_time:353981ms step_avg:88.03ms +[2025-08-22 23:04:06] [Rank 0] step:4041/10000 train_time:355806ms step_avg:88.05ms +[2025-08-22 23:04:06] [Rank 0] step:4041/10000 train_time:355806ms step_avg:88.05ms +[2025-08-22 23:04:07] [Rank 0] step:4061/10000 train_time:357630ms step_avg:88.06ms +[2025-08-22 23:04:07] [Rank 0] step:4061/10000 train_time:357630ms step_avg:88.06ms +[2025-08-22 23:04:09] [Rank 0] step:4081/10000 train_time:359496ms step_avg:88.09ms +[2025-08-22 23:04:09] [Rank 0] step:4081/10000 train_time:359496ms step_avg:88.09ms +[2025-08-22 23:04:11] [Rank 0] step:4101/10000 train_time:361323ms step_avg:88.11ms +[2025-08-22 23:04:11] [Rank 0] step:4101/10000 train_time:361323ms step_avg:88.11ms +[2025-08-22 23:04:13] [Rank 0] step:4121/10000 train_time:363149ms step_avg:88.12ms +[2025-08-22 23:04:13] [Rank 0] step:4121/10000 train_time:363149ms step_avg:88.12ms +[2025-08-22 23:04:15] [Rank 0] 
step:4141/10000 train_time:364979ms step_avg:88.14ms +[2025-08-22 23:04:15] [Rank 0] step:4141/10000 train_time:364979ms step_avg:88.14ms +[2025-08-22 23:04:17] [Rank 0] step:4161/10000 train_time:366808ms step_avg:88.15ms +[2025-08-22 23:04:17] [Rank 0] step:4161/10000 train_time:366808ms step_avg:88.15ms +[2025-08-22 23:04:18] [Rank 0] step:4181/10000 train_time:368636ms step_avg:88.17ms +[2025-08-22 23:04:18] [Rank 0] step:4181/10000 train_time:368636ms step_avg:88.17ms +[2025-08-22 23:04:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:04:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:04:34] [Rank 0] PRINT: step:4200/10000 val_loss:3.8957 svd_entropy: attn_qk:H=0.7863,top10E=0.24,eRank=209.9,q75/q25=45.69 attn_vo:H=0.8283,top10E=0.06,eRank=387.5,q75/q25=inf mlp_w1:H=0.9134,top10E=0.13,eRank=435.2,q75/q25=4.31 mlp_w2:H=0.9696,top10E=0.05,eRank=627.8,q75/q25=2.81 vo_prod:H=0.6803,top10E=0.11,eRank=206.4,q75/q25=inf train_time:370477ms step_avg:88.21ms +[2025-08-22 23:04:34] [Rank 0] PRINT: step:4200/10000 val_loss:3.8957 svd_entropy: attn_qk:H=0.7863,top10E=0.24,eRank=209.9,q75/q25=45.69 attn_vo:H=0.8283,top10E=0.06,eRank=387.5,q75/q25=inf mlp_w1:H=0.9134,top10E=0.13,eRank=435.2,q75/q25=4.31 mlp_w2:H=0.9696,top10E=0.05,eRank=627.8,q75/q25=2.81 vo_prod:H=0.6803,top10E=0.11,eRank=206.4,q75/q25=inf train_time:370477ms step_avg:88.21ms +[2025-08-22 23:04:34] [Rank 0] step:4201/10000 train_time:370495ms step_avg:88.19ms +[2025-08-22 23:04:34] [Rank 0] step:4201/10000 train_time:370495ms step_avg:88.19ms +[2025-08-22 23:04:36] [Rank 0] step:4221/10000 train_time:372327ms step_avg:88.21ms +[2025-08-22 23:04:36] [Rank 0] step:4221/10000 train_time:372327ms step_avg:88.21ms +[2025-08-22 23:04:38] [Rank 0] step:4241/10000 train_time:374156ms step_avg:88.22ms +[2025-08-22 
23:04:38] [Rank 0] step:4241/10000 train_time:374156ms step_avg:88.22ms +[2025-08-22 23:04:39] [Rank 0] step:4261/10000 train_time:375983ms step_avg:88.24ms +[2025-08-22 23:04:39] [Rank 0] step:4261/10000 train_time:375983ms step_avg:88.24ms +[2025-08-22 23:04:41] [Rank 0] step:4281/10000 train_time:377814ms step_avg:88.25ms +[2025-08-22 23:04:41] [Rank 0] step:4281/10000 train_time:377814ms step_avg:88.25ms +[2025-08-22 23:04:43] [Rank 0] step:4301/10000 train_time:379642ms step_avg:88.27ms +[2025-08-22 23:04:43] [Rank 0] step:4301/10000 train_time:379642ms step_avg:88.27ms +[2025-08-22 23:04:45] [Rank 0] step:4321/10000 train_time:381474ms step_avg:88.28ms +[2025-08-22 23:04:45] [Rank 0] step:4321/10000 train_time:381474ms step_avg:88.28ms +[2025-08-22 23:04:47] [Rank 0] step:4341/10000 train_time:383303ms step_avg:88.30ms +[2025-08-22 23:04:47] [Rank 0] step:4341/10000 train_time:383303ms step_avg:88.30ms +[2025-08-22 23:04:49] [Rank 0] step:4361/10000 train_time:385134ms step_avg:88.31ms +[2025-08-22 23:04:49] [Rank 0] step:4361/10000 train_time:385134ms step_avg:88.31ms +[2025-08-22 23:04:50] [Rank 0] step:4381/10000 train_time:386965ms step_avg:88.33ms +[2025-08-22 23:04:50] [Rank 0] step:4381/10000 train_time:386965ms step_avg:88.33ms +[2025-08-22 23:04:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:04:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:05:06] [Rank 0] PRINT: step:4400/10000 val_loss:3.8733 svd_entropy: attn_qk:H=0.7874,top10E=0.24,eRank=211.1,q75/q25=45.43 attn_vo:H=0.8286,top10E=0.06,eRank=388.3,q75/q25=inf mlp_w1:H=0.9143,top10E=0.13,eRank=438.0,q75/q25=4.28 mlp_w2:H=0.9696,top10E=0.05,eRank=627.5,q75/q25=2.82 vo_prod:H=0.6810,top10E=0.11,eRank=207.5,q75/q25=inf train_time:388808ms step_avg:88.37ms +[2025-08-22 23:05:06] [Rank 0] PRINT: step:4400/10000 val_loss:3.8733 svd_entropy: attn_qk:H=0.7874,top10E=0.24,eRank=211.1,q75/q25=45.43 attn_vo:H=0.8286,top10E=0.06,eRank=388.3,q75/q25=inf mlp_w1:H=0.9143,top10E=0.13,eRank=438.0,q75/q25=4.28 mlp_w2:H=0.9696,top10E=0.05,eRank=627.5,q75/q25=2.82 vo_prod:H=0.6810,top10E=0.11,eRank=207.5,q75/q25=inf train_time:388808ms step_avg:88.37ms +[2025-08-22 23:05:06] [Rank 0] step:4401/10000 train_time:388827ms step_avg:88.35ms +[2025-08-22 23:05:06] [Rank 0] step:4401/10000 train_time:388827ms step_avg:88.35ms +[2025-08-22 23:05:08] [Rank 0] step:4421/10000 train_time:390638ms step_avg:88.36ms +[2025-08-22 23:05:08] [Rank 0] step:4421/10000 train_time:390638ms step_avg:88.36ms +[2025-08-22 23:05:10] [Rank 0] step:4441/10000 train_time:392461ms step_avg:88.37ms +[2025-08-22 23:05:10] [Rank 0] step:4441/10000 train_time:392461ms step_avg:88.37ms +[2025-08-22 23:05:11] [Rank 0] step:4461/10000 train_time:394290ms step_avg:88.39ms +[2025-08-22 23:05:11] [Rank 0] step:4461/10000 train_time:394290ms step_avg:88.39ms +[2025-08-22 23:05:13] [Rank 0] step:4481/10000 train_time:396124ms step_avg:88.40ms +[2025-08-22 23:05:13] [Rank 0] step:4481/10000 train_time:396124ms step_avg:88.40ms +[2025-08-22 23:05:15] [Rank 0] step:4501/10000 train_time:397955ms step_avg:88.41ms +[2025-08-22 23:05:15] [Rank 0] step:4501/10000 train_time:397955ms step_avg:88.41ms +[2025-08-22 23:05:17] [Rank 0] step:4521/10000 train_time:399789ms step_avg:88.43ms +[2025-08-22 23:05:17] [Rank 0] step:4521/10000 train_time:399789ms step_avg:88.43ms +[2025-08-22 23:05:19] [Rank 0] 
step:4541/10000 train_time:401622ms step_avg:88.44ms +[2025-08-22 23:05:19] [Rank 0] step:4541/10000 train_time:401622ms step_avg:88.44ms +[2025-08-22 23:05:21] [Rank 0] step:4561/10000 train_time:403455ms step_avg:88.46ms +[2025-08-22 23:05:21] [Rank 0] step:4561/10000 train_time:403455ms step_avg:88.46ms +[2025-08-22 23:05:22] [Rank 0] step:4581/10000 train_time:405293ms step_avg:88.47ms +[2025-08-22 23:05:22] [Rank 0] step:4581/10000 train_time:405293ms step_avg:88.47ms +[2025-08-22 23:05:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:05:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:05:38] [Rank 0] PRINT: step:4600/10000 val_loss:3.8559 svd_entropy: attn_qk:H=0.7883,top10E=0.24,eRank=212.0,q75/q25=45.12 attn_vo:H=0.8288,top10E=0.06,eRank=388.9,q75/q25=inf mlp_w1:H=0.9152,top10E=0.12,eRank=440.7,q75/q25=4.26 mlp_w2:H=0.9695,top10E=0.05,eRank=627.2,q75/q25=2.82 vo_prod:H=0.6817,top10E=0.11,eRank=208.7,q75/q25=inf train_time:407143ms step_avg:88.51ms +[2025-08-22 23:05:38] [Rank 0] PRINT: step:4600/10000 val_loss:3.8559 svd_entropy: attn_qk:H=0.7883,top10E=0.24,eRank=212.0,q75/q25=45.12 attn_vo:H=0.8288,top10E=0.06,eRank=388.9,q75/q25=inf mlp_w1:H=0.9152,top10E=0.12,eRank=440.7,q75/q25=4.26 mlp_w2:H=0.9695,top10E=0.05,eRank=627.2,q75/q25=2.82 vo_prod:H=0.6817,top10E=0.11,eRank=208.7,q75/q25=inf train_time:407143ms step_avg:88.51ms +[2025-08-22 23:05:38] [Rank 0] step:4601/10000 train_time:407161ms step_avg:88.49ms +[2025-08-22 23:05:38] [Rank 0] step:4601/10000 train_time:407161ms step_avg:88.49ms +[2025-08-22 23:05:40] [Rank 0] step:4621/10000 train_time:408981ms step_avg:88.50ms +[2025-08-22 23:05:40] [Rank 0] step:4621/10000 train_time:408981ms step_avg:88.50ms +[2025-08-22 23:05:42] [Rank 0] step:4641/10000 train_time:410814ms step_avg:88.52ms +[2025-08-22 
23:05:42] [Rank 0] step:4641/10000 train_time:410814ms step_avg:88.52ms +[2025-08-22 23:05:44] [Rank 0] step:4661/10000 train_time:412646ms step_avg:88.53ms +[2025-08-22 23:05:44] [Rank 0] step:4661/10000 train_time:412646ms step_avg:88.53ms +[2025-08-22 23:05:45] [Rank 0] step:4681/10000 train_time:414479ms step_avg:88.54ms +[2025-08-22 23:05:45] [Rank 0] step:4681/10000 train_time:414479ms step_avg:88.54ms +[2025-08-22 23:05:47] [Rank 0] step:4701/10000 train_time:416312ms step_avg:88.56ms +[2025-08-22 23:05:47] [Rank 0] step:4701/10000 train_time:416312ms step_avg:88.56ms +[2025-08-22 23:05:49] [Rank 0] step:4721/10000 train_time:418145ms step_avg:88.57ms +[2025-08-22 23:05:49] [Rank 0] step:4721/10000 train_time:418145ms step_avg:88.57ms +[2025-08-22 23:05:51] [Rank 0] step:4741/10000 train_time:419979ms step_avg:88.58ms +[2025-08-22 23:05:51] [Rank 0] step:4741/10000 train_time:419979ms step_avg:88.58ms +[2025-08-22 23:05:53] [Rank 0] step:4761/10000 train_time:421812ms step_avg:88.60ms +[2025-08-22 23:05:53] [Rank 0] step:4761/10000 train_time:421812ms step_avg:88.60ms +[2025-08-22 23:05:55] [Rank 0] step:4781/10000 train_time:423647ms step_avg:88.61ms +[2025-08-22 23:05:55] [Rank 0] step:4781/10000 train_time:423647ms step_avg:88.61ms +[2025-08-22 23:05:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:05:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:06:10] [Rank 0] PRINT: step:4800/10000 val_loss:3.8485 svd_entropy: attn_qk:H=0.7892,top10E=0.24,eRank=213.0,q75/q25=45.04 attn_vo:H=0.8290,top10E=0.06,eRank=389.6,q75/q25=inf mlp_w1:H=0.9161,top10E=0.12,eRank=443.2,q75/q25=4.23 mlp_w2:H=0.9694,top10E=0.05,eRank=626.9,q75/q25=2.82 vo_prod:H=0.6823,top10E=0.11,eRank=209.7,q75/q25=inf train_time:425496ms step_avg:88.64ms +[2025-08-22 23:06:10] [Rank 0] PRINT: step:4800/10000 val_loss:3.8485 svd_entropy: attn_qk:H=0.7892,top10E=0.24,eRank=213.0,q75/q25=45.04 attn_vo:H=0.8290,top10E=0.06,eRank=389.6,q75/q25=inf mlp_w1:H=0.9161,top10E=0.12,eRank=443.2,q75/q25=4.23 mlp_w2:H=0.9694,top10E=0.05,eRank=626.9,q75/q25=2.82 vo_prod:H=0.6823,top10E=0.11,eRank=209.7,q75/q25=inf train_time:425496ms step_avg:88.64ms +[2025-08-22 23:06:10] [Rank 0] step:4801/10000 train_time:425514ms step_avg:88.63ms +[2025-08-22 23:06:10] [Rank 0] step:4801/10000 train_time:425514ms step_avg:88.63ms +[2025-08-22 23:06:12] [Rank 0] step:4821/10000 train_time:427340ms step_avg:88.64ms +[2025-08-22 23:06:12] [Rank 0] step:4821/10000 train_time:427340ms step_avg:88.64ms +[2025-08-22 23:06:14] [Rank 0] step:4841/10000 train_time:429170ms step_avg:88.65ms +[2025-08-22 23:06:14] [Rank 0] step:4841/10000 train_time:429170ms step_avg:88.65ms +[2025-08-22 23:06:16] [Rank 0] step:4861/10000 train_time:431001ms step_avg:88.67ms +[2025-08-22 23:06:16] [Rank 0] step:4861/10000 train_time:431001ms step_avg:88.67ms +[2025-08-22 23:06:17] [Rank 0] step:4881/10000 train_time:432834ms step_avg:88.68ms +[2025-08-22 23:06:17] [Rank 0] step:4881/10000 train_time:432834ms step_avg:88.68ms +[2025-08-22 23:06:19] [Rank 0] step:4901/10000 train_time:434665ms step_avg:88.69ms +[2025-08-22 23:06:19] [Rank 0] step:4901/10000 train_time:434665ms step_avg:88.69ms +[2025-08-22 23:06:21] [Rank 0] step:4921/10000 train_time:436499ms step_avg:88.70ms +[2025-08-22 23:06:21] [Rank 0] step:4921/10000 train_time:436499ms step_avg:88.70ms +[2025-08-22 23:06:23] [Rank 0] 
step:4941/10000 train_time:438337ms step_avg:88.71ms +[2025-08-22 23:06:23] [Rank 0] step:4941/10000 train_time:438337ms step_avg:88.71ms +[2025-08-22 23:06:25] [Rank 0] step:4961/10000 train_time:440171ms step_avg:88.73ms +[2025-08-22 23:06:25] [Rank 0] step:4961/10000 train_time:440171ms step_avg:88.73ms +[2025-08-22 23:06:27] [Rank 0] step:4981/10000 train_time:442005ms step_avg:88.74ms +[2025-08-22 23:06:27] [Rank 0] step:4981/10000 train_time:442005ms step_avg:88.74ms +[2025-08-22 23:06:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:06:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:06:42] [Rank 0] PRINT: step:5000/10000 val_loss:3.8334 svd_entropy: attn_qk:H=0.7901,top10E=0.24,eRank=213.9,q75/q25=44.83 attn_vo:H=0.8292,top10E=0.06,eRank=390.1,q75/q25=inf mlp_w1:H=0.9170,top10E=0.12,eRank=445.7,q75/q25=4.22 mlp_w2:H=0.9693,top10E=0.05,eRank=626.6,q75/q25=2.82 vo_prod:H=0.6828,top10E=0.11,eRank=210.5,q75/q25=inf train_time:443855ms step_avg:88.77ms +[2025-08-22 23:06:42] [Rank 0] PRINT: step:5000/10000 val_loss:3.8334 svd_entropy: attn_qk:H=0.7901,top10E=0.24,eRank=213.9,q75/q25=44.83 attn_vo:H=0.8292,top10E=0.06,eRank=390.1,q75/q25=inf mlp_w1:H=0.9170,top10E=0.12,eRank=445.7,q75/q25=4.22 mlp_w2:H=0.9693,top10E=0.05,eRank=626.6,q75/q25=2.82 vo_prod:H=0.6828,top10E=0.11,eRank=210.5,q75/q25=inf train_time:443855ms step_avg:88.77ms +[2025-08-22 23:06:42] [Rank 0] step:5001/10000 train_time:443874ms step_avg:88.76ms +[2025-08-22 23:06:42] [Rank 0] step:5001/10000 train_time:443874ms step_avg:88.76ms +[2025-08-22 23:06:44] [Rank 0] step:5021/10000 train_time:445700ms step_avg:88.77ms +[2025-08-22 23:06:44] [Rank 0] step:5021/10000 train_time:445700ms step_avg:88.77ms +[2025-08-22 23:06:46] [Rank 0] step:5041/10000 train_time:447533ms step_avg:88.78ms +[2025-08-22 
23:06:46] [Rank 0] step:5041/10000 train_time:447533ms step_avg:88.78ms +[2025-08-22 23:06:48] [Rank 0] step:5061/10000 train_time:449364ms step_avg:88.79ms +[2025-08-22 23:06:48] [Rank 0] step:5061/10000 train_time:449364ms step_avg:88.79ms +[2025-08-22 23:06:50] [Rank 0] step:5081/10000 train_time:451199ms step_avg:88.80ms +[2025-08-22 23:06:50] [Rank 0] step:5081/10000 train_time:451199ms step_avg:88.80ms +[2025-08-22 23:06:52] [Rank 0] step:5101/10000 train_time:453034ms step_avg:88.81ms +[2025-08-22 23:06:52] [Rank 0] step:5101/10000 train_time:453034ms step_avg:88.81ms +[2025-08-22 23:06:53] [Rank 0] step:5121/10000 train_time:454870ms step_avg:88.82ms +[2025-08-22 23:06:53] [Rank 0] step:5121/10000 train_time:454870ms step_avg:88.82ms +[2025-08-22 23:06:55] [Rank 0] step:5141/10000 train_time:456710ms step_avg:88.84ms +[2025-08-22 23:06:55] [Rank 0] step:5141/10000 train_time:456710ms step_avg:88.84ms +[2025-08-22 23:06:57] [Rank 0] step:5161/10000 train_time:458545ms step_avg:88.85ms +[2025-08-22 23:06:57] [Rank 0] step:5161/10000 train_time:458545ms step_avg:88.85ms +[2025-08-22 23:06:59] [Rank 0] step:5181/10000 train_time:460385ms step_avg:88.86ms +[2025-08-22 23:06:59] [Rank 0] step:5181/10000 train_time:460385ms step_avg:88.86ms +[2025-08-22 23:07:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:07:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:07:14] [Rank 0] PRINT: step:5200/10000 val_loss:3.8217 svd_entropy: attn_qk:H=0.7909,top10E=0.24,eRank=214.8,q75/q25=44.60 attn_vo:H=0.8294,top10E=0.06,eRank=390.5,q75/q25=inf mlp_w1:H=0.9177,top10E=0.12,eRank=447.9,q75/q25=4.19 mlp_w2:H=0.9693,top10E=0.05,eRank=626.3,q75/q25=2.83 vo_prod:H=0.6833,top10E=0.11,eRank=211.3,q75/q25=inf train_time:462261ms step_avg:88.90ms +[2025-08-22 23:07:14] [Rank 0] PRINT: step:5200/10000 val_loss:3.8217 svd_entropy: attn_qk:H=0.7909,top10E=0.24,eRank=214.8,q75/q25=44.60 attn_vo:H=0.8294,top10E=0.06,eRank=390.5,q75/q25=inf mlp_w1:H=0.9177,top10E=0.12,eRank=447.9,q75/q25=4.19 mlp_w2:H=0.9693,top10E=0.05,eRank=626.3,q75/q25=2.83 vo_prod:H=0.6833,top10E=0.11,eRank=211.3,q75/q25=inf train_time:462261ms step_avg:88.90ms +[2025-08-22 23:07:14] [Rank 0] step:5201/10000 train_time:462279ms step_avg:88.88ms +[2025-08-22 23:07:14] [Rank 0] step:5201/10000 train_time:462279ms step_avg:88.88ms +[2025-08-22 23:07:16] [Rank 0] step:5221/10000 train_time:464134ms step_avg:88.90ms +[2025-08-22 23:07:16] [Rank 0] step:5221/10000 train_time:464134ms step_avg:88.90ms +[2025-08-22 23:07:18] [Rank 0] step:5241/10000 train_time:465998ms step_avg:88.91ms +[2025-08-22 23:07:18] [Rank 0] step:5241/10000 train_time:465998ms step_avg:88.91ms +[2025-08-22 23:07:20] [Rank 0] step:5261/10000 train_time:467859ms step_avg:88.93ms +[2025-08-22 23:07:20] [Rank 0] step:5261/10000 train_time:467859ms step_avg:88.93ms +[2025-08-22 23:07:22] [Rank 0] step:5281/10000 train_time:469721ms step_avg:88.95ms +[2025-08-22 23:07:22] [Rank 0] step:5281/10000 train_time:469721ms step_avg:88.95ms +[2025-08-22 23:07:24] [Rank 0] step:5301/10000 train_time:471594ms step_avg:88.96ms +[2025-08-22 23:07:24] [Rank 0] step:5301/10000 train_time:471594ms step_avg:88.96ms +[2025-08-22 23:07:26] [Rank 0] step:5321/10000 train_time:473460ms step_avg:88.98ms +[2025-08-22 23:07:26] [Rank 0] step:5321/10000 train_time:473460ms step_avg:88.98ms +[2025-08-22 23:07:28] [Rank 0] 
step:5341/10000 train_time:475326ms step_avg:89.00ms +[2025-08-22 23:07:28] [Rank 0] step:5341/10000 train_time:475326ms step_avg:89.00ms +[2025-08-22 23:07:29] [Rank 0] step:5361/10000 train_time:477192ms step_avg:89.01ms +[2025-08-22 23:07:29] [Rank 0] step:5361/10000 train_time:477192ms step_avg:89.01ms +[2025-08-22 23:07:31] [Rank 0] step:5381/10000 train_time:479058ms step_avg:89.03ms +[2025-08-22 23:07:31] [Rank 0] step:5381/10000 train_time:479058ms step_avg:89.03ms +[2025-08-22 23:07:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:07:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:07:47] [Rank 0] PRINT: step:5400/10000 val_loss:3.8097 svd_entropy: attn_qk:H=0.7916,top10E=0.24,eRank=215.6,q75/q25=44.44 attn_vo:H=0.8295,top10E=0.06,eRank=390.9,q75/q25=inf mlp_w1:H=0.9184,top10E=0.12,eRank=450.0,q75/q25=4.18 mlp_w2:H=0.9692,top10E=0.05,eRank=626.0,q75/q25=2.83 vo_prod:H=0.6838,top10E=0.11,eRank=212.1,q75/q25=inf train_time:480933ms step_avg:89.06ms +[2025-08-22 23:07:47] [Rank 0] PRINT: step:5400/10000 val_loss:3.8097 svd_entropy: attn_qk:H=0.7916,top10E=0.24,eRank=215.6,q75/q25=44.44 attn_vo:H=0.8295,top10E=0.06,eRank=390.9,q75/q25=inf mlp_w1:H=0.9184,top10E=0.12,eRank=450.0,q75/q25=4.18 mlp_w2:H=0.9692,top10E=0.05,eRank=626.0,q75/q25=2.83 vo_prod:H=0.6838,top10E=0.11,eRank=212.1,q75/q25=inf train_time:480933ms step_avg:89.06ms +[2025-08-22 23:07:47] [Rank 0] step:5401/10000 train_time:480951ms step_avg:89.05ms +[2025-08-22 23:07:47] [Rank 0] step:5401/10000 train_time:480951ms step_avg:89.05ms +[2025-08-22 23:07:49] [Rank 0] step:5421/10000 train_time:482820ms step_avg:89.06ms +[2025-08-22 23:07:49] [Rank 0] step:5421/10000 train_time:482820ms step_avg:89.06ms +[2025-08-22 23:07:51] [Rank 0] step:5441/10000 train_time:484679ms step_avg:89.08ms +[2025-08-22 
23:07:51] [Rank 0] step:5441/10000 train_time:484679ms step_avg:89.08ms +[2025-08-22 23:07:52] [Rank 0] step:5461/10000 train_time:486545ms step_avg:89.09ms +[2025-08-22 23:07:52] [Rank 0] step:5461/10000 train_time:486545ms step_avg:89.09ms +[2025-08-22 23:07:54] [Rank 0] step:5481/10000 train_time:488407ms step_avg:89.11ms +[2025-08-22 23:07:54] [Rank 0] step:5481/10000 train_time:488407ms step_avg:89.11ms +[2025-08-22 23:07:56] [Rank 0] step:5501/10000 train_time:490277ms step_avg:89.13ms +[2025-08-22 23:07:56] [Rank 0] step:5501/10000 train_time:490277ms step_avg:89.13ms +[2025-08-22 23:07:58] [Rank 0] step:5521/10000 train_time:492145ms step_avg:89.14ms +[2025-08-22 23:07:58] [Rank 0] step:5521/10000 train_time:492145ms step_avg:89.14ms +[2025-08-22 23:08:00] [Rank 0] step:5541/10000 train_time:494011ms step_avg:89.16ms +[2025-08-22 23:08:00] [Rank 0] step:5541/10000 train_time:494011ms step_avg:89.16ms +[2025-08-22 23:08:02] [Rank 0] step:5561/10000 train_time:495876ms step_avg:89.17ms +[2025-08-22 23:08:02] [Rank 0] step:5561/10000 train_time:495876ms step_avg:89.17ms +[2025-08-22 23:08:04] [Rank 0] step:5581/10000 train_time:497743ms step_avg:89.19ms +[2025-08-22 23:08:04] [Rank 0] step:5581/10000 train_time:497743ms step_avg:89.19ms +[2025-08-22 23:08:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:08:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:08:19] [Rank 0] PRINT: step:5600/10000 val_loss:3.8004 svd_entropy: attn_qk:H=0.7925,top10E=0.23,eRank=216.5,q75/q25=44.33 attn_vo:H=0.8296,top10E=0.06,eRank=391.3,q75/q25=inf mlp_w1:H=0.9191,top10E=0.12,eRank=451.9,q75/q25=4.15 mlp_w2:H=0.9691,top10E=0.05,eRank=625.7,q75/q25=2.83 vo_prod:H=0.6841,top10E=0.11,eRank=212.7,q75/q25=inf train_time:499691ms step_avg:89.23ms +[2025-08-22 23:08:19] [Rank 0] PRINT: step:5600/10000 val_loss:3.8004 svd_entropy: attn_qk:H=0.7925,top10E=0.23,eRank=216.5,q75/q25=44.33 attn_vo:H=0.8296,top10E=0.06,eRank=391.3,q75/q25=inf mlp_w1:H=0.9191,top10E=0.12,eRank=451.9,q75/q25=4.15 mlp_w2:H=0.9691,top10E=0.05,eRank=625.7,q75/q25=2.83 vo_prod:H=0.6841,top10E=0.11,eRank=212.7,q75/q25=inf train_time:499691ms step_avg:89.23ms +[2025-08-22 23:08:19] [Rank 0] step:5601/10000 train_time:499710ms step_avg:89.22ms +[2025-08-22 23:08:19] [Rank 0] step:5601/10000 train_time:499710ms step_avg:89.22ms +[2025-08-22 23:08:21] [Rank 0] step:5621/10000 train_time:501582ms step_avg:89.23ms +[2025-08-22 23:08:21] [Rank 0] step:5621/10000 train_time:501582ms step_avg:89.23ms +[2025-08-22 23:08:23] [Rank 0] step:5641/10000 train_time:503447ms step_avg:89.25ms +[2025-08-22 23:08:23] [Rank 0] step:5641/10000 train_time:503447ms step_avg:89.25ms +[2025-08-22 23:08:25] [Rank 0] step:5661/10000 train_time:505359ms step_avg:89.27ms +[2025-08-22 23:08:25] [Rank 0] step:5661/10000 train_time:505359ms step_avg:89.27ms +[2025-08-22 23:08:27] [Rank 0] step:5681/10000 train_time:507229ms step_avg:89.29ms +[2025-08-22 23:08:27] [Rank 0] step:5681/10000 train_time:507229ms step_avg:89.29ms +[2025-08-22 23:08:29] [Rank 0] step:5701/10000 train_time:509096ms step_avg:89.30ms +[2025-08-22 23:08:29] [Rank 0] step:5701/10000 train_time:509096ms step_avg:89.30ms +[2025-08-22 23:08:31] [Rank 0] step:5721/10000 train_time:510971ms step_avg:89.32ms +[2025-08-22 23:08:31] [Rank 0] step:5721/10000 train_time:510971ms step_avg:89.32ms +[2025-08-22 23:08:33] [Rank 0] 
step:5741/10000 train_time:512837ms step_avg:89.33ms +[2025-08-22 23:08:33] [Rank 0] step:5741/10000 train_time:512837ms step_avg:89.33ms +[2025-08-22 23:08:34] [Rank 0] step:5761/10000 train_time:514709ms step_avg:89.34ms +[2025-08-22 23:08:34] [Rank 0] step:5761/10000 train_time:514709ms step_avg:89.34ms +[2025-08-22 23:08:36] [Rank 0] step:5781/10000 train_time:516580ms step_avg:89.36ms +[2025-08-22 23:08:36] [Rank 0] step:5781/10000 train_time:516580ms step_avg:89.36ms +[2025-08-22 23:08:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:08:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:08:52] [Rank 0] PRINT: step:5800/10000 val_loss:3.7962 svd_entropy: attn_qk:H=0.7932,top10E=0.23,eRank=217.3,q75/q25=44.04 attn_vo:H=0.8298,top10E=0.06,eRank=391.7,q75/q25=inf mlp_w1:H=0.9197,top10E=0.12,eRank=453.9,q75/q25=4.14 mlp_w2:H=0.9690,top10E=0.05,eRank=625.4,q75/q25=2.84 vo_prod:H=0.6846,top10E=0.11,eRank=213.5,q75/q25=inf train_time:518467ms step_avg:89.39ms +[2025-08-22 23:08:52] [Rank 0] PRINT: step:5800/10000 val_loss:3.7962 svd_entropy: attn_qk:H=0.7932,top10E=0.23,eRank=217.3,q75/q25=44.04 attn_vo:H=0.8298,top10E=0.06,eRank=391.7,q75/q25=inf mlp_w1:H=0.9197,top10E=0.12,eRank=453.9,q75/q25=4.14 mlp_w2:H=0.9690,top10E=0.05,eRank=625.4,q75/q25=2.84 vo_prod:H=0.6846,top10E=0.11,eRank=213.5,q75/q25=inf train_time:518467ms step_avg:89.39ms +[2025-08-22 23:08:52] [Rank 0] step:5801/10000 train_time:518485ms step_avg:89.38ms +[2025-08-22 23:08:52] [Rank 0] step:5801/10000 train_time:518485ms step_avg:89.38ms +[2025-08-22 23:08:54] [Rank 0] step:5821/10000 train_time:520343ms step_avg:89.39ms +[2025-08-22 23:08:54] [Rank 0] step:5821/10000 train_time:520343ms step_avg:89.39ms +[2025-08-22 23:08:56] [Rank 0] step:5841/10000 train_time:522204ms step_avg:89.40ms +[2025-08-22 
23:08:56] [Rank 0] step:5841/10000 train_time:522204ms step_avg:89.40ms +[2025-08-22 23:08:58] [Rank 0] step:5861/10000 train_time:524076ms step_avg:89.42ms +[2025-08-22 23:08:58] [Rank 0] step:5861/10000 train_time:524076ms step_avg:89.42ms +[2025-08-22 23:08:59] [Rank 0] step:5881/10000 train_time:525940ms step_avg:89.43ms +[2025-08-22 23:08:59] [Rank 0] step:5881/10000 train_time:525940ms step_avg:89.43ms +[2025-08-22 23:09:01] [Rank 0] step:5901/10000 train_time:527806ms step_avg:89.44ms +[2025-08-22 23:09:01] [Rank 0] step:5901/10000 train_time:527806ms step_avg:89.44ms +[2025-08-22 23:09:03] [Rank 0] step:5921/10000 train_time:529673ms step_avg:89.46ms +[2025-08-22 23:09:03] [Rank 0] step:5921/10000 train_time:529673ms step_avg:89.46ms +[2025-08-22 23:09:05] [Rank 0] step:5941/10000 train_time:531545ms step_avg:89.47ms +[2025-08-22 23:09:05] [Rank 0] step:5941/10000 train_time:531545ms step_avg:89.47ms +[2025-08-22 23:09:07] [Rank 0] step:5961/10000 train_time:533415ms step_avg:89.48ms +[2025-08-22 23:09:07] [Rank 0] step:5961/10000 train_time:533415ms step_avg:89.48ms +[2025-08-22 23:09:09] [Rank 0] step:5981/10000 train_time:535359ms step_avg:89.51ms +[2025-08-22 23:09:09] [Rank 0] step:5981/10000 train_time:535359ms step_avg:89.51ms +[2025-08-22 23:09:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:09:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:09:24] [Rank 0] PRINT: step:6000/10000 val_loss:3.7747 svd_entropy: attn_qk:H=0.7939,top10E=0.23,eRank=218.1,q75/q25=43.72 attn_vo:H=0.8299,top10E=0.06,eRank=392.0,q75/q25=inf mlp_w1:H=0.9203,top10E=0.12,eRank=455.7,q75/q25=4.12 mlp_w2:H=0.9690,top10E=0.05,eRank=625.1,q75/q25=2.84 vo_prod:H=0.6850,top10E=0.10,eRank=214.2,q75/q25=inf train_time:537237ms step_avg:89.54ms +[2025-08-22 23:09:24] [Rank 0] PRINT: step:6000/10000 val_loss:3.7747 svd_entropy: attn_qk:H=0.7939,top10E=0.23,eRank=218.1,q75/q25=43.72 attn_vo:H=0.8299,top10E=0.06,eRank=392.0,q75/q25=inf mlp_w1:H=0.9203,top10E=0.12,eRank=455.7,q75/q25=4.12 mlp_w2:H=0.9690,top10E=0.05,eRank=625.1,q75/q25=2.84 vo_prod:H=0.6850,top10E=0.10,eRank=214.2,q75/q25=inf train_time:537237ms step_avg:89.54ms +[2025-08-22 23:09:24] [Rank 0] step:6001/10000 train_time:537255ms step_avg:89.53ms +[2025-08-22 23:09:24] [Rank 0] step:6001/10000 train_time:537255ms step_avg:89.53ms +[2025-08-22 23:09:26] [Rank 0] step:6021/10000 train_time:539119ms step_avg:89.54ms +[2025-08-22 23:09:26] [Rank 0] step:6021/10000 train_time:539119ms step_avg:89.54ms +[2025-08-22 23:09:28] [Rank 0] step:6041/10000 train_time:540988ms step_avg:89.55ms +[2025-08-22 23:09:28] [Rank 0] step:6041/10000 train_time:540988ms step_avg:89.55ms +[2025-08-22 23:09:30] [Rank 0] step:6061/10000 train_time:542858ms step_avg:89.57ms +[2025-08-22 23:09:30] [Rank 0] step:6061/10000 train_time:542858ms step_avg:89.57ms +[2025-08-22 23:09:32] [Rank 0] step:6081/10000 train_time:544725ms step_avg:89.58ms +[2025-08-22 23:09:32] [Rank 0] step:6081/10000 train_time:544725ms step_avg:89.58ms +[2025-08-22 23:09:34] [Rank 0] step:6101/10000 train_time:546597ms step_avg:89.59ms +[2025-08-22 23:09:34] [Rank 0] step:6101/10000 train_time:546597ms step_avg:89.59ms +[2025-08-22 23:09:36] [Rank 0] step:6121/10000 train_time:548738ms step_avg:89.65ms +[2025-08-22 23:09:36] [Rank 0] step:6121/10000 train_time:548738ms step_avg:89.65ms +[2025-08-22 23:09:38] [Rank 0] 
step:6141/10000 train_time:550617ms step_avg:89.66ms +[2025-08-22 23:09:38] [Rank 0] step:6141/10000 train_time:550617ms step_avg:89.66ms +[2025-08-22 23:09:40] [Rank 0] step:6161/10000 train_time:552488ms step_avg:89.68ms +[2025-08-22 23:09:40] [Rank 0] step:6161/10000 train_time:552488ms step_avg:89.68ms +[2025-08-22 23:09:42] [Rank 0] step:6181/10000 train_time:554359ms step_avg:89.69ms +[2025-08-22 23:09:42] [Rank 0] step:6181/10000 train_time:554359ms step_avg:89.69ms +[2025-08-22 23:09:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:09:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:09:57] [Rank 0] PRINT: step:6200/10000 val_loss:3.7607 svd_entropy: attn_qk:H=0.7946,top10E=0.23,eRank=218.8,q75/q25=43.45 attn_vo:H=0.8300,top10E=0.06,eRank=392.4,q75/q25=inf mlp_w1:H=0.9209,top10E=0.12,eRank=457.6,q75/q25=4.11 mlp_w2:H=0.9689,top10E=0.05,eRank=624.8,q75/q25=2.85 vo_prod:H=0.6854,top10E=0.10,eRank=214.8,q75/q25=inf train_time:556243ms step_avg:89.72ms +[2025-08-22 23:09:57] [Rank 0] PRINT: step:6200/10000 val_loss:3.7607 svd_entropy: attn_qk:H=0.7946,top10E=0.23,eRank=218.8,q75/q25=43.45 attn_vo:H=0.8300,top10E=0.06,eRank=392.4,q75/q25=inf mlp_w1:H=0.9209,top10E=0.12,eRank=457.6,q75/q25=4.11 mlp_w2:H=0.9689,top10E=0.05,eRank=624.8,q75/q25=2.85 vo_prod:H=0.6854,top10E=0.10,eRank=214.8,q75/q25=inf train_time:556243ms step_avg:89.72ms +[2025-08-22 23:09:57] [Rank 0] step:6201/10000 train_time:556262ms step_avg:89.71ms +[2025-08-22 23:09:57] [Rank 0] step:6201/10000 train_time:556262ms step_avg:89.71ms +[2025-08-22 23:09:59] [Rank 0] step:6221/10000 train_time:558113ms step_avg:89.71ms +[2025-08-22 23:09:59] [Rank 0] step:6221/10000 train_time:558113ms step_avg:89.71ms +[2025-08-22 23:10:01] [Rank 0] step:6241/10000 train_time:559977ms step_avg:89.73ms +[2025-08-22 
23:10:01] [Rank 0] step:6241/10000 train_time:559977ms step_avg:89.73ms +[2025-08-22 23:10:03] [Rank 0] step:6261/10000 train_time:561848ms step_avg:89.74ms +[2025-08-22 23:10:03] [Rank 0] step:6261/10000 train_time:561848ms step_avg:89.74ms +[2025-08-22 23:10:05] [Rank 0] step:6281/10000 train_time:563720ms step_avg:89.75ms +[2025-08-22 23:10:05] [Rank 0] step:6281/10000 train_time:563720ms step_avg:89.75ms +[2025-08-22 23:10:07] [Rank 0] step:6301/10000 train_time:565592ms step_avg:89.76ms +[2025-08-22 23:10:07] [Rank 0] step:6301/10000 train_time:565592ms step_avg:89.76ms +[2025-08-22 23:10:08] [Rank 0] step:6321/10000 train_time:567464ms step_avg:89.77ms +[2025-08-22 23:10:08] [Rank 0] step:6321/10000 train_time:567464ms step_avg:89.77ms +[2025-08-22 23:10:10] [Rank 0] step:6341/10000 train_time:569334ms step_avg:89.79ms +[2025-08-22 23:10:10] [Rank 0] step:6341/10000 train_time:569334ms step_avg:89.79ms +[2025-08-22 23:10:12] [Rank 0] step:6361/10000 train_time:571288ms step_avg:89.81ms +[2025-08-22 23:10:12] [Rank 0] step:6361/10000 train_time:571288ms step_avg:89.81ms +[2025-08-22 23:10:14] [Rank 0] step:6381/10000 train_time:573225ms step_avg:89.83ms +[2025-08-22 23:10:14] [Rank 0] step:6381/10000 train_time:573225ms step_avg:89.83ms +[2025-08-22 23:10:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:10:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:10:30] [Rank 0] PRINT: step:6400/10000 val_loss:3.7496 svd_entropy: attn_qk:H=0.7951,top10E=0.23,eRank=219.4,q75/q25=43.42 attn_vo:H=0.8301,top10E=0.06,eRank=392.6,q75/q25=inf mlp_w1:H=0.9215,top10E=0.12,eRank=459.1,q75/q25=4.10 mlp_w2:H=0.9688,top10E=0.05,eRank=624.5,q75/q25=2.85 vo_prod:H=0.6857,top10E=0.10,eRank=215.3,q75/q25=inf train_time:575106ms step_avg:89.86ms +[2025-08-22 23:10:30] [Rank 0] PRINT: step:6400/10000 val_loss:3.7496 svd_entropy: attn_qk:H=0.7951,top10E=0.23,eRank=219.4,q75/q25=43.42 attn_vo:H=0.8301,top10E=0.06,eRank=392.6,q75/q25=inf mlp_w1:H=0.9215,top10E=0.12,eRank=459.1,q75/q25=4.10 mlp_w2:H=0.9688,top10E=0.05,eRank=624.5,q75/q25=2.85 vo_prod:H=0.6857,top10E=0.10,eRank=215.3,q75/q25=inf train_time:575106ms step_avg:89.86ms +[2025-08-22 23:10:30] [Rank 0] step:6401/10000 train_time:575125ms step_avg:89.85ms +[2025-08-22 23:10:30] [Rank 0] step:6401/10000 train_time:575125ms step_avg:89.85ms +[2025-08-22 23:10:32] [Rank 0] step:6421/10000 train_time:576997ms step_avg:89.86ms +[2025-08-22 23:10:32] [Rank 0] step:6421/10000 train_time:576997ms step_avg:89.86ms +[2025-08-22 23:10:34] [Rank 0] step:6441/10000 train_time:578867ms step_avg:89.87ms +[2025-08-22 23:10:34] [Rank 0] step:6441/10000 train_time:578867ms step_avg:89.87ms +[2025-08-22 23:10:35] [Rank 0] step:6461/10000 train_time:580744ms step_avg:89.88ms +[2025-08-22 23:10:35] [Rank 0] step:6461/10000 train_time:580744ms step_avg:89.88ms +[2025-08-22 23:10:37] [Rank 0] step:6481/10000 train_time:582625ms step_avg:89.90ms +[2025-08-22 23:10:37] [Rank 0] step:6481/10000 train_time:582625ms step_avg:89.90ms +[2025-08-22 23:10:39] [Rank 0] step:6501/10000 train_time:584497ms step_avg:89.91ms +[2025-08-22 23:10:39] [Rank 0] step:6501/10000 train_time:584497ms step_avg:89.91ms +[2025-08-22 23:10:41] [Rank 0] step:6521/10000 train_time:586368ms step_avg:89.92ms +[2025-08-22 23:10:41] [Rank 0] step:6521/10000 train_time:586368ms step_avg:89.92ms +[2025-08-22 23:10:43] [Rank 0] 
step:6541/10000 train_time:588246ms step_avg:89.93ms +[2025-08-22 23:10:43] [Rank 0] step:6541/10000 train_time:588246ms step_avg:89.93ms +[2025-08-22 23:10:45] [Rank 0] step:6561/10000 train_time:590123ms step_avg:89.94ms +[2025-08-22 23:10:45] [Rank 0] step:6561/10000 train_time:590123ms step_avg:89.94ms +[2025-08-22 23:10:47] [Rank 0] step:6581/10000 train_time:591994ms step_avg:89.96ms +[2025-08-22 23:10:47] [Rank 0] step:6581/10000 train_time:591994ms step_avg:89.96ms +[2025-08-22 23:10:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:10:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:11:02] [Rank 0] PRINT: step:6600/10000 val_loss:3.7339 svd_entropy: attn_qk:H=0.7958,top10E=0.23,eRank=220.1,q75/q25=43.20 attn_vo:H=0.8302,top10E=0.06,eRank=392.9,q75/q25=inf mlp_w1:H=0.9219,top10E=0.12,eRank=460.6,q75/q25=4.08 mlp_w2:H=0.9687,top10E=0.05,eRank=624.2,q75/q25=2.85 vo_prod:H=0.6860,top10E=0.10,eRank=215.9,q75/q25=inf train_time:593882ms step_avg:89.98ms +[2025-08-22 23:11:02] [Rank 0] PRINT: step:6600/10000 val_loss:3.7339 svd_entropy: attn_qk:H=0.7958,top10E=0.23,eRank=220.1,q75/q25=43.20 attn_vo:H=0.8302,top10E=0.06,eRank=392.9,q75/q25=inf mlp_w1:H=0.9219,top10E=0.12,eRank=460.6,q75/q25=4.08 mlp_w2:H=0.9687,top10E=0.05,eRank=624.2,q75/q25=2.85 vo_prod:H=0.6860,top10E=0.10,eRank=215.9,q75/q25=inf train_time:593882ms step_avg:89.98ms +[2025-08-22 23:11:02] [Rank 0] step:6601/10000 train_time:593900ms step_avg:89.97ms +[2025-08-22 23:11:02] [Rank 0] step:6601/10000 train_time:593900ms step_avg:89.97ms +[2025-08-22 23:11:04] [Rank 0] step:6621/10000 train_time:595764ms step_avg:89.98ms +[2025-08-22 23:11:04] [Rank 0] step:6621/10000 train_time:595764ms step_avg:89.98ms +[2025-08-22 23:11:06] [Rank 0] step:6641/10000 train_time:597641ms step_avg:89.99ms +[2025-08-22 
23:11:06] [Rank 0] step:6641/10000 train_time:597641ms step_avg:89.99ms +[2025-08-22 23:11:08] [Rank 0] step:6661/10000 train_time:599510ms step_avg:90.00ms +[2025-08-22 23:11:08] [Rank 0] step:6661/10000 train_time:599510ms step_avg:90.00ms +[2025-08-22 23:11:10] [Rank 0] step:6681/10000 train_time:601396ms step_avg:90.02ms +[2025-08-22 23:11:10] [Rank 0] step:6681/10000 train_time:601396ms step_avg:90.02ms +[2025-08-22 23:11:12] [Rank 0] step:6701/10000 train_time:603304ms step_avg:90.03ms +[2025-08-22 23:11:12] [Rank 0] step:6701/10000 train_time:603304ms step_avg:90.03ms +[2025-08-22 23:11:14] [Rank 0] step:6721/10000 train_time:605204ms step_avg:90.05ms +[2025-08-22 23:11:14] [Rank 0] step:6721/10000 train_time:605204ms step_avg:90.05ms +[2025-08-22 23:11:16] [Rank 0] step:6741/10000 train_time:607175ms step_avg:90.07ms +[2025-08-22 23:11:16] [Rank 0] step:6741/10000 train_time:607175ms step_avg:90.07ms +[2025-08-22 23:11:18] [Rank 0] step:6761/10000 train_time:609131ms step_avg:90.09ms +[2025-08-22 23:11:18] [Rank 0] step:6761/10000 train_time:609131ms step_avg:90.09ms +[2025-08-22 23:11:19] [Rank 0] step:6781/10000 train_time:611033ms step_avg:90.11ms +[2025-08-22 23:11:19] [Rank 0] step:6781/10000 train_time:611033ms step_avg:90.11ms +[2025-08-22 23:11:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:11:21] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:11:35] [Rank 0] PRINT: step:6800/10000 val_loss:3.7186 svd_entropy: attn_qk:H=0.7962,top10E=0.23,eRank=220.5,q75/q25=43.13 attn_vo:H=0.8302,top10E=0.06,eRank=393.1,q75/q25=inf mlp_w1:H=0.9224,top10E=0.12,eRank=462.0,q75/q25=4.07 mlp_w2:H=0.9687,top10E=0.05,eRank=624.0,q75/q25=2.85 vo_prod:H=0.6862,top10E=0.10,eRank=216.2,q75/q25=inf train_time:612950ms step_avg:90.14ms +[2025-08-22 23:11:35] [Rank 0] PRINT: step:6800/10000 val_loss:3.7186 svd_entropy: attn_qk:H=0.7962,top10E=0.23,eRank=220.5,q75/q25=43.13 attn_vo:H=0.8302,top10E=0.06,eRank=393.1,q75/q25=inf mlp_w1:H=0.9224,top10E=0.12,eRank=462.0,q75/q25=4.07 mlp_w2:H=0.9687,top10E=0.05,eRank=624.0,q75/q25=2.85 vo_prod:H=0.6862,top10E=0.10,eRank=216.2,q75/q25=inf train_time:612950ms step_avg:90.14ms +[2025-08-22 23:11:35] [Rank 0] step:6801/10000 train_time:612968ms step_avg:90.13ms +[2025-08-22 23:11:35] [Rank 0] step:6801/10000 train_time:612968ms step_avg:90.13ms +[2025-08-22 23:11:37] [Rank 0] step:6821/10000 train_time:614846ms step_avg:90.14ms +[2025-08-22 23:11:37] [Rank 0] step:6821/10000 train_time:614846ms step_avg:90.14ms +[2025-08-22 23:11:39] [Rank 0] step:6841/10000 train_time:616745ms step_avg:90.15ms +[2025-08-22 23:11:39] [Rank 0] step:6841/10000 train_time:616745ms step_avg:90.15ms +[2025-08-22 23:11:41] [Rank 0] step:6861/10000 train_time:618638ms step_avg:90.17ms +[2025-08-22 23:11:41] [Rank 0] step:6861/10000 train_time:618638ms step_avg:90.17ms +[2025-08-22 23:11:43] [Rank 0] step:6881/10000 train_time:620539ms step_avg:90.18ms +[2025-08-22 23:11:43] [Rank 0] step:6881/10000 train_time:620539ms step_avg:90.18ms +[2025-08-22 23:11:45] [Rank 0] step:6901/10000 train_time:622434ms step_avg:90.19ms +[2025-08-22 23:11:45] [Rank 0] step:6901/10000 train_time:622434ms step_avg:90.19ms +[2025-08-22 23:11:46] [Rank 0] step:6921/10000 train_time:624327ms step_avg:90.21ms +[2025-08-22 23:11:46] [Rank 0] step:6921/10000 train_time:624327ms step_avg:90.21ms +[2025-08-22 23:11:48] [Rank 0] 
step:6941/10000 train_time:626231ms step_avg:90.22ms +[2025-08-22 23:11:48] [Rank 0] step:6941/10000 train_time:626231ms step_avg:90.22ms +[2025-08-22 23:11:50] [Rank 0] step:6961/10000 train_time:628143ms step_avg:90.24ms +[2025-08-22 23:11:50] [Rank 0] step:6961/10000 train_time:628143ms step_avg:90.24ms +[2025-08-22 23:11:52] [Rank 0] step:6981/10000 train_time:630048ms step_avg:90.25ms +[2025-08-22 23:11:52] [Rank 0] step:6981/10000 train_time:630048ms step_avg:90.25ms +[2025-08-22 23:11:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:11:54] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:12:08] [Rank 0] PRINT: step:7000/10000 val_loss:3.7008 svd_entropy: attn_qk:H=0.7966,top10E=0.23,eRank=221.0,q75/q25=42.81 attn_vo:H=0.8303,top10E=0.06,eRank=393.3,q75/q25=inf mlp_w1:H=0.9228,top10E=0.12,eRank=463.3,q75/q25=4.05 mlp_w2:H=0.9686,top10E=0.05,eRank=623.8,q75/q25=2.86 vo_prod:H=0.6865,top10E=0.10,eRank=216.7,q75/q25=inf train_time:631963ms step_avg:90.28ms +[2025-08-22 23:12:08] [Rank 0] PRINT: step:7000/10000 val_loss:3.7008 svd_entropy: attn_qk:H=0.7966,top10E=0.23,eRank=221.0,q75/q25=42.81 attn_vo:H=0.8303,top10E=0.06,eRank=393.3,q75/q25=inf mlp_w1:H=0.9228,top10E=0.12,eRank=463.3,q75/q25=4.05 mlp_w2:H=0.9686,top10E=0.05,eRank=623.8,q75/q25=2.86 vo_prod:H=0.6865,top10E=0.10,eRank=216.7,q75/q25=inf train_time:631963ms step_avg:90.28ms +[2025-08-22 23:12:08] [Rank 0] step:7001/10000 train_time:631982ms step_avg:90.27ms +[2025-08-22 23:12:08] [Rank 0] step:7001/10000 train_time:631982ms step_avg:90.27ms +[2025-08-22 23:12:10] [Rank 0] step:7021/10000 train_time:633862ms step_avg:90.28ms +[2025-08-22 23:12:10] [Rank 0] step:7021/10000 train_time:633862ms step_avg:90.28ms +[2025-08-22 23:12:12] [Rank 0] step:7041/10000 train_time:635756ms step_avg:90.29ms +[2025-08-22 
23:12:12] [Rank 0] step:7041/10000 train_time:635756ms step_avg:90.29ms +[2025-08-22 23:12:13] [Rank 0] step:7061/10000 train_time:637653ms step_avg:90.31ms +[2025-08-22 23:12:13] [Rank 0] step:7061/10000 train_time:637653ms step_avg:90.31ms +[2025-08-22 23:12:15] [Rank 0] step:7081/10000 train_time:639549ms step_avg:90.32ms +[2025-08-22 23:12:15] [Rank 0] step:7081/10000 train_time:639549ms step_avg:90.32ms +[2025-08-22 23:12:17] [Rank 0] step:7101/10000 train_time:641545ms step_avg:90.35ms +[2025-08-22 23:12:17] [Rank 0] step:7101/10000 train_time:641545ms step_avg:90.35ms +[2025-08-22 23:12:19] [Rank 0] step:7121/10000 train_time:643427ms step_avg:90.36ms +[2025-08-22 23:12:19] [Rank 0] step:7121/10000 train_time:643427ms step_avg:90.36ms +[2025-08-22 23:12:21] [Rank 0] step:7141/10000 train_time:645327ms step_avg:90.37ms +[2025-08-22 23:12:21] [Rank 0] step:7141/10000 train_time:645327ms step_avg:90.37ms +[2025-08-22 23:12:23] [Rank 0] step:7161/10000 train_time:647231ms step_avg:90.38ms +[2025-08-22 23:12:23] [Rank 0] step:7161/10000 train_time:647231ms step_avg:90.38ms +[2025-08-22 23:12:25] [Rank 0] step:7181/10000 train_time:649135ms step_avg:90.40ms +[2025-08-22 23:12:25] [Rank 0] step:7181/10000 train_time:649135ms step_avg:90.40ms +[2025-08-22 23:12:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:12:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:12:40] [Rank 0] PRINT: step:7200/10000 val_loss:3.6903 svd_entropy: attn_qk:H=0.7970,top10E=0.23,eRank=221.4,q75/q25=42.48 attn_vo:H=0.8304,top10E=0.06,eRank=393.5,q75/q25=inf mlp_w1:H=0.9232,top10E=0.12,eRank=464.5,q75/q25=4.04 mlp_w2:H=0.9686,top10E=0.05,eRank=623.6,q75/q25=2.86 vo_prod:H=0.6868,top10E=0.10,eRank=217.2,q75/q25=inf train_time:651053ms step_avg:90.42ms +[2025-08-22 23:12:40] [Rank 0] PRINT: step:7200/10000 val_loss:3.6903 svd_entropy: attn_qk:H=0.7970,top10E=0.23,eRank=221.4,q75/q25=42.48 attn_vo:H=0.8304,top10E=0.06,eRank=393.5,q75/q25=inf mlp_w1:H=0.9232,top10E=0.12,eRank=464.5,q75/q25=4.04 mlp_w2:H=0.9686,top10E=0.05,eRank=623.6,q75/q25=2.86 vo_prod:H=0.6868,top10E=0.10,eRank=217.2,q75/q25=inf train_time:651053ms step_avg:90.42ms +[2025-08-22 23:12:41] [Rank 0] step:7201/10000 train_time:651072ms step_avg:90.41ms +[2025-08-22 23:12:41] [Rank 0] step:7201/10000 train_time:651072ms step_avg:90.41ms +[2025-08-22 23:12:42] [Rank 0] step:7221/10000 train_time:652973ms step_avg:90.43ms +[2025-08-22 23:12:42] [Rank 0] step:7221/10000 train_time:652973ms step_avg:90.43ms +[2025-08-22 23:12:44] [Rank 0] step:7241/10000 train_time:654873ms step_avg:90.44ms +[2025-08-22 23:12:44] [Rank 0] step:7241/10000 train_time:654873ms step_avg:90.44ms +[2025-08-22 23:12:46] [Rank 0] step:7261/10000 train_time:656771ms step_avg:90.45ms +[2025-08-22 23:12:46] [Rank 0] step:7261/10000 train_time:656771ms step_avg:90.45ms +[2025-08-22 23:12:48] [Rank 0] step:7281/10000 train_time:658684ms step_avg:90.47ms +[2025-08-22 23:12:48] [Rank 0] step:7281/10000 train_time:658684ms step_avg:90.47ms +[2025-08-22 23:12:50] [Rank 0] step:7301/10000 train_time:660588ms step_avg:90.48ms +[2025-08-22 23:12:50] [Rank 0] step:7301/10000 train_time:660588ms step_avg:90.48ms +[2025-08-22 23:12:52] [Rank 0] step:7321/10000 train_time:662504ms step_avg:90.49ms +[2025-08-22 23:12:52] [Rank 0] step:7321/10000 train_time:662504ms step_avg:90.49ms +[2025-08-22 23:12:54] [Rank 0] 
step:7341/10000 train_time:664408ms step_avg:90.51ms +[2025-08-22 23:12:54] [Rank 0] step:7341/10000 train_time:664408ms step_avg:90.51ms +[2025-08-22 23:12:56] [Rank 0] step:7361/10000 train_time:666324ms step_avg:90.52ms +[2025-08-22 23:12:56] [Rank 0] step:7361/10000 train_time:666324ms step_avg:90.52ms +[2025-08-22 23:12:58] [Rank 0] step:7381/10000 train_time:668235ms step_avg:90.53ms +[2025-08-22 23:12:58] [Rank 0] step:7381/10000 train_time:668235ms step_avg:90.53ms +[2025-08-22 23:13:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:13:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:13:13] [Rank 0] PRINT: step:7400/10000 val_loss:3.6707 svd_entropy: attn_qk:H=0.7974,top10E=0.23,eRank=221.9,q75/q25=42.36 attn_vo:H=0.8304,top10E=0.06,eRank=393.7,q75/q25=inf mlp_w1:H=0.9236,top10E=0.11,eRank=465.5,q75/q25=4.03 mlp_w2:H=0.9685,top10E=0.05,eRank=623.4,q75/q25=2.86 vo_prod:H=0.6870,top10E=0.10,eRank=217.6,q75/q25=inf train_time:670139ms step_avg:90.56ms +[2025-08-22 23:13:13] [Rank 0] PRINT: step:7400/10000 val_loss:3.6707 svd_entropy: attn_qk:H=0.7974,top10E=0.23,eRank=221.9,q75/q25=42.36 attn_vo:H=0.8304,top10E=0.06,eRank=393.7,q75/q25=inf mlp_w1:H=0.9236,top10E=0.11,eRank=465.5,q75/q25=4.03 mlp_w2:H=0.9685,top10E=0.05,eRank=623.4,q75/q25=2.86 vo_prod:H=0.6870,top10E=0.10,eRank=217.6,q75/q25=inf train_time:670139ms step_avg:90.56ms +[2025-08-22 23:13:13] [Rank 0] step:7401/10000 train_time:670158ms step_avg:90.55ms +[2025-08-22 23:13:13] [Rank 0] step:7401/10000 train_time:670158ms step_avg:90.55ms +[2025-08-22 23:13:15] [Rank 0] step:7421/10000 train_time:672062ms step_avg:90.56ms +[2025-08-22 23:13:15] [Rank 0] step:7421/10000 train_time:672062ms step_avg:90.56ms +[2025-08-22 23:13:17] [Rank 0] step:7441/10000 train_time:673959ms step_avg:90.57ms +[2025-08-22 
23:13:17] [Rank 0] step:7441/10000 train_time:673959ms step_avg:90.57ms +[2025-08-22 23:13:19] [Rank 0] step:7461/10000 train_time:675860ms step_avg:90.59ms +[2025-08-22 23:13:19] [Rank 0] step:7461/10000 train_time:675860ms step_avg:90.59ms +[2025-08-22 23:13:21] [Rank 0] step:7481/10000 train_time:677841ms step_avg:90.61ms +[2025-08-22 23:13:21] [Rank 0] step:7481/10000 train_time:677841ms step_avg:90.61ms +[2025-08-22 23:13:23] [Rank 0] step:7501/10000 train_time:679774ms step_avg:90.62ms +[2025-08-22 23:13:23] [Rank 0] step:7501/10000 train_time:679774ms step_avg:90.62ms +[2025-08-22 23:13:25] [Rank 0] step:7521/10000 train_time:681683ms step_avg:90.64ms +[2025-08-22 23:13:25] [Rank 0] step:7521/10000 train_time:681683ms step_avg:90.64ms +[2025-08-22 23:13:27] [Rank 0] step:7541/10000 train_time:683599ms step_avg:90.65ms +[2025-08-22 23:13:27] [Rank 0] step:7541/10000 train_time:683599ms step_avg:90.65ms +[2025-08-22 23:13:29] [Rank 0] step:7561/10000 train_time:685495ms step_avg:90.66ms +[2025-08-22 23:13:29] [Rank 0] step:7561/10000 train_time:685495ms step_avg:90.66ms +[2025-08-22 23:13:31] [Rank 0] step:7581/10000 train_time:687410ms step_avg:90.68ms +[2025-08-22 23:13:31] [Rank 0] step:7581/10000 train_time:687410ms step_avg:90.68ms +[2025-08-22 23:13:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:13:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:13:46] [Rank 0] PRINT: step:7600/10000 val_loss:3.6606 svd_entropy: attn_qk:H=0.7978,top10E=0.23,eRank=222.3,q75/q25=42.17 attn_vo:H=0.8305,top10E=0.06,eRank=393.9,q75/q25=inf mlp_w1:H=0.9239,top10E=0.11,eRank=466.5,q75/q25=4.02 mlp_w2:H=0.9685,top10E=0.05,eRank=623.3,q75/q25=2.87 vo_prod:H=0.6873,top10E=0.10,eRank=218.0,q75/q25=inf train_time:689336ms step_avg:90.70ms +[2025-08-22 23:13:46] [Rank 0] PRINT: step:7600/10000 val_loss:3.6606 svd_entropy: attn_qk:H=0.7978,top10E=0.23,eRank=222.3,q75/q25=42.17 attn_vo:H=0.8305,top10E=0.06,eRank=393.9,q75/q25=inf mlp_w1:H=0.9239,top10E=0.11,eRank=466.5,q75/q25=4.02 mlp_w2:H=0.9685,top10E=0.05,eRank=623.3,q75/q25=2.87 vo_prod:H=0.6873,top10E=0.10,eRank=218.0,q75/q25=inf train_time:689336ms step_avg:90.70ms +[2025-08-22 23:13:46] [Rank 0] step:7601/10000 train_time:689354ms step_avg:90.69ms +[2025-08-22 23:13:46] [Rank 0] step:7601/10000 train_time:689354ms step_avg:90.69ms +[2025-08-22 23:13:48] [Rank 0] step:7621/10000 train_time:691253ms step_avg:90.70ms +[2025-08-22 23:13:48] [Rank 0] step:7621/10000 train_time:691253ms step_avg:90.70ms +[2025-08-22 23:13:50] [Rank 0] step:7641/10000 train_time:693153ms step_avg:90.71ms +[2025-08-22 23:13:50] [Rank 0] step:7641/10000 train_time:693153ms step_avg:90.71ms +[2025-08-22 23:13:52] [Rank 0] step:7661/10000 train_time:695058ms step_avg:90.73ms +[2025-08-22 23:13:52] [Rank 0] step:7661/10000 train_time:695058ms step_avg:90.73ms +[2025-08-22 23:13:54] [Rank 0] step:7681/10000 train_time:696957ms step_avg:90.74ms +[2025-08-22 23:13:54] [Rank 0] step:7681/10000 train_time:696957ms step_avg:90.74ms +[2025-08-22 23:13:56] [Rank 0] step:7701/10000 train_time:698858ms step_avg:90.75ms +[2025-08-22 23:13:56] [Rank 0] step:7701/10000 train_time:698858ms step_avg:90.75ms +[2025-08-22 23:13:58] [Rank 0] step:7721/10000 train_time:700773ms step_avg:90.76ms +[2025-08-22 23:13:58] [Rank 0] step:7721/10000 train_time:700773ms step_avg:90.76ms +[2025-08-22 23:14:00] [Rank 0] 
step:7741/10000 train_time:702677ms step_avg:90.77ms +[2025-08-22 23:14:00] [Rank 0] step:7741/10000 train_time:702677ms step_avg:90.77ms +[2025-08-22 23:14:02] [Rank 0] step:7761/10000 train_time:704591ms step_avg:90.79ms +[2025-08-22 23:14:02] [Rank 0] step:7761/10000 train_time:704591ms step_avg:90.79ms +[2025-08-22 23:14:04] [Rank 0] step:7781/10000 train_time:706496ms step_avg:90.80ms +[2025-08-22 23:14:04] [Rank 0] step:7781/10000 train_time:706496ms step_avg:90.80ms +[2025-08-22 23:14:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:14:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:14:19] [Rank 0] PRINT: step:7800/10000 val_loss:3.6472 svd_entropy: attn_qk:H=0.7981,top10E=0.23,eRank=222.6,q75/q25=42.05 attn_vo:H=0.8306,top10E=0.06,eRank=394.0,q75/q25=inf mlp_w1:H=0.9242,top10E=0.11,eRank=467.5,q75/q25=4.01 mlp_w2:H=0.9685,top10E=0.05,eRank=623.2,q75/q25=2.86 vo_prod:H=0.6875,top10E=0.10,eRank=218.3,q75/q25=inf train_time:708423ms step_avg:90.82ms +[2025-08-22 23:14:19] [Rank 0] PRINT: step:7800/10000 val_loss:3.6472 svd_entropy: attn_qk:H=0.7981,top10E=0.23,eRank=222.6,q75/q25=42.05 attn_vo:H=0.8306,top10E=0.06,eRank=394.0,q75/q25=inf mlp_w1:H=0.9242,top10E=0.11,eRank=467.5,q75/q25=4.01 mlp_w2:H=0.9685,top10E=0.05,eRank=623.2,q75/q25=2.86 vo_prod:H=0.6875,top10E=0.10,eRank=218.3,q75/q25=inf train_time:708423ms step_avg:90.82ms +[2025-08-22 23:14:19] [Rank 0] step:7801/10000 train_time:708442ms step_avg:90.81ms +[2025-08-22 23:14:19] [Rank 0] step:7801/10000 train_time:708442ms step_avg:90.81ms +[2025-08-22 23:14:21] [Rank 0] step:7821/10000 train_time:710344ms step_avg:90.83ms +[2025-08-22 23:14:21] [Rank 0] step:7821/10000 train_time:710344ms step_avg:90.83ms +[2025-08-22 23:14:23] [Rank 0] step:7841/10000 train_time:712239ms step_avg:90.84ms +[2025-08-22 
23:14:23] [Rank 0] step:7841/10000 train_time:712239ms step_avg:90.84ms +[2025-08-22 23:14:25] [Rank 0] step:7861/10000 train_time:714203ms step_avg:90.85ms +[2025-08-22 23:14:25] [Rank 0] step:7861/10000 train_time:714203ms step_avg:90.85ms +[2025-08-22 23:14:27] [Rank 0] step:7881/10000 train_time:716114ms step_avg:90.87ms +[2025-08-22 23:14:27] [Rank 0] step:7881/10000 train_time:716114ms step_avg:90.87ms +[2025-08-22 23:14:29] [Rank 0] step:7901/10000 train_time:718014ms step_avg:90.88ms +[2025-08-22 23:14:29] [Rank 0] step:7901/10000 train_time:718014ms step_avg:90.88ms +[2025-08-22 23:14:31] [Rank 0] step:7921/10000 train_time:719923ms step_avg:90.89ms +[2025-08-22 23:14:31] [Rank 0] step:7921/10000 train_time:719923ms step_avg:90.89ms +[2025-08-22 23:14:33] [Rank 0] step:7941/10000 train_time:721837ms step_avg:90.90ms +[2025-08-22 23:14:33] [Rank 0] step:7941/10000 train_time:721837ms step_avg:90.90ms +[2025-08-22 23:14:34] [Rank 0] step:7961/10000 train_time:723747ms step_avg:90.91ms +[2025-08-22 23:14:34] [Rank 0] step:7961/10000 train_time:723747ms step_avg:90.91ms +[2025-08-22 23:14:36] [Rank 0] step:7981/10000 train_time:725646ms step_avg:90.92ms +[2025-08-22 23:14:36] [Rank 0] step:7981/10000 train_time:725646ms step_avg:90.92ms +[2025-08-22 23:14:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:14:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:14:52] [Rank 0] PRINT: step:8000/10000 val_loss:3.6293 svd_entropy: attn_qk:H=0.7984,top10E=0.23,eRank=223.0,q75/q25=41.95 attn_vo:H=0.8306,top10E=0.06,eRank=394.2,q75/q25=inf mlp_w1:H=0.9245,top10E=0.11,eRank=468.3,q75/q25=4.00 mlp_w2:H=0.9684,top10E=0.05,eRank=623.1,q75/q25=2.86 vo_prod:H=0.6877,top10E=0.10,eRank=218.7,q75/q25=inf train_time:727570ms step_avg:90.95ms +[2025-08-22 23:14:52] [Rank 0] PRINT: step:8000/10000 val_loss:3.6293 svd_entropy: attn_qk:H=0.7984,top10E=0.23,eRank=223.0,q75/q25=41.95 attn_vo:H=0.8306,top10E=0.06,eRank=394.2,q75/q25=inf mlp_w1:H=0.9245,top10E=0.11,eRank=468.3,q75/q25=4.00 mlp_w2:H=0.9684,top10E=0.05,eRank=623.1,q75/q25=2.86 vo_prod:H=0.6877,top10E=0.10,eRank=218.7,q75/q25=inf train_time:727570ms step_avg:90.95ms +[2025-08-22 23:14:52] [Rank 0] step:8001/10000 train_time:727589ms step_avg:90.94ms +[2025-08-22 23:14:52] [Rank 0] step:8001/10000 train_time:727589ms step_avg:90.94ms +[2025-08-22 23:14:54] [Rank 0] step:8021/10000 train_time:729480ms step_avg:90.95ms +[2025-08-22 23:14:54] [Rank 0] step:8021/10000 train_time:729480ms step_avg:90.95ms +[2025-08-22 23:14:56] [Rank 0] step:8041/10000 train_time:731394ms step_avg:90.96ms +[2025-08-22 23:14:56] [Rank 0] step:8041/10000 train_time:731394ms step_avg:90.96ms +[2025-08-22 23:14:58] [Rank 0] step:8061/10000 train_time:733302ms step_avg:90.97ms +[2025-08-22 23:14:58] [Rank 0] step:8061/10000 train_time:733302ms step_avg:90.97ms +[2025-08-22 23:15:00] [Rank 0] step:8081/10000 train_time:735202ms step_avg:90.98ms +[2025-08-22 23:15:00] [Rank 0] step:8081/10000 train_time:735202ms step_avg:90.98ms +[2025-08-22 23:15:02] [Rank 0] step:8101/10000 train_time:737118ms step_avg:90.99ms +[2025-08-22 23:15:02] [Rank 0] step:8101/10000 train_time:737118ms step_avg:90.99ms +[2025-08-22 23:15:03] [Rank 0] step:8121/10000 train_time:739024ms step_avg:91.00ms +[2025-08-22 23:15:03] [Rank 0] step:8121/10000 train_time:739024ms step_avg:91.00ms +[2025-08-22 23:15:05] [Rank 0] 
step:8141/10000 train_time:740965ms step_avg:91.02ms +[2025-08-22 23:15:05] [Rank 0] step:8141/10000 train_time:740965ms step_avg:91.02ms +[2025-08-22 23:15:07] [Rank 0] step:8161/10000 train_time:742891ms step_avg:91.03ms +[2025-08-22 23:15:07] [Rank 0] step:8161/10000 train_time:742891ms step_avg:91.03ms +[2025-08-22 23:15:09] [Rank 0] step:8181/10000 train_time:744827ms step_avg:91.04ms +[2025-08-22 23:15:09] [Rank 0] step:8181/10000 train_time:744827ms step_avg:91.04ms +[2025-08-22 23:15:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:15:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:15:25] [Rank 0] PRINT: step:8200/10000 val_loss:3.6189 svd_entropy: attn_qk:H=0.7986,top10E=0.23,eRank=223.2,q75/q25=41.80 attn_vo:H=0.8306,top10E=0.06,eRank=394.3,q75/q25=inf mlp_w1:H=0.9247,top10E=0.11,eRank=469.0,q75/q25=4.00 mlp_w2:H=0.9684,top10E=0.05,eRank=623.0,q75/q25=2.87 vo_prod:H=0.6878,top10E=0.10,eRank=219.0,q75/q25=inf train_time:746796ms step_avg:91.07ms +[2025-08-22 23:15:25] [Rank 0] PRINT: step:8200/10000 val_loss:3.6189 svd_entropy: attn_qk:H=0.7986,top10E=0.23,eRank=223.2,q75/q25=41.80 attn_vo:H=0.8306,top10E=0.06,eRank=394.3,q75/q25=inf mlp_w1:H=0.9247,top10E=0.11,eRank=469.0,q75/q25=4.00 mlp_w2:H=0.9684,top10E=0.05,eRank=623.0,q75/q25=2.87 vo_prod:H=0.6878,top10E=0.10,eRank=219.0,q75/q25=inf train_time:746796ms step_avg:91.07ms +[2025-08-22 23:15:25] [Rank 0] step:8201/10000 train_time:746815ms step_avg:91.06ms +[2025-08-22 23:15:25] [Rank 0] step:8201/10000 train_time:746815ms step_avg:91.06ms +[2025-08-22 23:15:27] [Rank 0] step:8221/10000 train_time:748795ms step_avg:91.08ms +[2025-08-22 23:15:27] [Rank 0] step:8221/10000 train_time:748795ms step_avg:91.08ms +[2025-08-22 23:15:29] [Rank 0] step:8241/10000 train_time:750778ms step_avg:91.10ms +[2025-08-22 
23:15:29] [Rank 0] step:8241/10000 train_time:750778ms step_avg:91.10ms +[2025-08-22 23:15:31] [Rank 0] step:8261/10000 train_time:752716ms step_avg:91.12ms +[2025-08-22 23:15:31] [Rank 0] step:8261/10000 train_time:752716ms step_avg:91.12ms +[2025-08-22 23:15:33] [Rank 0] step:8281/10000 train_time:754643ms step_avg:91.13ms +[2025-08-22 23:15:33] [Rank 0] step:8281/10000 train_time:754643ms step_avg:91.13ms +[2025-08-22 23:15:35] [Rank 0] step:8301/10000 train_time:756574ms step_avg:91.14ms +[2025-08-22 23:15:35] [Rank 0] step:8301/10000 train_time:756574ms step_avg:91.14ms +[2025-08-22 23:15:37] [Rank 0] step:8321/10000 train_time:758498ms step_avg:91.15ms +[2025-08-22 23:15:37] [Rank 0] step:8321/10000 train_time:758498ms step_avg:91.15ms +[2025-08-22 23:15:39] [Rank 0] step:8341/10000 train_time:760435ms step_avg:91.17ms +[2025-08-22 23:15:39] [Rank 0] step:8341/10000 train_time:760435ms step_avg:91.17ms +[2025-08-22 23:15:41] [Rank 0] step:8361/10000 train_time:762370ms step_avg:91.18ms +[2025-08-22 23:15:41] [Rank 0] step:8361/10000 train_time:762370ms step_avg:91.18ms +[2025-08-22 23:15:43] [Rank 0] step:8381/10000 train_time:764298ms step_avg:91.19ms +[2025-08-22 23:15:43] [Rank 0] step:8381/10000 train_time:764298ms step_avg:91.19ms +[2025-08-22 23:15:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:15:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:15:59] [Rank 0] PRINT: step:8400/10000 val_loss:3.6043 svd_entropy: attn_qk:H=0.7988,top10E=0.23,eRank=223.4,q75/q25=41.60 attn_vo:H=0.8307,top10E=0.06,eRank=394.4,q75/q25=inf mlp_w1:H=0.9250,top10E=0.11,eRank=469.8,q75/q25=3.99 mlp_w2:H=0.9684,top10E=0.05,eRank=622.9,q75/q25=2.87 vo_prod:H=0.6880,top10E=0.10,eRank=219.2,q75/q25=inf train_time:766245ms step_avg:91.22ms +[2025-08-22 23:15:59] [Rank 0] PRINT: step:8400/10000 val_loss:3.6043 svd_entropy: attn_qk:H=0.7988,top10E=0.23,eRank=223.4,q75/q25=41.60 attn_vo:H=0.8307,top10E=0.06,eRank=394.4,q75/q25=inf mlp_w1:H=0.9250,top10E=0.11,eRank=469.8,q75/q25=3.99 mlp_w2:H=0.9684,top10E=0.05,eRank=622.9,q75/q25=2.87 vo_prod:H=0.6880,top10E=0.10,eRank=219.2,q75/q25=inf train_time:766245ms step_avg:91.22ms +[2025-08-22 23:15:59] [Rank 0] step:8401/10000 train_time:766263ms step_avg:91.21ms +[2025-08-22 23:15:59] [Rank 0] step:8401/10000 train_time:766263ms step_avg:91.21ms +[2025-08-22 23:16:01] [Rank 0] step:8421/10000 train_time:768176ms step_avg:91.22ms +[2025-08-22 23:16:01] [Rank 0] step:8421/10000 train_time:768176ms step_avg:91.22ms +[2025-08-22 23:16:03] [Rank 0] step:8441/10000 train_time:770105ms step_avg:91.23ms +[2025-08-22 23:16:03] [Rank 0] step:8441/10000 train_time:770105ms step_avg:91.23ms +[2025-08-22 23:16:05] [Rank 0] step:8461/10000 train_time:772028ms step_avg:91.25ms +[2025-08-22 23:16:05] [Rank 0] step:8461/10000 train_time:772028ms step_avg:91.25ms +[2025-08-22 23:16:06] [Rank 0] step:8481/10000 train_time:773962ms step_avg:91.26ms +[2025-08-22 23:16:06] [Rank 0] step:8481/10000 train_time:773962ms step_avg:91.26ms +[2025-08-22 23:16:08] [Rank 0] step:8501/10000 train_time:775916ms step_avg:91.27ms +[2025-08-22 23:16:08] [Rank 0] step:8501/10000 train_time:775916ms step_avg:91.27ms +[2025-08-22 23:16:10] [Rank 0] step:8521/10000 train_time:777849ms step_avg:91.29ms +[2025-08-22 23:16:10] [Rank 0] step:8521/10000 train_time:777849ms step_avg:91.29ms +[2025-08-22 23:16:12] [Rank 0] 
step:8541/10000 train_time:779793ms step_avg:91.30ms +[2025-08-22 23:16:12] [Rank 0] step:8541/10000 train_time:779793ms step_avg:91.30ms +[2025-08-22 23:16:14] [Rank 0] step:8561/10000 train_time:781735ms step_avg:91.31ms +[2025-08-22 23:16:14] [Rank 0] step:8561/10000 train_time:781735ms step_avg:91.31ms +[2025-08-22 23:16:16] [Rank 0] step:8581/10000 train_time:783670ms step_avg:91.33ms +[2025-08-22 23:16:16] [Rank 0] step:8581/10000 train_time:783670ms step_avg:91.33ms +[2025-08-22 23:16:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:16:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:16:32] [Rank 0] PRINT: step:8600/10000 val_loss:3.5926 svd_entropy: attn_qk:H=0.7990,top10E=0.23,eRank=223.7,q75/q25=41.53 attn_vo:H=0.8307,top10E=0.06,eRank=394.5,q75/q25=inf mlp_w1:H=0.9252,top10E=0.11,eRank=470.4,q75/q25=3.98 mlp_w2:H=0.9684,top10E=0.05,eRank=622.8,q75/q25=2.87 vo_prod:H=0.6881,top10E=0.10,eRank=219.5,q75/q25=inf train_time:785611ms step_avg:91.35ms +[2025-08-22 23:16:32] [Rank 0] PRINT: step:8600/10000 val_loss:3.5926 svd_entropy: attn_qk:H=0.7990,top10E=0.23,eRank=223.7,q75/q25=41.53 attn_vo:H=0.8307,top10E=0.06,eRank=394.5,q75/q25=inf mlp_w1:H=0.9252,top10E=0.11,eRank=470.4,q75/q25=3.98 mlp_w2:H=0.9684,top10E=0.05,eRank=622.8,q75/q25=2.87 vo_prod:H=0.6881,top10E=0.10,eRank=219.5,q75/q25=inf train_time:785611ms step_avg:91.35ms +[2025-08-22 23:16:32] [Rank 0] step:8601/10000 train_time:785630ms step_avg:91.34ms +[2025-08-22 23:16:32] [Rank 0] step:8601/10000 train_time:785630ms step_avg:91.34ms +[2025-08-22 23:16:34] [Rank 0] step:8621/10000 train_time:787565ms step_avg:91.35ms +[2025-08-22 23:16:34] [Rank 0] step:8621/10000 train_time:787565ms step_avg:91.35ms +[2025-08-22 23:16:36] [Rank 0] step:8641/10000 train_time:789496ms step_avg:91.37ms +[2025-08-22 
23:16:36] [Rank 0] step:8641/10000 train_time:789496ms step_avg:91.37ms +[2025-08-22 23:16:38] [Rank 0] step:8661/10000 train_time:791427ms step_avg:91.38ms +[2025-08-22 23:16:38] [Rank 0] step:8661/10000 train_time:791427ms step_avg:91.38ms +[2025-08-22 23:16:40] [Rank 0] step:8681/10000 train_time:793361ms step_avg:91.39ms +[2025-08-22 23:16:40] [Rank 0] step:8681/10000 train_time:793361ms step_avg:91.39ms +[2025-08-22 23:16:42] [Rank 0] step:8701/10000 train_time:795286ms step_avg:91.40ms +[2025-08-22 23:16:42] [Rank 0] step:8701/10000 train_time:795286ms step_avg:91.40ms +[2025-08-22 23:16:44] [Rank 0] step:8721/10000 train_time:797224ms step_avg:91.41ms +[2025-08-22 23:16:44] [Rank 0] step:8721/10000 train_time:797224ms step_avg:91.41ms +[2025-08-22 23:16:46] [Rank 0] step:8741/10000 train_time:799152ms step_avg:91.43ms +[2025-08-22 23:16:46] [Rank 0] step:8741/10000 train_time:799152ms step_avg:91.43ms +[2025-08-22 23:16:48] [Rank 0] step:8761/10000 train_time:801087ms step_avg:91.44ms +[2025-08-22 23:16:48] [Rank 0] step:8761/10000 train_time:801087ms step_avg:91.44ms +[2025-08-22 23:16:50] [Rank 0] step:8781/10000 train_time:803023ms step_avg:91.45ms +[2025-08-22 23:16:50] [Rank 0] step:8781/10000 train_time:803023ms step_avg:91.45ms +[2025-08-22 23:16:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:16:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:17:05] [Rank 0] PRINT: step:8800/10000 val_loss:3.5819 svd_entropy: attn_qk:H=0.7992,top10E=0.23,eRank=223.9,q75/q25=41.36 attn_vo:H=0.8307,top10E=0.06,eRank=394.6,q75/q25=inf mlp_w1:H=0.9254,top10E=0.11,eRank=471.0,q75/q25=3.98 mlp_w2:H=0.9683,top10E=0.05,eRank=622.7,q75/q25=2.87 vo_prod:H=0.6883,top10E=0.10,eRank=219.7,q75/q25=inf train_time:804974ms step_avg:91.47ms +[2025-08-22 23:17:05] [Rank 0] PRINT: step:8800/10000 val_loss:3.5819 svd_entropy: attn_qk:H=0.7992,top10E=0.23,eRank=223.9,q75/q25=41.36 attn_vo:H=0.8307,top10E=0.06,eRank=394.6,q75/q25=inf mlp_w1:H=0.9254,top10E=0.11,eRank=471.0,q75/q25=3.98 mlp_w2:H=0.9683,top10E=0.05,eRank=622.7,q75/q25=2.87 vo_prod:H=0.6883,top10E=0.10,eRank=219.7,q75/q25=inf train_time:804974ms step_avg:91.47ms +[2025-08-22 23:17:06] [Rank 0] step:8801/10000 train_time:804992ms step_avg:91.47ms +[2025-08-22 23:17:06] [Rank 0] step:8801/10000 train_time:804992ms step_avg:91.47ms +[2025-08-22 23:17:07] [Rank 0] step:8821/10000 train_time:806905ms step_avg:91.48ms +[2025-08-22 23:17:07] [Rank 0] step:8821/10000 train_time:806905ms step_avg:91.48ms +[2025-08-22 23:17:09] [Rank 0] step:8841/10000 train_time:808855ms step_avg:91.49ms +[2025-08-22 23:17:09] [Rank 0] step:8841/10000 train_time:808855ms step_avg:91.49ms +[2025-08-22 23:17:11] [Rank 0] step:8861/10000 train_time:810783ms step_avg:91.50ms +[2025-08-22 23:17:11] [Rank 0] step:8861/10000 train_time:810783ms step_avg:91.50ms +[2025-08-22 23:17:13] [Rank 0] step:8881/10000 train_time:812717ms step_avg:91.51ms +[2025-08-22 23:17:13] [Rank 0] step:8881/10000 train_time:812717ms step_avg:91.51ms +[2025-08-22 23:17:15] [Rank 0] step:8901/10000 train_time:814653ms step_avg:91.52ms +[2025-08-22 23:17:15] [Rank 0] step:8901/10000 train_time:814653ms step_avg:91.52ms +[2025-08-22 23:17:17] [Rank 0] step:8921/10000 train_time:816604ms step_avg:91.54ms +[2025-08-22 23:17:17] [Rank 0] step:8921/10000 train_time:816604ms step_avg:91.54ms +[2025-08-22 23:17:19] [Rank 0] 
step:8941/10000 train_time:818545ms step_avg:91.55ms +[2025-08-22 23:17:19] [Rank 0] step:8941/10000 train_time:818545ms step_avg:91.55ms +[2025-08-22 23:17:21] [Rank 0] step:8961/10000 train_time:820481ms step_avg:91.56ms +[2025-08-22 23:17:21] [Rank 0] step:8961/10000 train_time:820481ms step_avg:91.56ms +[2025-08-22 23:17:23] [Rank 0] step:8981/10000 train_time:822418ms step_avg:91.57ms +[2025-08-22 23:17:23] [Rank 0] step:8981/10000 train_time:822418ms step_avg:91.57ms +[2025-08-22 23:17:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:17:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:17:39] [Rank 0] PRINT: step:9000/10000 val_loss:3.5700 svd_entropy: attn_qk:H=0.7994,top10E=0.23,eRank=224.0,q75/q25=41.27 attn_vo:H=0.8308,top10E=0.06,eRank=394.7,q75/q25=inf mlp_w1:H=0.9255,top10E=0.11,eRank=471.5,q75/q25=3.97 mlp_w2:H=0.9683,top10E=0.05,eRank=622.7,q75/q25=2.88 vo_prod:H=0.6884,top10E=0.10,eRank=219.9,q75/q25=inf train_time:824366ms step_avg:91.60ms +[2025-08-22 23:17:39] [Rank 0] PRINT: step:9000/10000 val_loss:3.5700 svd_entropy: attn_qk:H=0.7994,top10E=0.23,eRank=224.0,q75/q25=41.27 attn_vo:H=0.8308,top10E=0.06,eRank=394.7,q75/q25=inf mlp_w1:H=0.9255,top10E=0.11,eRank=471.5,q75/q25=3.97 mlp_w2:H=0.9683,top10E=0.05,eRank=622.7,q75/q25=2.88 vo_prod:H=0.6884,top10E=0.10,eRank=219.9,q75/q25=inf train_time:824366ms step_avg:91.60ms +[2025-08-22 23:17:39] [Rank 0] step:9001/10000 train_time:824384ms step_avg:91.59ms +[2025-08-22 23:17:39] [Rank 0] step:9001/10000 train_time:824384ms step_avg:91.59ms +[2025-08-22 23:17:41] [Rank 0] step:9021/10000 train_time:826306ms step_avg:91.60ms +[2025-08-22 23:17:41] [Rank 0] step:9021/10000 train_time:826306ms step_avg:91.60ms +[2025-08-22 23:17:43] [Rank 0] step:9041/10000 train_time:828238ms step_avg:91.61ms +[2025-08-22 
23:17:43] [Rank 0] step:9041/10000 train_time:828238ms step_avg:91.61ms +[2025-08-22 23:17:45] [Rank 0] step:9061/10000 train_time:830176ms step_avg:91.62ms +[2025-08-22 23:17:45] [Rank 0] step:9061/10000 train_time:830176ms step_avg:91.62ms +[2025-08-22 23:17:47] [Rank 0] step:9081/10000 train_time:832115ms step_avg:91.63ms +[2025-08-22 23:17:47] [Rank 0] step:9081/10000 train_time:832115ms step_avg:91.63ms +[2025-08-22 23:17:49] [Rank 0] step:9101/10000 train_time:834062ms step_avg:91.65ms +[2025-08-22 23:17:49] [Rank 0] step:9101/10000 train_time:834062ms step_avg:91.65ms +[2025-08-22 23:17:51] [Rank 0] step:9121/10000 train_time:836000ms step_avg:91.66ms +[2025-08-22 23:17:51] [Rank 0] step:9121/10000 train_time:836000ms step_avg:91.66ms +[2025-08-22 23:17:53] [Rank 0] step:9141/10000 train_time:837923ms step_avg:91.67ms +[2025-08-22 23:17:53] [Rank 0] step:9141/10000 train_time:837923ms step_avg:91.67ms +[2025-08-22 23:17:54] [Rank 0] step:9161/10000 train_time:839850ms step_avg:91.68ms +[2025-08-22 23:17:54] [Rank 0] step:9161/10000 train_time:839850ms step_avg:91.68ms +[2025-08-22 23:17:56] [Rank 0] step:9181/10000 train_time:841816ms step_avg:91.69ms +[2025-08-22 23:17:56] [Rank 0] step:9181/10000 train_time:841816ms step_avg:91.69ms +[2025-08-22 23:17:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:17:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:18:12] [Rank 0] PRINT: step:9200/10000 val_loss:3.5604 svd_entropy: attn_qk:H=0.7995,top10E=0.23,eRank=224.2,q75/q25=41.21 attn_vo:H=0.8308,top10E=0.06,eRank=394.7,q75/q25=inf mlp_w1:H=0.9257,top10E=0.11,eRank=471.9,q75/q25=3.96 mlp_w2:H=0.9683,top10E=0.05,eRank=622.6,q75/q25=2.88 vo_prod:H=0.6885,top10E=0.10,eRank=220.1,q75/q25=inf train_time:843755ms step_avg:91.71ms +[2025-08-22 23:18:12] [Rank 0] PRINT: step:9200/10000 val_loss:3.5604 svd_entropy: attn_qk:H=0.7995,top10E=0.23,eRank=224.2,q75/q25=41.21 attn_vo:H=0.8308,top10E=0.06,eRank=394.7,q75/q25=inf mlp_w1:H=0.9257,top10E=0.11,eRank=471.9,q75/q25=3.96 mlp_w2:H=0.9683,top10E=0.05,eRank=622.6,q75/q25=2.88 vo_prod:H=0.6885,top10E=0.10,eRank=220.1,q75/q25=inf train_time:843755ms step_avg:91.71ms +[2025-08-22 23:18:12] [Rank 0] step:9201/10000 train_time:843774ms step_avg:91.70ms +[2025-08-22 23:18:12] [Rank 0] step:9201/10000 train_time:843774ms step_avg:91.70ms +[2025-08-22 23:18:14] [Rank 0] step:9221/10000 train_time:845716ms step_avg:91.72ms +[2025-08-22 23:18:14] [Rank 0] step:9221/10000 train_time:845716ms step_avg:91.72ms +[2025-08-22 23:18:16] [Rank 0] step:9241/10000 train_time:847656ms step_avg:91.73ms +[2025-08-22 23:18:16] [Rank 0] step:9241/10000 train_time:847656ms step_avg:91.73ms +[2025-08-22 23:18:18] [Rank 0] step:9261/10000 train_time:849595ms step_avg:91.74ms +[2025-08-22 23:18:18] [Rank 0] step:9261/10000 train_time:849595ms step_avg:91.74ms +[2025-08-22 23:18:20] [Rank 0] step:9281/10000 train_time:851518ms step_avg:91.75ms +[2025-08-22 23:18:20] [Rank 0] step:9281/10000 train_time:851518ms step_avg:91.75ms +[2025-08-22 23:18:22] [Rank 0] step:9301/10000 train_time:853445ms step_avg:91.76ms +[2025-08-22 23:18:22] [Rank 0] step:9301/10000 train_time:853445ms step_avg:91.76ms +[2025-08-22 23:18:24] [Rank 0] step:9321/10000 train_time:855383ms step_avg:91.77ms +[2025-08-22 23:18:24] [Rank 0] step:9321/10000 train_time:855383ms step_avg:91.77ms +[2025-08-22 23:18:26] [Rank 0] 
step:9341/10000 train_time:857319ms step_avg:91.78ms +[2025-08-22 23:18:26] [Rank 0] step:9341/10000 train_time:857319ms step_avg:91.78ms +[2025-08-22 23:18:28] [Rank 0] step:9361/10000 train_time:859262ms step_avg:91.79ms +[2025-08-22 23:18:28] [Rank 0] step:9361/10000 train_time:859262ms step_avg:91.79ms +[2025-08-22 23:18:30] [Rank 0] step:9381/10000 train_time:861211ms step_avg:91.80ms +[2025-08-22 23:18:30] [Rank 0] step:9381/10000 train_time:861211ms step_avg:91.80ms +[2025-08-22 23:18:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:18:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:18:46] [Rank 0] PRINT: step:9400/10000 val_loss:3.5512 svd_entropy: attn_qk:H=0.7996,top10E=0.23,eRank=224.3,q75/q25=41.10 attn_vo:H=0.8308,top10E=0.06,eRank=394.8,q75/q25=inf mlp_w1:H=0.9258,top10E=0.11,eRank=472.3,q75/q25=3.96 mlp_w2:H=0.9683,top10E=0.05,eRank=622.6,q75/q25=2.88 vo_prod:H=0.6885,top10E=0.10,eRank=220.2,q75/q25=inf train_time:863165ms step_avg:91.83ms +[2025-08-22 23:18:46] [Rank 0] PRINT: step:9400/10000 val_loss:3.5512 svd_entropy: attn_qk:H=0.7996,top10E=0.23,eRank=224.3,q75/q25=41.10 attn_vo:H=0.8308,top10E=0.06,eRank=394.8,q75/q25=inf mlp_w1:H=0.9258,top10E=0.11,eRank=472.3,q75/q25=3.96 mlp_w2:H=0.9683,top10E=0.05,eRank=622.6,q75/q25=2.88 vo_prod:H=0.6885,top10E=0.10,eRank=220.2,q75/q25=inf train_time:863165ms step_avg:91.83ms +[2025-08-22 23:18:46] [Rank 0] step:9401/10000 train_time:863184ms step_avg:91.82ms +[2025-08-22 23:18:46] [Rank 0] step:9401/10000 train_time:863184ms step_avg:91.82ms +[2025-08-22 23:18:48] [Rank 0] step:9421/10000 train_time:865110ms step_avg:91.83ms +[2025-08-22 23:18:48] [Rank 0] step:9421/10000 train_time:865110ms step_avg:91.83ms +[2025-08-22 23:18:50] [Rank 0] step:9441/10000 train_time:867046ms step_avg:91.84ms +[2025-08-22 
23:18:50] [Rank 0] step:9441/10000 train_time:867046ms step_avg:91.84ms +[2025-08-22 23:18:52] [Rank 0] step:9461/10000 train_time:868984ms step_avg:91.85ms +[2025-08-22 23:18:52] [Rank 0] step:9461/10000 train_time:868984ms step_avg:91.85ms +[2025-08-22 23:18:54] [Rank 0] step:9481/10000 train_time:870923ms step_avg:91.86ms +[2025-08-22 23:18:54] [Rank 0] step:9481/10000 train_time:870923ms step_avg:91.86ms +[2025-08-22 23:18:56] [Rank 0] step:9501/10000 train_time:872869ms step_avg:91.87ms +[2025-08-22 23:18:56] [Rank 0] step:9501/10000 train_time:872869ms step_avg:91.87ms +[2025-08-22 23:18:58] [Rank 0] step:9521/10000 train_time:874797ms step_avg:91.88ms +[2025-08-22 23:18:58] [Rank 0] step:9521/10000 train_time:874797ms step_avg:91.88ms +[2025-08-22 23:18:59] [Rank 0] step:9541/10000 train_time:876734ms step_avg:91.89ms +[2025-08-22 23:18:59] [Rank 0] step:9541/10000 train_time:876734ms step_avg:91.89ms +[2025-08-22 23:19:01] [Rank 0] step:9561/10000 train_time:878665ms step_avg:91.90ms +[2025-08-22 23:19:01] [Rank 0] step:9561/10000 train_time:878665ms step_avg:91.90ms +[2025-08-22 23:19:03] [Rank 0] step:9581/10000 train_time:880605ms step_avg:91.91ms +[2025-08-22 23:19:03] [Rank 0] step:9581/10000 train_time:880605ms step_avg:91.91ms +[2025-08-22 23:19:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:19:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:19:19] [Rank 0] PRINT: step:9600/10000 val_loss:3.5417 svd_entropy: attn_qk:H=0.7997,top10E=0.23,eRank=224.3,q75/q25=41.03 attn_vo:H=0.8308,top10E=0.06,eRank=394.8,q75/q25=inf mlp_w1:H=0.9259,top10E=0.11,eRank=472.5,q75/q25=3.96 mlp_w2:H=0.9683,top10E=0.05,eRank=622.5,q75/q25=2.88 vo_prod:H=0.6886,top10E=0.10,eRank=220.3,q75/q25=inf train_time:882569ms step_avg:91.93ms +[2025-08-22 23:19:19] [Rank 0] PRINT: step:9600/10000 val_loss:3.5417 svd_entropy: attn_qk:H=0.7997,top10E=0.23,eRank=224.3,q75/q25=41.03 attn_vo:H=0.8308,top10E=0.06,eRank=394.8,q75/q25=inf mlp_w1:H=0.9259,top10E=0.11,eRank=472.5,q75/q25=3.96 mlp_w2:H=0.9683,top10E=0.05,eRank=622.5,q75/q25=2.88 vo_prod:H=0.6886,top10E=0.10,eRank=220.3,q75/q25=inf train_time:882569ms step_avg:91.93ms +[2025-08-22 23:19:19] [Rank 0] step:9601/10000 train_time:882587ms step_avg:91.93ms +[2025-08-22 23:19:19] [Rank 0] step:9601/10000 train_time:882587ms step_avg:91.93ms +[2025-08-22 23:19:21] [Rank 0] step:9621/10000 train_time:884526ms step_avg:91.94ms +[2025-08-22 23:19:21] [Rank 0] step:9621/10000 train_time:884526ms step_avg:91.94ms +[2025-08-22 23:19:23] [Rank 0] step:9641/10000 train_time:886467ms step_avg:91.95ms +[2025-08-22 23:19:23] [Rank 0] step:9641/10000 train_time:886467ms step_avg:91.95ms +[2025-08-22 23:19:25] [Rank 0] step:9661/10000 train_time:888436ms step_avg:91.96ms +[2025-08-22 23:19:25] [Rank 0] step:9661/10000 train_time:888436ms step_avg:91.96ms +[2025-08-22 23:19:27] [Rank 0] step:9681/10000 train_time:890397ms step_avg:91.97ms +[2025-08-22 23:19:27] [Rank 0] step:9681/10000 train_time:890397ms step_avg:91.97ms +[2025-08-22 23:19:29] [Rank 0] step:9701/10000 train_time:892373ms step_avg:91.99ms +[2025-08-22 23:19:29] [Rank 0] step:9701/10000 train_time:892373ms step_avg:91.99ms +[2025-08-22 23:19:31] [Rank 0] step:9721/10000 train_time:894339ms step_avg:92.00ms +[2025-08-22 23:19:31] [Rank 0] step:9721/10000 train_time:894339ms step_avg:92.00ms +[2025-08-22 23:19:33] [Rank 0] 
step:9741/10000 train_time:896321ms step_avg:92.02ms +[2025-08-22 23:19:33] [Rank 0] step:9741/10000 train_time:896321ms step_avg:92.02ms +[2025-08-22 23:19:35] [Rank 0] step:9761/10000 train_time:898290ms step_avg:92.03ms +[2025-08-22 23:19:35] [Rank 0] step:9761/10000 train_time:898290ms step_avg:92.03ms +[2025-08-22 23:19:37] [Rank 0] step:9781/10000 train_time:900263ms step_avg:92.04ms +[2025-08-22 23:19:37] [Rank 0] step:9781/10000 train_time:900263ms step_avg:92.04ms +[2025-08-22 23:19:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:19:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:19:53] [Rank 0] PRINT: step:9800/10000 val_loss:3.5334 svd_entropy: attn_qk:H=0.7997,top10E=0.23,eRank=224.4,q75/q25=41.01 attn_vo:H=0.8308,top10E=0.06,eRank=394.9,q75/q25=inf mlp_w1:H=0.9260,top10E=0.11,eRank=472.8,q75/q25=3.95 mlp_w2:H=0.9683,top10E=0.05,eRank=622.5,q75/q25=2.88 vo_prod:H=0.6886,top10E=0.10,eRank=220.4,q75/q25=inf train_time:902255ms step_avg:92.07ms +[2025-08-22 23:19:53] [Rank 0] PRINT: step:9800/10000 val_loss:3.5334 svd_entropy: attn_qk:H=0.7997,top10E=0.23,eRank=224.4,q75/q25=41.01 attn_vo:H=0.8308,top10E=0.06,eRank=394.9,q75/q25=inf mlp_w1:H=0.9260,top10E=0.11,eRank=472.8,q75/q25=3.95 mlp_w2:H=0.9683,top10E=0.05,eRank=622.5,q75/q25=2.88 vo_prod:H=0.6886,top10E=0.10,eRank=220.4,q75/q25=inf train_time:902255ms step_avg:92.07ms +[2025-08-22 23:19:53] [Rank 0] step:9801/10000 train_time:902273ms step_avg:92.06ms +[2025-08-22 23:19:53] [Rank 0] step:9801/10000 train_time:902273ms step_avg:92.06ms +[2025-08-22 23:19:55] [Rank 0] step:9821/10000 train_time:904231ms step_avg:92.07ms +[2025-08-22 23:19:55] [Rank 0] step:9821/10000 train_time:904231ms step_avg:92.07ms +[2025-08-22 23:19:57] [Rank 0] step:9841/10000 train_time:906202ms step_avg:92.08ms +[2025-08-22 
23:19:57] [Rank 0] step:9841/10000 train_time:906202ms step_avg:92.08ms +[2025-08-22 23:19:59] [Rank 0] step:9861/10000 train_time:908153ms step_avg:92.10ms +[2025-08-22 23:19:59] [Rank 0] step:9861/10000 train_time:908153ms step_avg:92.10ms +[2025-08-22 23:20:01] [Rank 0] step:9881/10000 train_time:910107ms step_avg:92.11ms +[2025-08-22 23:20:01] [Rank 0] step:9881/10000 train_time:910107ms step_avg:92.11ms +[2025-08-22 23:20:03] [Rank 0] step:9901/10000 train_time:912084ms step_avg:92.12ms +[2025-08-22 23:20:03] [Rank 0] step:9901/10000 train_time:912084ms step_avg:92.12ms +[2025-08-22 23:20:05] [Rank 0] step:9921/10000 train_time:914044ms step_avg:92.13ms +[2025-08-22 23:20:05] [Rank 0] step:9921/10000 train_time:914044ms step_avg:92.13ms +[2025-08-22 23:20:07] [Rank 0] step:9941/10000 train_time:916019ms step_avg:92.15ms +[2025-08-22 23:20:07] [Rank 0] step:9941/10000 train_time:916019ms step_avg:92.15ms +[2025-08-22 23:20:09] [Rank 0] step:9961/10000 train_time:917979ms step_avg:92.16ms +[2025-08-22 23:20:09] [Rank 0] step:9961/10000 train_time:917979ms step_avg:92.16ms +[2025-08-22 23:20:11] [Rank 0] step:9981/10000 train_time:919950ms step_avg:92.17ms +[2025-08-22 23:20:11] [Rank 0] step:9981/10000 train_time:919950ms step_avg:92.17ms +[2025-08-22 23:20:13] [Rank 0] step:10000/10000 train_time:921826ms step_avg:92.18ms +[2025-08-22 23:20:13] [Rank 0] step:10000/10000 train_time:921826ms step_avg:92.18ms +[2025-08-22 23:20:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:20:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:20:27] [Rank 0] PRINT: step:10000/10000 val_loss:3.5258 svd_entropy: attn_qk:H=0.7997,top10E=0.23,eRank=224.4,q75/q25=40.99 attn_vo:H=0.8309,top10E=0.06,eRank=394.9,q75/q25=inf mlp_w1:H=0.9260,top10E=0.11,eRank=473.0,q75/q25=3.95 mlp_w2:H=0.9683,top10E=0.05,eRank=622.5,q75/q25=2.88 vo_prod:H=0.6887,top10E=0.10,eRank=220.5,q75/q25=inf train_time:921942ms step_avg:92.19ms +[2025-08-22 23:20:27] [Rank 0] PRINT: step:10000/10000 val_loss:3.5258 svd_entropy: attn_qk:H=0.7997,top10E=0.23,eRank=224.4,q75/q25=40.99 attn_vo:H=0.8309,top10E=0.06,eRank=394.9,q75/q25=inf mlp_w1:H=0.9260,top10E=0.11,eRank=473.0,q75/q25=3.95 mlp_w2:H=0.9683,top10E=0.05,eRank=622.5,q75/q25=2.88 vo_prod:H=0.6887,top10E=0.10,eRank=220.5,q75/q25=inf train_time:921942ms step_avg:92.19ms +[2025-08-22 23:20:27] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 23:20:27 2025 --- +[2025-08-22 23:20:27] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 23:20:27 2025 --- +[2025-08-22 23:20:27] [Rank 0] PRINT: Peak memory allocated: 11393 MiB reserved: 16336 MiB +[2025-08-22 23:20:27] [Rank 0] PRINT: Peak memory allocated: 11393 MiB reserved: 16336 MiB diff --git a/logs_svd_gated/mode_9_param_gated_seed_41/config.json b/logs_svd_gated/mode_9_param_gated_seed_41/config.json new file mode 100644 index 0000000000000000000000000000000000000000..81c1c8c9bffc324d999805414ddf3df455558a16 --- /dev/null +++ b/logs_svd_gated/mode_9_param_gated_seed_41/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 41, + "optimizer_mode": 9, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + "cooldown_frac": 
0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "a4c2a877-b45e-4e50-b6ab-3b54f256f9e0", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_9_param_gated_seed_41/training_log_a4c2a877-b45e-4e50-b6ab-3b54f256f9e0.txt b/logs_svd_gated/mode_9_param_gated_seed_41/training_log_a4c2a877-b45e-4e50-b6ab-3b54f256f9e0.txt new file mode 100644 index 0000000000000000000000000000000000000000..d8c866fccfc918784b34c5b63542997070eecedb --- /dev/null +++ b/logs_svd_gated/mode_9_param_gated_seed_41/training_log_a4c2a877-b45e-4e50-b6ab-3b54f256f9e0.txt @@ -0,0 +1,2926 @@ +[2025-08-22 12:53:37] [Rank 0] PRINT: --- Script Start: Fri Aug 22 12:53:37 2025 --- +[2025-08-22 12:53:37] [Rank 0] PRINT: --- Script Start: Fri Aug 22 12:53:37 2025 --- +[2025-08-22 12:53:37] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=9, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 12:53:37] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=41, optimizer_mode=9, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 12:53:37] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 12:53:37] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 12:53:37] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 12:53:37] [Rank 0] PRINT: Using fixed seed: 41 +[2025-08-22 12:53:37] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_9_param_gated_seed_41 +[2025-08-22 12:53:37] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_9_param_gated_seed_41 +[2025-08-22 12:53:37] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import 
argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * 
num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k 
= min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 12:53:37] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 12:53:37] [Rank 0] PRINT: Constructing model... +[2025-08-22 12:53:37] [Rank 0] PRINT: Constructing model... +[2025-08-22 12:53:39] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 12:53:39] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 12:53:39] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 12:53:39] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 12:53:39] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 12:53:39] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 12:53:39] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 9 +[2025-08-22 12:53:39] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 9 +[2025-08-22 12:53:39] [Rank 0] PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: 0.05). +[2025-08-22 12:53:39] [Rank 0] PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: 0.05). +[2025-08-22 12:53:39] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 12:53:39] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 12:53:39] [Rank 0] PRINT: Muon optimizer is active with 47 parameters. +[2025-08-22 12:53:39] [Rank 0] PRINT: Muon optimizer is active with 47 parameters. +[2025-08-22 12:53:39] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 12:53:39] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 12:53:40] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 12:53:40] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 12:53:40] [Rank 0] PRINT: Starting warmup... +[2025-08-22 12:53:40] [Rank 0] PRINT: Starting warmup... +[2025-08-22 12:54:26] [Rank 0] PRINT: Warmup complete. +[2025-08-22 12:54:26] [Rank 0] PRINT: Warmup complete. +[2025-08-22 12:54:27] [Rank 0] PRINT: Starting training... +[2025-08-22 12:54:27] [Rank 0] PRINT: Starting training... 
+[2025-08-22 12:54:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:54:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:54:44] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 12:54:44] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4623,top10E=0.02,eRank=232.8,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 12:54:46] [Rank 0] step:21/10000 train_time:1833ms step_avg:87.30ms +[2025-08-22 12:54:46] [Rank 0] step:21/10000 train_time:1833ms step_avg:87.30ms +[2025-08-22 12:54:48] [Rank 0] step:41/10000 train_time:3625ms step_avg:88.41ms +[2025-08-22 12:54:48] [Rank 0] step:41/10000 train_time:3625ms step_avg:88.41ms +[2025-08-22 12:54:50] [Rank 0] step:61/10000 train_time:5419ms step_avg:88.84ms +[2025-08-22 12:54:50] [Rank 0] step:61/10000 train_time:5419ms step_avg:88.84ms +[2025-08-22 12:54:51] [Rank 0] step:81/10000 train_time:7216ms step_avg:89.09ms +[2025-08-22 12:54:51] [Rank 0] step:81/10000 train_time:7216ms step_avg:89.09ms +[2025-08-22 12:54:53] [Rank 0] step:101/10000 train_time:9012ms step_avg:89.23ms +[2025-08-22 12:54:53] [Rank 0] step:101/10000 train_time:9012ms step_avg:89.23ms +[2025-08-22 12:54:55] [Rank 0] step:121/10000 train_time:10810ms step_avg:89.34ms +[2025-08-22 12:54:55] [Rank 0] step:121/10000 
train_time:10810ms step_avg:89.34ms +[2025-08-22 12:54:57] [Rank 0] step:141/10000 train_time:12607ms step_avg:89.41ms +[2025-08-22 12:54:57] [Rank 0] step:141/10000 train_time:12607ms step_avg:89.41ms +[2025-08-22 12:54:59] [Rank 0] step:161/10000 train_time:14405ms step_avg:89.47ms +[2025-08-22 12:54:59] [Rank 0] step:161/10000 train_time:14405ms step_avg:89.47ms +[2025-08-22 12:55:00] [Rank 0] step:181/10000 train_time:16202ms step_avg:89.52ms +[2025-08-22 12:55:00] [Rank 0] step:181/10000 train_time:16202ms step_avg:89.52ms +[2025-08-22 12:55:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:55:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:55:16] [Rank 0] PRINT: step:200/10000 val_loss:5.7403 svd_entropy: attn_qk:H=0.3888,top10E=0.81,eRank=19.1,q75/q25=23.69 attn_vo:H=0.6119,top10E=0.47,eRank=132.8,q75/q25=26.40 mlp_w1:H=0.9054,top10E=0.09,eRank=415.1,q75/q25=6.45 mlp_w2:H=0.8756,top10E=0.11,eRank=342.4,q75/q25=13.29 vo_prod:H=0.3374,top10E=0.83,eRank=19.8,q75/q25=232.27 train_time:18005ms step_avg:90.03ms +[2025-08-22 12:55:16] [Rank 0] PRINT: step:200/10000 val_loss:5.7403 svd_entropy: attn_qk:H=0.3888,top10E=0.81,eRank=19.1,q75/q25=23.69 attn_vo:H=0.6119,top10E=0.47,eRank=132.8,q75/q25=26.40 mlp_w1:H=0.9054,top10E=0.09,eRank=415.1,q75/q25=6.45 mlp_w2:H=0.8756,top10E=0.11,eRank=342.4,q75/q25=13.29 vo_prod:H=0.3374,top10E=0.83,eRank=19.8,q75/q25=232.27 train_time:18005ms step_avg:90.03ms +[2025-08-22 12:55:16] [Rank 0] step:201/10000 train_time:18025ms step_avg:89.68ms +[2025-08-22 12:55:16] [Rank 0] step:201/10000 train_time:18025ms step_avg:89.68ms +[2025-08-22 12:55:18] [Rank 0] step:221/10000 train_time:19816ms step_avg:89.66ms +[2025-08-22 12:55:18] [Rank 0] step:221/10000 train_time:19816ms step_avg:89.66ms +[2025-08-22 12:55:19] [Rank 0] step:241/10000 
train_time:21610ms step_avg:89.67ms +[2025-08-22 12:55:19] [Rank 0] step:241/10000 train_time:21610ms step_avg:89.67ms +[2025-08-22 12:55:21] [Rank 0] step:261/10000 train_time:23406ms step_avg:89.68ms +[2025-08-22 12:55:21] [Rank 0] step:261/10000 train_time:23406ms step_avg:89.68ms +[2025-08-22 12:55:23] [Rank 0] step:281/10000 train_time:25202ms step_avg:89.69ms +[2025-08-22 12:55:23] [Rank 0] step:281/10000 train_time:25202ms step_avg:89.69ms +[2025-08-22 12:55:25] [Rank 0] step:301/10000 train_time:27000ms step_avg:89.70ms +[2025-08-22 12:55:25] [Rank 0] step:301/10000 train_time:27000ms step_avg:89.70ms +[2025-08-22 12:55:27] [Rank 0] step:321/10000 train_time:28798ms step_avg:89.71ms +[2025-08-22 12:55:27] [Rank 0] step:321/10000 train_time:28798ms step_avg:89.71ms +[2025-08-22 12:55:28] [Rank 0] step:341/10000 train_time:30599ms step_avg:89.73ms +[2025-08-22 12:55:28] [Rank 0] step:341/10000 train_time:30599ms step_avg:89.73ms +[2025-08-22 12:55:30] [Rank 0] step:361/10000 train_time:32399ms step_avg:89.75ms +[2025-08-22 12:55:30] [Rank 0] step:361/10000 train_time:32399ms step_avg:89.75ms +[2025-08-22 12:55:32] [Rank 0] step:381/10000 train_time:34198ms step_avg:89.76ms +[2025-08-22 12:55:32] [Rank 0] step:381/10000 train_time:34198ms step_avg:89.76ms +[2025-08-22 12:55:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:55:34] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:55:47] [Rank 0] PRINT: step:400/10000 val_loss:5.2808 svd_entropy: attn_qk:H=0.4118,top10E=0.78,eRank=23.1,q75/q25=27.88 attn_vo:H=0.6569,top10E=0.42,eRank=168.1,q75/q25=23.49 mlp_w1:H=0.9158,top10E=0.08,eRank=449.5,q75/q25=6.94 mlp_w2:H=0.9226,top10E=0.07,eRank=470.4,q75/q25=8.82 vo_prod:H=0.4271,top10E=0.74,eRank=39.4,q75/q25=234.45 train_time:36004ms step_avg:90.01ms +[2025-08-22 12:55:47] [Rank 0] PRINT: step:400/10000 val_loss:5.2808 svd_entropy: attn_qk:H=0.4118,top10E=0.78,eRank=23.1,q75/q25=27.88 attn_vo:H=0.6569,top10E=0.42,eRank=168.1,q75/q25=23.49 mlp_w1:H=0.9158,top10E=0.08,eRank=449.5,q75/q25=6.94 mlp_w2:H=0.9226,top10E=0.07,eRank=470.4,q75/q25=8.82 vo_prod:H=0.4271,top10E=0.74,eRank=39.4,q75/q25=234.45 train_time:36004ms step_avg:90.01ms +[2025-08-22 12:55:48] [Rank 0] step:401/10000 train_time:36025ms step_avg:89.84ms +[2025-08-22 12:55:48] [Rank 0] step:401/10000 train_time:36025ms step_avg:89.84ms +[2025-08-22 12:55:49] [Rank 0] step:421/10000 train_time:37823ms step_avg:89.84ms +[2025-08-22 12:55:49] [Rank 0] step:421/10000 train_time:37823ms step_avg:89.84ms +[2025-08-22 12:55:51] [Rank 0] step:441/10000 train_time:39618ms step_avg:89.84ms +[2025-08-22 12:55:51] [Rank 0] step:441/10000 train_time:39618ms step_avg:89.84ms +[2025-08-22 12:55:53] [Rank 0] step:461/10000 train_time:41414ms step_avg:89.83ms +[2025-08-22 12:55:53] [Rank 0] step:461/10000 train_time:41414ms step_avg:89.83ms +[2025-08-22 12:55:55] [Rank 0] step:481/10000 train_time:43209ms step_avg:89.83ms +[2025-08-22 12:55:55] [Rank 0] step:481/10000 train_time:43209ms step_avg:89.83ms +[2025-08-22 12:55:57] [Rank 0] step:501/10000 train_time:45004ms step_avg:89.83ms +[2025-08-22 12:55:57] [Rank 0] step:501/10000 train_time:45004ms step_avg:89.83ms +[2025-08-22 12:55:58] [Rank 0] step:521/10000 train_time:46800ms step_avg:89.83ms +[2025-08-22 12:55:58] [Rank 0] step:521/10000 train_time:46800ms step_avg:89.83ms +[2025-08-22 12:56:00] [Rank 0] step:541/10000 
train_time:48598ms step_avg:89.83ms +[2025-08-22 12:56:00] [Rank 0] step:541/10000 train_time:48598ms step_avg:89.83ms +[2025-08-22 12:56:02] [Rank 0] step:561/10000 train_time:50395ms step_avg:89.83ms +[2025-08-22 12:56:02] [Rank 0] step:561/10000 train_time:50395ms step_avg:89.83ms +[2025-08-22 12:56:04] [Rank 0] step:581/10000 train_time:52193ms step_avg:89.83ms +[2025-08-22 12:56:04] [Rank 0] step:581/10000 train_time:52193ms step_avg:89.83ms +[2025-08-22 12:56:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:56:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:56:19] [Rank 0] PRINT: step:600/10000 val_loss:5.0460 svd_entropy: attn_qk:H=0.4344,top10E=0.75,eRank=28.9,q75/q25=39.86 attn_vo:H=0.6879,top10E=0.38,eRank=179.8,q75/q25=25.23 mlp_w1:H=0.9198,top10E=0.07,eRank=461.4,q75/q25=7.12 mlp_w2:H=0.9270,top10E=0.07,eRank=485.8,q75/q25=8.56 vo_prod:H=0.4830,top10E=0.67,eRank=48.4,q75/q25=295.41 train_time:53998ms step_avg:90.00ms +[2025-08-22 12:56:19] [Rank 0] PRINT: step:600/10000 val_loss:5.0460 svd_entropy: attn_qk:H=0.4344,top10E=0.75,eRank=28.9,q75/q25=39.86 attn_vo:H=0.6879,top10E=0.38,eRank=179.8,q75/q25=25.23 mlp_w1:H=0.9198,top10E=0.07,eRank=461.4,q75/q25=7.12 mlp_w2:H=0.9270,top10E=0.07,eRank=485.8,q75/q25=8.56 vo_prod:H=0.4830,top10E=0.67,eRank=48.4,q75/q25=295.41 train_time:53998ms step_avg:90.00ms +[2025-08-22 12:56:19] [Rank 0] step:601/10000 train_time:54019ms step_avg:89.88ms +[2025-08-22 12:56:19] [Rank 0] step:601/10000 train_time:54019ms step_avg:89.88ms +[2025-08-22 12:56:21] [Rank 0] step:621/10000 train_time:55816ms step_avg:89.88ms +[2025-08-22 12:56:21] [Rank 0] step:621/10000 train_time:55816ms step_avg:89.88ms +[2025-08-22 12:56:23] [Rank 0] step:641/10000 train_time:57609ms step_avg:89.87ms +[2025-08-22 12:56:23] [Rank 0] step:641/10000 
train_time:57609ms step_avg:89.87ms +[2025-08-22 12:56:25] [Rank 0] step:661/10000 train_time:59404ms step_avg:89.87ms +[2025-08-22 12:56:25] [Rank 0] step:661/10000 train_time:59404ms step_avg:89.87ms +[2025-08-22 12:56:26] [Rank 0] step:681/10000 train_time:61200ms step_avg:89.87ms +[2025-08-22 12:56:26] [Rank 0] step:681/10000 train_time:61200ms step_avg:89.87ms +[2025-08-22 12:56:28] [Rank 0] step:701/10000 train_time:62997ms step_avg:89.87ms +[2025-08-22 12:56:28] [Rank 0] step:701/10000 train_time:62997ms step_avg:89.87ms +[2025-08-22 12:56:30] [Rank 0] step:721/10000 train_time:64793ms step_avg:89.87ms +[2025-08-22 12:56:30] [Rank 0] step:721/10000 train_time:64793ms step_avg:89.87ms +[2025-08-22 12:56:32] [Rank 0] step:741/10000 train_time:66591ms step_avg:89.87ms +[2025-08-22 12:56:32] [Rank 0] step:741/10000 train_time:66591ms step_avg:89.87ms +[2025-08-22 12:56:34] [Rank 0] step:761/10000 train_time:68403ms step_avg:89.89ms +[2025-08-22 12:56:34] [Rank 0] step:761/10000 train_time:68403ms step_avg:89.89ms +[2025-08-22 12:56:35] [Rank 0] step:781/10000 train_time:70213ms step_avg:89.90ms +[2025-08-22 12:56:35] [Rank 0] step:781/10000 train_time:70213ms step_avg:89.90ms +[2025-08-22 12:56:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:56:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:56:51] [Rank 0] PRINT: step:800/10000 val_loss:4.7668 svd_entropy: attn_qk:H=0.4307,top10E=0.73,eRank=35.6,q75/q25=58.14 attn_vo:H=0.7001,top10E=0.36,eRank=183.6,q75/q25=25.55 mlp_w1:H=0.9195,top10E=0.07,eRank=460.7,q75/q25=7.17 mlp_w2:H=0.9258,top10E=0.07,eRank=482.6,q75/q25=8.50 vo_prod:H=0.4979,top10E=0.65,eRank=53.1,q75/q25=309.73 train_time:72029ms step_avg:90.04ms +[2025-08-22 12:56:51] [Rank 0] PRINT: step:800/10000 val_loss:4.7668 svd_entropy: attn_qk:H=0.4307,top10E=0.73,eRank=35.6,q75/q25=58.14 attn_vo:H=0.7001,top10E=0.36,eRank=183.6,q75/q25=25.55 mlp_w1:H=0.9195,top10E=0.07,eRank=460.7,q75/q25=7.17 mlp_w2:H=0.9258,top10E=0.07,eRank=482.6,q75/q25=8.50 vo_prod:H=0.4979,top10E=0.65,eRank=53.1,q75/q25=309.73 train_time:72029ms step_avg:90.04ms +[2025-08-22 12:56:51] [Rank 0] step:801/10000 train_time:72050ms step_avg:89.95ms +[2025-08-22 12:56:51] [Rank 0] step:801/10000 train_time:72050ms step_avg:89.95ms +[2025-08-22 12:56:53] [Rank 0] step:821/10000 train_time:73848ms step_avg:89.95ms +[2025-08-22 12:56:53] [Rank 0] step:821/10000 train_time:73848ms step_avg:89.95ms +[2025-08-22 12:56:54] [Rank 0] step:841/10000 train_time:75654ms step_avg:89.96ms +[2025-08-22 12:56:54] [Rank 0] step:841/10000 train_time:75654ms step_avg:89.96ms +[2025-08-22 12:56:56] [Rank 0] step:861/10000 train_time:77461ms step_avg:89.97ms +[2025-08-22 12:56:56] [Rank 0] step:861/10000 train_time:77461ms step_avg:89.97ms +[2025-08-22 12:56:58] [Rank 0] step:881/10000 train_time:79270ms step_avg:89.98ms +[2025-08-22 12:56:58] [Rank 0] step:881/10000 train_time:79270ms step_avg:89.98ms +[2025-08-22 12:57:00] [Rank 0] step:901/10000 train_time:81079ms step_avg:89.99ms +[2025-08-22 12:57:00] [Rank 0] step:901/10000 train_time:81079ms step_avg:89.99ms +[2025-08-22 12:57:02] [Rank 0] step:921/10000 train_time:82888ms step_avg:90.00ms +[2025-08-22 12:57:02] [Rank 0] step:921/10000 train_time:82888ms step_avg:90.00ms +[2025-08-22 12:57:03] [Rank 0] step:941/10000 
train_time:84699ms step_avg:90.01ms +[2025-08-22 12:57:03] [Rank 0] step:941/10000 train_time:84699ms step_avg:90.01ms +[2025-08-22 12:57:05] [Rank 0] step:961/10000 train_time:86509ms step_avg:90.02ms +[2025-08-22 12:57:05] [Rank 0] step:961/10000 train_time:86509ms step_avg:90.02ms +[2025-08-22 12:57:07] [Rank 0] step:981/10000 train_time:88322ms step_avg:90.03ms +[2025-08-22 12:57:07] [Rank 0] step:981/10000 train_time:88322ms step_avg:90.03ms +[2025-08-22 12:57:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:57:09] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:57:22] [Rank 0] PRINT: step:1000/10000 val_loss:4.6288 svd_entropy: attn_qk:H=0.4405,top10E=0.72,eRank=39.9,q75/q25=70.20 attn_vo:H=0.7062,top10E=0.36,eRank=184.9,q75/q25=25.45 mlp_w1:H=0.9195,top10E=0.08,eRank=461.0,q75/q25=7.09 mlp_w2:H=0.9254,top10E=0.08,eRank=481.6,q75/q25=8.34 vo_prod:H=0.5002,top10E=0.65,eRank=54.9,q75/q25=310.48 train_time:90140ms step_avg:90.14ms +[2025-08-22 12:57:22] [Rank 0] PRINT: step:1000/10000 val_loss:4.6288 svd_entropy: attn_qk:H=0.4405,top10E=0.72,eRank=39.9,q75/q25=70.20 attn_vo:H=0.7062,top10E=0.36,eRank=184.9,q75/q25=25.45 mlp_w1:H=0.9195,top10E=0.08,eRank=461.0,q75/q25=7.09 mlp_w2:H=0.9254,top10E=0.08,eRank=481.6,q75/q25=8.34 vo_prod:H=0.5002,top10E=0.65,eRank=54.9,q75/q25=310.48 train_time:90140ms step_avg:90.14ms +[2025-08-22 12:57:23] [Rank 0] step:1001/10000 train_time:90161ms step_avg:90.07ms +[2025-08-22 12:57:23] [Rank 0] step:1001/10000 train_time:90161ms step_avg:90.07ms +[2025-08-22 12:57:24] [Rank 0] step:1021/10000 train_time:91958ms step_avg:90.07ms +[2025-08-22 12:57:24] [Rank 0] step:1021/10000 train_time:91958ms step_avg:90.07ms +[2025-08-22 12:57:26] [Rank 0] step:1041/10000 train_time:93765ms step_avg:90.07ms +[2025-08-22 12:57:26] [Rank 0] step:1041/10000 
train_time:93765ms step_avg:90.07ms +[2025-08-22 12:57:28] [Rank 0] step:1061/10000 train_time:95583ms step_avg:90.09ms +[2025-08-22 12:57:28] [Rank 0] step:1061/10000 train_time:95583ms step_avg:90.09ms +[2025-08-22 12:57:30] [Rank 0] step:1081/10000 train_time:97393ms step_avg:90.10ms +[2025-08-22 12:57:30] [Rank 0] step:1081/10000 train_time:97393ms step_avg:90.10ms +[2025-08-22 12:57:32] [Rank 0] step:1101/10000 train_time:99204ms step_avg:90.10ms +[2025-08-22 12:57:32] [Rank 0] step:1101/10000 train_time:99204ms step_avg:90.10ms +[2025-08-22 12:57:33] [Rank 0] step:1121/10000 train_time:101016ms step_avg:90.11ms +[2025-08-22 12:57:33] [Rank 0] step:1121/10000 train_time:101016ms step_avg:90.11ms +[2025-08-22 12:57:35] [Rank 0] step:1141/10000 train_time:102828ms step_avg:90.12ms +[2025-08-22 12:57:35] [Rank 0] step:1141/10000 train_time:102828ms step_avg:90.12ms +[2025-08-22 12:57:37] [Rank 0] step:1161/10000 train_time:104641ms step_avg:90.13ms +[2025-08-22 12:57:37] [Rank 0] step:1161/10000 train_time:104641ms step_avg:90.13ms +[2025-08-22 12:57:39] [Rank 0] step:1181/10000 train_time:106454ms step_avg:90.14ms +[2025-08-22 12:57:39] [Rank 0] step:1181/10000 train_time:106454ms step_avg:90.14ms +[2025-08-22 12:57:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:57:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:57:54] [Rank 0] PRINT: step:1200/10000 val_loss:4.5242 svd_entropy: attn_qk:H=0.4473,top10E=0.71,eRank=41.7,q75/q25=78.35 attn_vo:H=0.7069,top10E=0.36,eRank=184.8,q75/q25=25.93 mlp_w1:H=0.9174,top10E=0.08,eRank=456.0,q75/q25=7.08 mlp_w2:H=0.9231,top10E=0.08,eRank=476.1,q75/q25=8.21 vo_prod:H=0.4957,top10E=0.65,eRank=56.6,q75/q25=323.00 train_time:108272ms step_avg:90.23ms +[2025-08-22 12:57:54] [Rank 0] PRINT: step:1200/10000 val_loss:4.5242 svd_entropy: attn_qk:H=0.4473,top10E=0.71,eRank=41.7,q75/q25=78.35 attn_vo:H=0.7069,top10E=0.36,eRank=184.8,q75/q25=25.93 mlp_w1:H=0.9174,top10E=0.08,eRank=456.0,q75/q25=7.08 mlp_w2:H=0.9231,top10E=0.08,eRank=476.1,q75/q25=8.21 vo_prod:H=0.4957,top10E=0.65,eRank=56.6,q75/q25=323.00 train_time:108272ms step_avg:90.23ms +[2025-08-22 12:57:54] [Rank 0] step:1201/10000 train_time:108293ms step_avg:90.17ms +[2025-08-22 12:57:54] [Rank 0] step:1201/10000 train_time:108293ms step_avg:90.17ms +[2025-08-22 12:57:56] [Rank 0] step:1221/10000 train_time:110104ms step_avg:90.18ms +[2025-08-22 12:57:56] [Rank 0] step:1221/10000 train_time:110104ms step_avg:90.18ms +[2025-08-22 12:57:58] [Rank 0] step:1241/10000 train_time:111912ms step_avg:90.18ms +[2025-08-22 12:57:58] [Rank 0] step:1241/10000 train_time:111912ms step_avg:90.18ms +[2025-08-22 12:58:00] [Rank 0] step:1261/10000 train_time:113720ms step_avg:90.18ms +[2025-08-22 12:58:00] [Rank 0] step:1261/10000 train_time:113720ms step_avg:90.18ms +[2025-08-22 12:58:01] [Rank 0] step:1281/10000 train_time:115530ms step_avg:90.19ms +[2025-08-22 12:58:01] [Rank 0] step:1281/10000 train_time:115530ms step_avg:90.19ms +[2025-08-22 12:58:03] [Rank 0] step:1301/10000 train_time:117341ms step_avg:90.19ms +[2025-08-22 12:58:03] [Rank 0] step:1301/10000 train_time:117341ms step_avg:90.19ms +[2025-08-22 12:58:05] [Rank 0] step:1321/10000 train_time:119152ms step_avg:90.20ms +[2025-08-22 12:58:05] [Rank 0] step:1321/10000 train_time:119152ms step_avg:90.20ms +[2025-08-22 12:58:07] 
[Rank 0] step:1341/10000 train_time:120962ms step_avg:90.20ms +[2025-08-22 12:58:07] [Rank 0] step:1341/10000 train_time:120962ms step_avg:90.20ms +[2025-08-22 12:58:09] [Rank 0] step:1361/10000 train_time:122773ms step_avg:90.21ms +[2025-08-22 12:58:09] [Rank 0] step:1361/10000 train_time:122773ms step_avg:90.21ms +[2025-08-22 12:58:10] [Rank 0] step:1381/10000 train_time:124585ms step_avg:90.21ms +[2025-08-22 12:58:10] [Rank 0] step:1381/10000 train_time:124585ms step_avg:90.21ms +[2025-08-22 12:58:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:58:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:58:26] [Rank 0] PRINT: step:1400/10000 val_loss:4.4683 svd_entropy: attn_qk:H=0.4486,top10E=0.70,eRank=42.7,q75/q25=89.59 attn_vo:H=0.7111,top10E=0.35,eRank=185.9,q75/q25=26.09 mlp_w1:H=0.9172,top10E=0.09,eRank=455.8,q75/q25=7.04 mlp_w2:H=0.9232,top10E=0.09,eRank=476.8,q75/q25=8.01 vo_prod:H=0.4982,top10E=0.64,eRank=58.3,q75/q25=326.41 train_time:126403ms step_avg:90.29ms +[2025-08-22 12:58:26] [Rank 0] PRINT: step:1400/10000 val_loss:4.4683 svd_entropy: attn_qk:H=0.4486,top10E=0.70,eRank=42.7,q75/q25=89.59 attn_vo:H=0.7111,top10E=0.35,eRank=185.9,q75/q25=26.09 mlp_w1:H=0.9172,top10E=0.09,eRank=455.8,q75/q25=7.04 mlp_w2:H=0.9232,top10E=0.09,eRank=476.8,q75/q25=8.01 vo_prod:H=0.4982,top10E=0.64,eRank=58.3,q75/q25=326.41 train_time:126403ms step_avg:90.29ms +[2025-08-22 12:58:26] [Rank 0] step:1401/10000 train_time:126423ms step_avg:90.24ms +[2025-08-22 12:58:26] [Rank 0] step:1401/10000 train_time:126423ms step_avg:90.24ms +[2025-08-22 12:58:28] [Rank 0] step:1421/10000 train_time:128228ms step_avg:90.24ms +[2025-08-22 12:58:28] [Rank 0] step:1421/10000 train_time:128228ms step_avg:90.24ms +[2025-08-22 12:58:29] [Rank 0] step:1441/10000 train_time:130035ms step_avg:90.24ms 
+[2025-08-22 12:58:29] [Rank 0] step:1441/10000 train_time:130035ms step_avg:90.24ms +[2025-08-22 12:58:31] [Rank 0] step:1461/10000 train_time:131843ms step_avg:90.24ms +[2025-08-22 12:58:31] [Rank 0] step:1461/10000 train_time:131843ms step_avg:90.24ms +[2025-08-22 12:58:33] [Rank 0] step:1481/10000 train_time:133654ms step_avg:90.25ms +[2025-08-22 12:58:33] [Rank 0] step:1481/10000 train_time:133654ms step_avg:90.25ms +[2025-08-22 12:58:35] [Rank 0] step:1501/10000 train_time:135477ms step_avg:90.26ms +[2025-08-22 12:58:35] [Rank 0] step:1501/10000 train_time:135477ms step_avg:90.26ms +[2025-08-22 12:58:37] [Rank 0] step:1521/10000 train_time:137300ms step_avg:90.27ms +[2025-08-22 12:58:37] [Rank 0] step:1521/10000 train_time:137300ms step_avg:90.27ms +[2025-08-22 12:58:39] [Rank 0] step:1541/10000 train_time:139123ms step_avg:90.28ms +[2025-08-22 12:58:39] [Rank 0] step:1541/10000 train_time:139123ms step_avg:90.28ms +[2025-08-22 12:58:40] [Rank 0] step:1561/10000 train_time:140945ms step_avg:90.29ms +[2025-08-22 12:58:40] [Rank 0] step:1561/10000 train_time:140945ms step_avg:90.29ms +[2025-08-22 12:58:42] [Rank 0] step:1581/10000 train_time:142768ms step_avg:90.30ms +[2025-08-22 12:58:42] [Rank 0] step:1581/10000 train_time:142768ms step_avg:90.30ms +[2025-08-22 12:58:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:58:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 12:58:58] [Rank 0] PRINT: step:1600/10000 val_loss:4.3736 svd_entropy: attn_qk:H=0.4481,top10E=0.70,eRank=43.4,q75/q25=98.05 attn_vo:H=0.7181,top10E=0.35,eRank=188.5,q75/q25=25.39 mlp_w1:H=0.9170,top10E=0.09,eRank=456.0,q75/q25=7.04 mlp_w2:H=0.9227,top10E=0.09,eRank=476.6,q75/q25=8.00 vo_prod:H=0.5050,top10E=0.63,eRank=60.9,q75/q25=311.01 train_time:144599ms step_avg:90.37ms +[2025-08-22 12:58:58] [Rank 0] PRINT: step:1600/10000 val_loss:4.3736 svd_entropy: attn_qk:H=0.4481,top10E=0.70,eRank=43.4,q75/q25=98.05 attn_vo:H=0.7181,top10E=0.35,eRank=188.5,q75/q25=25.39 mlp_w1:H=0.9170,top10E=0.09,eRank=456.0,q75/q25=7.04 mlp_w2:H=0.9227,top10E=0.09,eRank=476.6,q75/q25=8.00 vo_prod:H=0.5050,top10E=0.63,eRank=60.9,q75/q25=311.01 train_time:144599ms step_avg:90.37ms +[2025-08-22 12:58:58] [Rank 0] step:1601/10000 train_time:144620ms step_avg:90.33ms +[2025-08-22 12:58:58] [Rank 0] step:1601/10000 train_time:144620ms step_avg:90.33ms +[2025-08-22 12:58:59] [Rank 0] step:1621/10000 train_time:146451ms step_avg:90.35ms +[2025-08-22 12:58:59] [Rank 0] step:1621/10000 train_time:146451ms step_avg:90.35ms +[2025-08-22 12:59:01] [Rank 0] step:1641/10000 train_time:148269ms step_avg:90.35ms +[2025-08-22 12:59:01] [Rank 0] step:1641/10000 train_time:148269ms step_avg:90.35ms +[2025-08-22 12:59:03] [Rank 0] step:1661/10000 train_time:150089ms step_avg:90.36ms +[2025-08-22 12:59:03] [Rank 0] step:1661/10000 train_time:150089ms step_avg:90.36ms +[2025-08-22 12:59:05] [Rank 0] step:1681/10000 train_time:151912ms step_avg:90.37ms +[2025-08-22 12:59:05] [Rank 0] step:1681/10000 train_time:151912ms step_avg:90.37ms +[2025-08-22 12:59:07] [Rank 0] step:1701/10000 train_time:153737ms step_avg:90.38ms +[2025-08-22 12:59:07] [Rank 0] step:1701/10000 train_time:153737ms step_avg:90.38ms +[2025-08-22 12:59:09] [Rank 0] step:1721/10000 train_time:155562ms step_avg:90.39ms +[2025-08-22 12:59:09] [Rank 0] step:1721/10000 train_time:155562ms step_avg:90.39ms +[2025-08-22 12:59:10] 
[Rank 0] step:1741/10000 train_time:157388ms step_avg:90.40ms +[2025-08-22 12:59:10] [Rank 0] step:1741/10000 train_time:157388ms step_avg:90.40ms +[2025-08-22 12:59:12] [Rank 0] step:1761/10000 train_time:159215ms step_avg:90.41ms +[2025-08-22 12:59:12] [Rank 0] step:1761/10000 train_time:159215ms step_avg:90.41ms +[2025-08-22 12:59:14] [Rank 0] step:1781/10000 train_time:161039ms step_avg:90.42ms +[2025-08-22 12:59:14] [Rank 0] step:1781/10000 train_time:161039ms step_avg:90.42ms +[2025-08-22 12:59:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:59:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:59:29] [Rank 0] PRINT: step:1800/10000 val_loss:4.3202 svd_entropy: attn_qk:H=0.4520,top10E=0.70,eRank=44.1,q75/q25=105.33 attn_vo:H=0.7232,top10E=0.34,eRank=190.7,q75/q25=25.50 mlp_w1:H=0.9167,top10E=0.09,eRank=456.2,q75/q25=7.14 mlp_w2:H=0.9226,top10E=0.09,eRank=476.8,q75/q25=8.17 vo_prod:H=0.5150,top10E=0.62,eRank=63.4,q75/q25=315.99 train_time:162868ms step_avg:90.48ms +[2025-08-22 12:59:29] [Rank 0] PRINT: step:1800/10000 val_loss:4.3202 svd_entropy: attn_qk:H=0.4520,top10E=0.70,eRank=44.1,q75/q25=105.33 attn_vo:H=0.7232,top10E=0.34,eRank=190.7,q75/q25=25.50 mlp_w1:H=0.9167,top10E=0.09,eRank=456.2,q75/q25=7.14 mlp_w2:H=0.9226,top10E=0.09,eRank=476.8,q75/q25=8.17 vo_prod:H=0.5150,top10E=0.62,eRank=63.4,q75/q25=315.99 train_time:162868ms step_avg:90.48ms +[2025-08-22 12:59:30] [Rank 0] step:1801/10000 train_time:162888ms step_avg:90.44ms +[2025-08-22 12:59:30] [Rank 0] step:1801/10000 train_time:162888ms step_avg:90.44ms +[2025-08-22 12:59:31] [Rank 0] step:1821/10000 train_time:164703ms step_avg:90.45ms +[2025-08-22 12:59:31] [Rank 0] step:1821/10000 train_time:164703ms step_avg:90.45ms +[2025-08-22 12:59:33] [Rank 0] step:1841/10000 train_time:166523ms step_avg:90.45ms 
+[2025-08-22 12:59:33] [Rank 0] step:1841/10000 train_time:166523ms step_avg:90.45ms +[2025-08-22 12:59:35] [Rank 0] step:1861/10000 train_time:168342ms step_avg:90.46ms +[2025-08-22 12:59:35] [Rank 0] step:1861/10000 train_time:168342ms step_avg:90.46ms +[2025-08-22 12:59:37] [Rank 0] step:1881/10000 train_time:170162ms step_avg:90.46ms +[2025-08-22 12:59:37] [Rank 0] step:1881/10000 train_time:170162ms step_avg:90.46ms +[2025-08-22 12:59:39] [Rank 0] step:1901/10000 train_time:171985ms step_avg:90.47ms +[2025-08-22 12:59:39] [Rank 0] step:1901/10000 train_time:171985ms step_avg:90.47ms +[2025-08-22 12:59:40] [Rank 0] step:1921/10000 train_time:173810ms step_avg:90.48ms +[2025-08-22 12:59:40] [Rank 0] step:1921/10000 train_time:173810ms step_avg:90.48ms +[2025-08-22 12:59:42] [Rank 0] step:1941/10000 train_time:175634ms step_avg:90.49ms +[2025-08-22 12:59:42] [Rank 0] step:1941/10000 train_time:175634ms step_avg:90.49ms +[2025-08-22 12:59:44] [Rank 0] step:1961/10000 train_time:177457ms step_avg:90.49ms +[2025-08-22 12:59:44] [Rank 0] step:1961/10000 train_time:177457ms step_avg:90.49ms +[2025-08-22 12:59:46] [Rank 0] step:1981/10000 train_time:179281ms step_avg:90.50ms +[2025-08-22 12:59:46] [Rank 0] step:1981/10000 train_time:179281ms step_avg:90.50ms +[2025-08-22 12:59:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 12:59:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:00:01] [Rank 0] PRINT: step:2000/10000 val_loss:4.2937 svd_entropy: attn_qk:H=0.4514,top10E=0.69,eRank=44.7,q75/q25=109.80 attn_vo:H=0.7273,top10E=0.33,eRank=192.6,q75/q25=25.57 mlp_w1:H=0.9165,top10E=0.09,eRank=456.7,q75/q25=7.35 mlp_w2:H=0.9228,top10E=0.09,eRank=477.7,q75/q25=8.45 vo_prod:H=0.5266,top10E=0.61,eRank=65.9,q75/q25=323.66 train_time:181111ms step_avg:90.56ms +[2025-08-22 13:00:01] [Rank 0] PRINT: step:2000/10000 val_loss:4.2937 svd_entropy: attn_qk:H=0.4514,top10E=0.69,eRank=44.7,q75/q25=109.80 attn_vo:H=0.7273,top10E=0.33,eRank=192.6,q75/q25=25.57 mlp_w1:H=0.9165,top10E=0.09,eRank=456.7,q75/q25=7.35 mlp_w2:H=0.9228,top10E=0.09,eRank=477.7,q75/q25=8.45 vo_prod:H=0.5266,top10E=0.61,eRank=65.9,q75/q25=323.66 train_time:181111ms step_avg:90.56ms +[2025-08-22 13:00:01] [Rank 0] step:2001/10000 train_time:181131ms step_avg:90.52ms +[2025-08-22 13:00:01] [Rank 0] step:2001/10000 train_time:181131ms step_avg:90.52ms +[2025-08-22 13:00:03] [Rank 0] step:2021/10000 train_time:182950ms step_avg:90.52ms +[2025-08-22 13:00:03] [Rank 0] step:2021/10000 train_time:182950ms step_avg:90.52ms +[2025-08-22 13:00:06] [Rank 0] step:2041/10000 train_time:185435ms step_avg:90.85ms +[2025-08-22 13:00:06] [Rank 0] step:2041/10000 train_time:185435ms step_avg:90.85ms +[2025-08-22 13:00:07] [Rank 0] step:2061/10000 train_time:187254ms step_avg:90.86ms +[2025-08-22 13:00:07] [Rank 0] step:2061/10000 train_time:187254ms step_avg:90.86ms +[2025-08-22 13:00:09] [Rank 0] step:2081/10000 train_time:189074ms step_avg:90.86ms +[2025-08-22 13:00:09] [Rank 0] step:2081/10000 train_time:189074ms step_avg:90.86ms +[2025-08-22 13:00:11] [Rank 0] step:2101/10000 train_time:190895ms step_avg:90.86ms +[2025-08-22 13:00:11] [Rank 0] step:2101/10000 train_time:190895ms step_avg:90.86ms +[2025-08-22 13:00:13] [Rank 0] step:2121/10000 train_time:192716ms step_avg:90.86ms +[2025-08-22 13:00:13] [Rank 0] step:2121/10000 train_time:192716ms step_avg:90.86ms +[2025-08-22 13:00:15] 
[Rank 0] step:2141/10000 train_time:194540ms step_avg:90.86ms +[2025-08-22 13:00:15] [Rank 0] step:2141/10000 train_time:194540ms step_avg:90.86ms +[2025-08-22 13:00:17] [Rank 0] step:2161/10000 train_time:196364ms step_avg:90.87ms +[2025-08-22 13:00:17] [Rank 0] step:2161/10000 train_time:196364ms step_avg:90.87ms +[2025-08-22 13:00:18] [Rank 0] step:2181/10000 train_time:198188ms step_avg:90.87ms +[2025-08-22 13:00:18] [Rank 0] step:2181/10000 train_time:198188ms step_avg:90.87ms +[2025-08-22 13:00:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:00:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:00:34] [Rank 0] PRINT: step:2200/10000 val_loss:4.2497 svd_entropy: attn_qk:H=0.4543,top10E=0.69,eRank=45.5,q75/q25=114.68 attn_vo:H=0.7301,top10E=0.33,eRank=193.5,q75/q25=25.51 mlp_w1:H=0.9162,top10E=0.08,eRank=456.2,q75/q25=7.62 mlp_w2:H=0.9228,top10E=0.08,eRank=477.7,q75/q25=8.84 vo_prod:H=0.5334,top10E=0.60,eRank=67.4,q75/q25=336.35 train_time:200017ms step_avg:90.92ms +[2025-08-22 13:00:34] [Rank 0] PRINT: step:2200/10000 val_loss:4.2497 svd_entropy: attn_qk:H=0.4543,top10E=0.69,eRank=45.5,q75/q25=114.68 attn_vo:H=0.7301,top10E=0.33,eRank=193.5,q75/q25=25.51 mlp_w1:H=0.9162,top10E=0.08,eRank=456.2,q75/q25=7.62 mlp_w2:H=0.9228,top10E=0.08,eRank=477.7,q75/q25=8.84 vo_prod:H=0.5334,top10E=0.60,eRank=67.4,q75/q25=336.35 train_time:200017ms step_avg:90.92ms +[2025-08-22 13:00:34] [Rank 0] step:2201/10000 train_time:200038ms step_avg:90.88ms +[2025-08-22 13:00:34] [Rank 0] step:2201/10000 train_time:200038ms step_avg:90.88ms +[2025-08-22 13:00:36] [Rank 0] step:2221/10000 train_time:201854ms step_avg:90.88ms +[2025-08-22 13:00:36] [Rank 0] step:2221/10000 train_time:201854ms step_avg:90.88ms +[2025-08-22 13:00:37] [Rank 0] step:2241/10000 train_time:203709ms step_avg:90.90ms 
+[2025-08-22 13:00:37] [Rank 0] step:2241/10000 train_time:203709ms step_avg:90.90ms +[2025-08-22 13:00:39] [Rank 0] step:2261/10000 train_time:205572ms step_avg:90.92ms +[2025-08-22 13:00:39] [Rank 0] step:2261/10000 train_time:205572ms step_avg:90.92ms +[2025-08-22 13:00:41] [Rank 0] step:2281/10000 train_time:207439ms step_avg:90.94ms +[2025-08-22 13:00:41] [Rank 0] step:2281/10000 train_time:207439ms step_avg:90.94ms +[2025-08-22 13:00:43] [Rank 0] step:2301/10000 train_time:209304ms step_avg:90.96ms +[2025-08-22 13:00:43] [Rank 0] step:2301/10000 train_time:209304ms step_avg:90.96ms +[2025-08-22 13:00:45] [Rank 0] step:2321/10000 train_time:211169ms step_avg:90.98ms +[2025-08-22 13:00:45] [Rank 0] step:2321/10000 train_time:211169ms step_avg:90.98ms +[2025-08-22 13:00:47] [Rank 0] step:2341/10000 train_time:213034ms step_avg:91.00ms +[2025-08-22 13:00:47] [Rank 0] step:2341/10000 train_time:213034ms step_avg:91.00ms +[2025-08-22 13:00:49] [Rank 0] step:2361/10000 train_time:214902ms step_avg:91.02ms +[2025-08-22 13:00:49] [Rank 0] step:2361/10000 train_time:214902ms step_avg:91.02ms +[2025-08-22 13:00:51] [Rank 0] step:2381/10000 train_time:216770ms step_avg:91.04ms +[2025-08-22 13:00:51] [Rank 0] step:2381/10000 train_time:216770ms step_avg:91.04ms +[2025-08-22 13:00:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:00:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:01:06] [Rank 0] PRINT: step:2400/10000 val_loss:4.1927 svd_entropy: attn_qk:H=0.4548,top10E=0.69,eRank=46.2,q75/q25=119.17 attn_vo:H=0.7331,top10E=0.32,eRank=194.3,q75/q25=25.70 mlp_w1:H=0.9160,top10E=0.08,eRank=456.2,q75/q25=7.89 mlp_w2:H=0.9233,top10E=0.08,eRank=479.0,q75/q25=9.27 vo_prod:H=0.5458,top10E=0.59,eRank=69.1,q75/q25=335.26 train_time:218645ms step_avg:91.10ms +[2025-08-22 13:01:06] [Rank 0] PRINT: step:2400/10000 val_loss:4.1927 svd_entropy: attn_qk:H=0.4548,top10E=0.69,eRank=46.2,q75/q25=119.17 attn_vo:H=0.7331,top10E=0.32,eRank=194.3,q75/q25=25.70 mlp_w1:H=0.9160,top10E=0.08,eRank=456.2,q75/q25=7.89 mlp_w2:H=0.9233,top10E=0.08,eRank=479.0,q75/q25=9.27 vo_prod:H=0.5458,top10E=0.59,eRank=69.1,q75/q25=335.26 train_time:218645ms step_avg:91.10ms +[2025-08-22 13:01:06] [Rank 0] step:2401/10000 train_time:218665ms step_avg:91.07ms +[2025-08-22 13:01:06] [Rank 0] step:2401/10000 train_time:218665ms step_avg:91.07ms +[2025-08-22 13:01:08] [Rank 0] step:2421/10000 train_time:220536ms step_avg:91.09ms +[2025-08-22 13:01:08] [Rank 0] step:2421/10000 train_time:220536ms step_avg:91.09ms +[2025-08-22 13:01:10] [Rank 0] step:2441/10000 train_time:222403ms step_avg:91.11ms +[2025-08-22 13:01:10] [Rank 0] step:2441/10000 train_time:222403ms step_avg:91.11ms +[2025-08-22 13:01:12] [Rank 0] step:2461/10000 train_time:224268ms step_avg:91.13ms +[2025-08-22 13:01:12] [Rank 0] step:2461/10000 train_time:224268ms step_avg:91.13ms +[2025-08-22 13:01:14] [Rank 0] step:2481/10000 train_time:226137ms step_avg:91.15ms +[2025-08-22 13:01:14] [Rank 0] step:2481/10000 train_time:226137ms step_avg:91.15ms +[2025-08-22 13:01:15] [Rank 0] step:2501/10000 train_time:228008ms step_avg:91.17ms +[2025-08-22 13:01:15] [Rank 0] step:2501/10000 train_time:228008ms step_avg:91.17ms +[2025-08-22 13:01:17] [Rank 0] step:2521/10000 train_time:229880ms step_avg:91.19ms +[2025-08-22 13:01:17] [Rank 0] step:2521/10000 train_time:229880ms step_avg:91.19ms +[2025-08-22 13:01:19] 
[Rank 0] step:2541/10000 train_time:231751ms step_avg:91.20ms +[2025-08-22 13:01:19] [Rank 0] step:2541/10000 train_time:231751ms step_avg:91.20ms +[2025-08-22 13:01:21] [Rank 0] step:2561/10000 train_time:233623ms step_avg:91.22ms +[2025-08-22 13:01:21] [Rank 0] step:2561/10000 train_time:233623ms step_avg:91.22ms +[2025-08-22 13:01:23] [Rank 0] step:2581/10000 train_time:235497ms step_avg:91.24ms +[2025-08-22 13:01:23] [Rank 0] step:2581/10000 train_time:235497ms step_avg:91.24ms +[2025-08-22 13:01:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:01:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:01:38] [Rank 0] PRINT: step:2600/10000 val_loss:4.1663 svd_entropy: attn_qk:H=0.4564,top10E=0.69,eRank=46.9,q75/q25=125.14 attn_vo:H=0.7374,top10E=0.31,eRank=196.2,q75/q25=25.85 mlp_w1:H=0.9165,top10E=0.08,eRank=457.8,q75/q25=8.12 mlp_w2:H=0.9241,top10E=0.08,eRank=481.2,q75/q25=9.63 vo_prod:H=0.5551,top10E=0.57,eRank=70.9,q75/q25=344.29 train_time:237376ms step_avg:91.30ms +[2025-08-22 13:01:38] [Rank 0] PRINT: step:2600/10000 val_loss:4.1663 svd_entropy: attn_qk:H=0.4564,top10E=0.69,eRank=46.9,q75/q25=125.14 attn_vo:H=0.7374,top10E=0.31,eRank=196.2,q75/q25=25.85 mlp_w1:H=0.9165,top10E=0.08,eRank=457.8,q75/q25=8.12 mlp_w2:H=0.9241,top10E=0.08,eRank=481.2,q75/q25=9.63 vo_prod:H=0.5551,top10E=0.57,eRank=70.9,q75/q25=344.29 train_time:237376ms step_avg:91.30ms +[2025-08-22 13:01:38] [Rank 0] step:2601/10000 train_time:237397ms step_avg:91.27ms +[2025-08-22 13:01:38] [Rank 0] step:2601/10000 train_time:237397ms step_avg:91.27ms +[2025-08-22 13:01:40] [Rank 0] step:2621/10000 train_time:239261ms step_avg:91.29ms +[2025-08-22 13:01:40] [Rank 0] step:2621/10000 train_time:239261ms step_avg:91.29ms +[2025-08-22 13:01:42] [Rank 0] step:2641/10000 train_time:241125ms step_avg:91.30ms 
+[2025-08-22 13:01:42] [Rank 0] step:2641/10000 train_time:241125ms step_avg:91.30ms +[2025-08-22 13:01:44] [Rank 0] step:2661/10000 train_time:242992ms step_avg:91.32ms +[2025-08-22 13:01:44] [Rank 0] step:2661/10000 train_time:242992ms step_avg:91.32ms +[2025-08-22 13:01:46] [Rank 0] step:2681/10000 train_time:244859ms step_avg:91.33ms +[2025-08-22 13:01:46] [Rank 0] step:2681/10000 train_time:244859ms step_avg:91.33ms +[2025-08-22 13:01:48] [Rank 0] step:2701/10000 train_time:246726ms step_avg:91.35ms +[2025-08-22 13:01:48] [Rank 0] step:2701/10000 train_time:246726ms step_avg:91.35ms +[2025-08-22 13:01:50] [Rank 0] step:2721/10000 train_time:248593ms step_avg:91.36ms +[2025-08-22 13:01:50] [Rank 0] step:2721/10000 train_time:248593ms step_avg:91.36ms +[2025-08-22 13:01:51] [Rank 0] step:2741/10000 train_time:250462ms step_avg:91.38ms +[2025-08-22 13:01:51] [Rank 0] step:2741/10000 train_time:250462ms step_avg:91.38ms +[2025-08-22 13:01:53] [Rank 0] step:2761/10000 train_time:252330ms step_avg:91.39ms +[2025-08-22 13:01:53] [Rank 0] step:2761/10000 train_time:252330ms step_avg:91.39ms +[2025-08-22 13:01:55] [Rank 0] step:2781/10000 train_time:254200ms step_avg:91.41ms +[2025-08-22 13:01:55] [Rank 0] step:2781/10000 train_time:254200ms step_avg:91.41ms +[2025-08-22 13:01:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:01:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:02:11] [Rank 0] PRINT: step:2800/10000 val_loss:4.1464 svd_entropy: attn_qk:H=0.4586,top10E=0.68,eRank=47.5,q75/q25=130.44 attn_vo:H=0.7420,top10E=0.31,eRank=198.3,q75/q25=25.71 mlp_w1:H=0.9173,top10E=0.08,eRank=459.9,q75/q25=8.39 mlp_w2:H=0.9251,top10E=0.08,eRank=484.0,q75/q25=10.05 vo_prod:H=0.5642,top10E=0.56,eRank=72.8,q75/q25=351.65 train_time:256077ms step_avg:91.46ms +[2025-08-22 13:02:11] [Rank 0] PRINT: step:2800/10000 val_loss:4.1464 svd_entropy: attn_qk:H=0.4586,top10E=0.68,eRank=47.5,q75/q25=130.44 attn_vo:H=0.7420,top10E=0.31,eRank=198.3,q75/q25=25.71 mlp_w1:H=0.9173,top10E=0.08,eRank=459.9,q75/q25=8.39 mlp_w2:H=0.9251,top10E=0.08,eRank=484.0,q75/q25=10.05 vo_prod:H=0.5642,top10E=0.56,eRank=72.8,q75/q25=351.65 train_time:256077ms step_avg:91.46ms +[2025-08-22 13:02:11] [Rank 0] step:2801/10000 train_time:256096ms step_avg:91.43ms +[2025-08-22 13:02:11] [Rank 0] step:2801/10000 train_time:256096ms step_avg:91.43ms +[2025-08-22 13:02:13] [Rank 0] step:2821/10000 train_time:258050ms step_avg:91.47ms +[2025-08-22 13:02:13] [Rank 0] step:2821/10000 train_time:258050ms step_avg:91.47ms +[2025-08-22 13:02:15] [Rank 0] step:2841/10000 train_time:259915ms step_avg:91.49ms +[2025-08-22 13:02:15] [Rank 0] step:2841/10000 train_time:259915ms step_avg:91.49ms +[2025-08-22 13:02:16] [Rank 0] step:2861/10000 train_time:261782ms step_avg:91.50ms +[2025-08-22 13:02:16] [Rank 0] step:2861/10000 train_time:261782ms step_avg:91.50ms +[2025-08-22 13:02:18] [Rank 0] step:2881/10000 train_time:263648ms step_avg:91.51ms +[2025-08-22 13:02:18] [Rank 0] step:2881/10000 train_time:263648ms step_avg:91.51ms +[2025-08-22 13:02:20] [Rank 0] step:2901/10000 train_time:265515ms step_avg:91.53ms +[2025-08-22 13:02:20] [Rank 0] step:2901/10000 train_time:265515ms step_avg:91.53ms +[2025-08-22 13:02:22] [Rank 0] step:2921/10000 train_time:267382ms step_avg:91.54ms +[2025-08-22 13:02:22] [Rank 0] step:2921/10000 train_time:267382ms step_avg:91.54ms +[2025-08-22 13:02:24] 
[Rank 0] step:2941/10000 train_time:269251ms step_avg:91.55ms +[2025-08-22 13:02:24] [Rank 0] step:2941/10000 train_time:269251ms step_avg:91.55ms +[2025-08-22 13:02:26] [Rank 0] step:2961/10000 train_time:271121ms step_avg:91.56ms +[2025-08-22 13:02:26] [Rank 0] step:2961/10000 train_time:271121ms step_avg:91.56ms +[2025-08-22 13:02:28] [Rank 0] step:2981/10000 train_time:272997ms step_avg:91.58ms +[2025-08-22 13:02:28] [Rank 0] step:2981/10000 train_time:272997ms step_avg:91.58ms +[2025-08-22 13:02:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:02:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:02:43] [Rank 0] PRINT: step:3000/10000 val_loss:4.1173 svd_entropy: attn_qk:H=0.4604,top10E=0.68,eRank=48.1,q75/q25=135.04 attn_vo:H=0.7454,top10E=0.30,eRank=199.9,q75/q25=25.64 mlp_w1:H=0.9178,top10E=0.08,eRank=461.4,q75/q25=8.61 mlp_w2:H=0.9259,top10E=0.08,eRank=486.3,q75/q25=10.19 vo_prod:H=0.5726,top10E=0.55,eRank=74.2,q75/q25=351.10 train_time:274881ms step_avg:91.63ms +[2025-08-22 13:02:43] [Rank 0] PRINT: step:3000/10000 val_loss:4.1173 svd_entropy: attn_qk:H=0.4604,top10E=0.68,eRank=48.1,q75/q25=135.04 attn_vo:H=0.7454,top10E=0.30,eRank=199.9,q75/q25=25.64 mlp_w1:H=0.9178,top10E=0.08,eRank=461.4,q75/q25=8.61 mlp_w2:H=0.9259,top10E=0.08,eRank=486.3,q75/q25=10.19 vo_prod:H=0.5726,top10E=0.55,eRank=74.2,q75/q25=351.10 train_time:274881ms step_avg:91.63ms +[2025-08-22 13:02:43] [Rank 0] step:3001/10000 train_time:274901ms step_avg:91.60ms +[2025-08-22 13:02:43] [Rank 0] step:3001/10000 train_time:274901ms step_avg:91.60ms +[2025-08-22 13:02:45] [Rank 0] step:3021/10000 train_time:276773ms step_avg:91.62ms +[2025-08-22 13:02:45] [Rank 0] step:3021/10000 train_time:276773ms step_avg:91.62ms +[2025-08-22 13:02:47] [Rank 0] step:3041/10000 train_time:278644ms step_avg:91.63ms 
+[2025-08-22 13:02:47] [Rank 0] step:3041/10000 train_time:278644ms step_avg:91.63ms +[2025-08-22 13:02:49] [Rank 0] step:3061/10000 train_time:280518ms step_avg:91.64ms +[2025-08-22 13:02:49] [Rank 0] step:3061/10000 train_time:280518ms step_avg:91.64ms +[2025-08-22 13:02:51] [Rank 0] step:3081/10000 train_time:282395ms step_avg:91.66ms +[2025-08-22 13:02:51] [Rank 0] step:3081/10000 train_time:282395ms step_avg:91.66ms +[2025-08-22 13:02:53] [Rank 0] step:3101/10000 train_time:284270ms step_avg:91.67ms +[2025-08-22 13:02:53] [Rank 0] step:3101/10000 train_time:284270ms step_avg:91.67ms +[2025-08-22 13:02:55] [Rank 0] step:3121/10000 train_time:286146ms step_avg:91.68ms +[2025-08-22 13:02:55] [Rank 0] step:3121/10000 train_time:286146ms step_avg:91.68ms +[2025-08-22 13:02:57] [Rank 0] step:3141/10000 train_time:288022ms step_avg:91.70ms +[2025-08-22 13:02:57] [Rank 0] step:3141/10000 train_time:288022ms step_avg:91.70ms +[2025-08-22 13:02:58] [Rank 0] step:3161/10000 train_time:289900ms step_avg:91.71ms +[2025-08-22 13:02:58] [Rank 0] step:3161/10000 train_time:289900ms step_avg:91.71ms +[2025-08-22 13:03:00] [Rank 0] step:3181/10000 train_time:291778ms step_avg:91.73ms +[2025-08-22 13:03:00] [Rank 0] step:3181/10000 train_time:291778ms step_avg:91.73ms +[2025-08-22 13:03:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:03:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:03:16] [Rank 0] PRINT: step:3200/10000 val_loss:4.0939 svd_entropy: attn_qk:H=0.4638,top10E=0.68,eRank=49.0,q75/q25=138.47 attn_vo:H=0.7481,top10E=0.29,eRank=201.0,q75/q25=25.49 mlp_w1:H=0.9178,top10E=0.08,eRank=461.8,q75/q25=8.86 mlp_w2:H=0.9263,top10E=0.08,eRank=487.5,q75/q25=10.30 vo_prod:H=0.5791,top10E=0.54,eRank=75.6,q75/q25=353.94 train_time:293663ms step_avg:91.77ms +[2025-08-22 13:03:16] [Rank 0] PRINT: step:3200/10000 val_loss:4.0939 svd_entropy: attn_qk:H=0.4638,top10E=0.68,eRank=49.0,q75/q25=138.47 attn_vo:H=0.7481,top10E=0.29,eRank=201.0,q75/q25=25.49 mlp_w1:H=0.9178,top10E=0.08,eRank=461.8,q75/q25=8.86 mlp_w2:H=0.9263,top10E=0.08,eRank=487.5,q75/q25=10.30 vo_prod:H=0.5791,top10E=0.54,eRank=75.6,q75/q25=353.94 train_time:293663ms step_avg:91.77ms +[2025-08-22 13:03:16] [Rank 0] step:3201/10000 train_time:293684ms step_avg:91.75ms +[2025-08-22 13:03:16] [Rank 0] step:3201/10000 train_time:293684ms step_avg:91.75ms +[2025-08-22 13:03:18] [Rank 0] step:3221/10000 train_time:295563ms step_avg:91.76ms +[2025-08-22 13:03:18] [Rank 0] step:3221/10000 train_time:295563ms step_avg:91.76ms +[2025-08-22 13:03:20] [Rank 0] step:3241/10000 train_time:297436ms step_avg:91.77ms +[2025-08-22 13:03:20] [Rank 0] step:3241/10000 train_time:297436ms step_avg:91.77ms +[2025-08-22 13:03:22] [Rank 0] step:3261/10000 train_time:299311ms step_avg:91.79ms +[2025-08-22 13:03:22] [Rank 0] step:3261/10000 train_time:299311ms step_avg:91.79ms +[2025-08-22 13:03:24] [Rank 0] step:3281/10000 train_time:301187ms step_avg:91.80ms +[2025-08-22 13:03:24] [Rank 0] step:3281/10000 train_time:301187ms step_avg:91.80ms +[2025-08-22 13:03:25] [Rank 0] step:3301/10000 train_time:303065ms step_avg:91.81ms +[2025-08-22 13:03:25] [Rank 0] step:3301/10000 train_time:303065ms step_avg:91.81ms +[2025-08-22 13:03:27] [Rank 0] step:3321/10000 train_time:304945ms step_avg:91.82ms +[2025-08-22 13:03:27] [Rank 0] step:3321/10000 train_time:304945ms step_avg:91.82ms +[2025-08-22 13:03:29] 
[Rank 0] step:3341/10000 train_time:306825ms step_avg:91.84ms +[2025-08-22 13:03:29] [Rank 0] step:3341/10000 train_time:306825ms step_avg:91.84ms +[2025-08-22 13:03:31] [Rank 0] step:3361/10000 train_time:308705ms step_avg:91.85ms +[2025-08-22 13:03:31] [Rank 0] step:3361/10000 train_time:308705ms step_avg:91.85ms +[2025-08-22 13:03:33] [Rank 0] step:3381/10000 train_time:310585ms step_avg:91.86ms +[2025-08-22 13:03:33] [Rank 0] step:3381/10000 train_time:310585ms step_avg:91.86ms +[2025-08-22 13:03:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:03:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:03:48] [Rank 0] PRINT: step:3400/10000 val_loss:4.0716 svd_entropy: attn_qk:H=0.4691,top10E=0.67,eRank=49.7,q75/q25=143.85 attn_vo:H=0.7519,top10E=0.29,eRank=203.2,q75/q25=25.44 mlp_w1:H=0.9185,top10E=0.08,eRank=463.7,q75/q25=9.05 mlp_w2:H=0.9271,top10E=0.08,eRank=489.8,q75/q25=10.45 vo_prod:H=0.5879,top10E=0.52,eRank=77.5,q75/q25=356.42 train_time:312473ms step_avg:91.90ms +[2025-08-22 13:03:48] [Rank 0] PRINT: step:3400/10000 val_loss:4.0716 svd_entropy: attn_qk:H=0.4691,top10E=0.67,eRank=49.7,q75/q25=143.85 attn_vo:H=0.7519,top10E=0.29,eRank=203.2,q75/q25=25.44 mlp_w1:H=0.9185,top10E=0.08,eRank=463.7,q75/q25=9.05 mlp_w2:H=0.9271,top10E=0.08,eRank=489.8,q75/q25=10.45 vo_prod:H=0.5879,top10E=0.52,eRank=77.5,q75/q25=356.42 train_time:312473ms step_avg:91.90ms +[2025-08-22 13:03:49] [Rank 0] step:3401/10000 train_time:312494ms step_avg:91.88ms +[2025-08-22 13:03:49] [Rank 0] step:3401/10000 train_time:312494ms step_avg:91.88ms +[2025-08-22 13:03:50] [Rank 0] step:3421/10000 train_time:314355ms step_avg:91.89ms +[2025-08-22 13:03:50] [Rank 0] step:3421/10000 train_time:314355ms step_avg:91.89ms +[2025-08-22 13:03:52] [Rank 0] step:3441/10000 train_time:316228ms step_avg:91.90ms 
+[2025-08-22 13:03:52] [Rank 0] step:3441/10000 train_time:316228ms step_avg:91.90ms +[2025-08-22 13:03:54] [Rank 0] step:3461/10000 train_time:318103ms step_avg:91.91ms +[2025-08-22 13:03:54] [Rank 0] step:3461/10000 train_time:318103ms step_avg:91.91ms +[2025-08-22 13:03:56] [Rank 0] step:3481/10000 train_time:319978ms step_avg:91.92ms +[2025-08-22 13:03:56] [Rank 0] step:3481/10000 train_time:319978ms step_avg:91.92ms +[2025-08-22 13:03:58] [Rank 0] step:3501/10000 train_time:321854ms step_avg:91.93ms +[2025-08-22 13:03:58] [Rank 0] step:3501/10000 train_time:321854ms step_avg:91.93ms +[2025-08-22 13:04:00] [Rank 0] step:3521/10000 train_time:323730ms step_avg:91.94ms +[2025-08-22 13:04:00] [Rank 0] step:3521/10000 train_time:323730ms step_avg:91.94ms +[2025-08-22 13:04:02] [Rank 0] step:3541/10000 train_time:325606ms step_avg:91.95ms +[2025-08-22 13:04:02] [Rank 0] step:3541/10000 train_time:325606ms step_avg:91.95ms +[2025-08-22 13:04:04] [Rank 0] step:3561/10000 train_time:327483ms step_avg:91.96ms +[2025-08-22 13:04:04] [Rank 0] step:3561/10000 train_time:327483ms step_avg:91.96ms +[2025-08-22 13:04:05] [Rank 0] step:3581/10000 train_time:329360ms step_avg:91.97ms +[2025-08-22 13:04:05] [Rank 0] step:3581/10000 train_time:329360ms step_avg:91.97ms +[2025-08-22 13:04:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:04:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:04:21] [Rank 0] PRINT: step:3600/10000 val_loss:4.0672 svd_entropy: attn_qk:H=0.4713,top10E=0.67,eRank=50.4,q75/q25=146.47 attn_vo:H=0.7559,top10E=0.28,eRank=205.7,q75/q25=25.51 mlp_w1:H=0.9193,top10E=0.08,eRank=465.9,q75/q25=9.25 mlp_w2:H=0.9279,top10E=0.08,eRank=492.0,q75/q25=10.47 vo_prod:H=0.5964,top10E=0.50,eRank=79.5,q75/q25=362.06 train_time:331324ms step_avg:92.03ms +[2025-08-22 13:04:21] [Rank 0] PRINT: step:3600/10000 val_loss:4.0672 svd_entropy: attn_qk:H=0.4713,top10E=0.67,eRank=50.4,q75/q25=146.47 attn_vo:H=0.7559,top10E=0.28,eRank=205.7,q75/q25=25.51 mlp_w1:H=0.9193,top10E=0.08,eRank=465.9,q75/q25=9.25 mlp_w2:H=0.9279,top10E=0.08,eRank=492.0,q75/q25=10.47 vo_prod:H=0.5964,top10E=0.50,eRank=79.5,q75/q25=362.06 train_time:331324ms step_avg:92.03ms +[2025-08-22 13:04:21] [Rank 0] step:3601/10000 train_time:331345ms step_avg:92.01ms +[2025-08-22 13:04:21] [Rank 0] step:3601/10000 train_time:331345ms step_avg:92.01ms +[2025-08-22 13:04:23] [Rank 0] step:3621/10000 train_time:333205ms step_avg:92.02ms +[2025-08-22 13:04:23] [Rank 0] step:3621/10000 train_time:333205ms step_avg:92.02ms +[2025-08-22 13:04:25] [Rank 0] step:3641/10000 train_time:335076ms step_avg:92.03ms +[2025-08-22 13:04:25] [Rank 0] step:3641/10000 train_time:335076ms step_avg:92.03ms +[2025-08-22 13:04:27] [Rank 0] step:3661/10000 train_time:336950ms step_avg:92.04ms +[2025-08-22 13:04:27] [Rank 0] step:3661/10000 train_time:336950ms step_avg:92.04ms +[2025-08-22 13:04:28] [Rank 0] step:3681/10000 train_time:338823ms step_avg:92.05ms +[2025-08-22 13:04:28] [Rank 0] step:3681/10000 train_time:338823ms step_avg:92.05ms +[2025-08-22 13:04:30] [Rank 0] step:3701/10000 train_time:340698ms step_avg:92.06ms +[2025-08-22 13:04:30] [Rank 0] step:3701/10000 train_time:340698ms step_avg:92.06ms +[2025-08-22 13:04:32] [Rank 0] step:3721/10000 train_time:342603ms step_avg:92.07ms +[2025-08-22 13:04:32] [Rank 0] step:3721/10000 train_time:342603ms step_avg:92.07ms +[2025-08-22 13:04:34] 
[Rank 0] step:3741/10000 train_time:344516ms step_avg:92.09ms +[2025-08-22 13:04:34] [Rank 0] step:3741/10000 train_time:344516ms step_avg:92.09ms +[2025-08-22 13:04:36] [Rank 0] step:3761/10000 train_time:346429ms step_avg:92.11ms +[2025-08-22 13:04:36] [Rank 0] step:3761/10000 train_time:346429ms step_avg:92.11ms +[2025-08-22 13:04:38] [Rank 0] step:3781/10000 train_time:348344ms step_avg:92.13ms +[2025-08-22 13:04:38] [Rank 0] step:3781/10000 train_time:348344ms step_avg:92.13ms +[2025-08-22 13:04:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:04:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:04:53] [Rank 0] PRINT: step:3800/10000 val_loss:4.0338 svd_entropy: attn_qk:H=0.4746,top10E=0.67,eRank=51.2,q75/q25=149.82 attn_vo:H=0.7594,top10E=0.27,eRank=207.9,q75/q25=25.51 mlp_w1:H=0.9201,top10E=0.08,eRank=468.0,q75/q25=9.34 mlp_w2:H=0.9286,top10E=0.07,eRank=494.1,q75/q25=10.34 vo_prod:H=0.6033,top10E=0.49,eRank=81.5,q75/q25=370.24 train_time:350267ms step_avg:92.18ms +[2025-08-22 13:04:53] [Rank 0] PRINT: step:3800/10000 val_loss:4.0338 svd_entropy: attn_qk:H=0.4746,top10E=0.67,eRank=51.2,q75/q25=149.82 attn_vo:H=0.7594,top10E=0.27,eRank=207.9,q75/q25=25.51 mlp_w1:H=0.9201,top10E=0.08,eRank=468.0,q75/q25=9.34 mlp_w2:H=0.9286,top10E=0.07,eRank=494.1,q75/q25=10.34 vo_prod:H=0.6033,top10E=0.49,eRank=81.5,q75/q25=370.24 train_time:350267ms step_avg:92.18ms +[2025-08-22 13:04:54] [Rank 0] step:3801/10000 train_time:350287ms step_avg:92.16ms +[2025-08-22 13:04:54] [Rank 0] step:3801/10000 train_time:350287ms step_avg:92.16ms +[2025-08-22 13:04:56] [Rank 0] step:3821/10000 train_time:352200ms step_avg:92.17ms +[2025-08-22 13:04:56] [Rank 0] step:3821/10000 train_time:352200ms step_avg:92.17ms +[2025-08-22 13:04:57] [Rank 0] step:3841/10000 train_time:354118ms step_avg:92.19ms 
+[2025-08-22 13:04:57] [Rank 0] step:3841/10000 train_time:354118ms step_avg:92.19ms +[2025-08-22 13:04:59] [Rank 0] step:3861/10000 train_time:356033ms step_avg:92.21ms +[2025-08-22 13:04:59] [Rank 0] step:3861/10000 train_time:356033ms step_avg:92.21ms +[2025-08-22 13:05:01] [Rank 0] step:3881/10000 train_time:357948ms step_avg:92.23ms +[2025-08-22 13:05:01] [Rank 0] step:3881/10000 train_time:357948ms step_avg:92.23ms +[2025-08-22 13:05:03] [Rank 0] step:3901/10000 train_time:359863ms step_avg:92.25ms +[2025-08-22 13:05:03] [Rank 0] step:3901/10000 train_time:359863ms step_avg:92.25ms +[2025-08-22 13:05:05] [Rank 0] step:3921/10000 train_time:361779ms step_avg:92.27ms +[2025-08-22 13:05:05] [Rank 0] step:3921/10000 train_time:361779ms step_avg:92.27ms +[2025-08-22 13:05:07] [Rank 0] step:3941/10000 train_time:363697ms step_avg:92.29ms +[2025-08-22 13:05:07] [Rank 0] step:3941/10000 train_time:363697ms step_avg:92.29ms +[2025-08-22 13:05:09] [Rank 0] step:3961/10000 train_time:365760ms step_avg:92.34ms +[2025-08-22 13:05:09] [Rank 0] step:3961/10000 train_time:365760ms step_avg:92.34ms +[2025-08-22 13:05:11] [Rank 0] step:3981/10000 train_time:367689ms step_avg:92.36ms +[2025-08-22 13:05:11] [Rank 0] step:3981/10000 train_time:367689ms step_avg:92.36ms +[2025-08-22 13:05:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:05:13] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:05:27] [Rank 0] PRINT: step:4000/10000 val_loss:4.0139 svd_entropy: attn_qk:H=0.4774,top10E=0.66,eRank=52.0,q75/q25=150.11 attn_vo:H=0.7624,top10E=0.27,eRank=209.9,q75/q25=25.68 mlp_w1:H=0.9208,top10E=0.08,eRank=470.0,q75/q25=9.38 mlp_w2:H=0.9294,top10E=0.07,eRank=496.1,q75/q25=10.23 vo_prod:H=0.6095,top10E=0.48,eRank=83.3,q75/q25=378.77 train_time:369610ms step_avg:92.40ms +[2025-08-22 13:05:27] [Rank 0] PRINT: step:4000/10000 val_loss:4.0139 svd_entropy: attn_qk:H=0.4774,top10E=0.66,eRank=52.0,q75/q25=150.11 attn_vo:H=0.7624,top10E=0.27,eRank=209.9,q75/q25=25.68 mlp_w1:H=0.9208,top10E=0.08,eRank=470.0,q75/q25=9.38 mlp_w2:H=0.9294,top10E=0.07,eRank=496.1,q75/q25=10.23 vo_prod:H=0.6095,top10E=0.48,eRank=83.3,q75/q25=378.77 train_time:369610ms step_avg:92.40ms +[2025-08-22 13:05:27] [Rank 0] step:4001/10000 train_time:369631ms step_avg:92.38ms +[2025-08-22 13:05:27] [Rank 0] step:4001/10000 train_time:369631ms step_avg:92.38ms +[2025-08-22 13:05:29] [Rank 0] step:4021/10000 train_time:371551ms step_avg:92.40ms +[2025-08-22 13:05:29] [Rank 0] step:4021/10000 train_time:371551ms step_avg:92.40ms +[2025-08-22 13:05:30] [Rank 0] step:4041/10000 train_time:373459ms step_avg:92.42ms +[2025-08-22 13:05:30] [Rank 0] step:4041/10000 train_time:373459ms step_avg:92.42ms +[2025-08-22 13:05:32] [Rank 0] step:4061/10000 train_time:375370ms step_avg:92.43ms +[2025-08-22 13:05:32] [Rank 0] step:4061/10000 train_time:375370ms step_avg:92.43ms +[2025-08-22 13:05:35] [Rank 0] step:4081/10000 train_time:377932ms step_avg:92.61ms +[2025-08-22 13:05:35] [Rank 0] step:4081/10000 train_time:377932ms step_avg:92.61ms +[2025-08-22 13:05:37] [Rank 0] step:4101/10000 train_time:379841ms step_avg:92.62ms +[2025-08-22 13:05:37] [Rank 0] step:4101/10000 train_time:379841ms step_avg:92.62ms +[2025-08-22 13:05:39] [Rank 0] step:4121/10000 train_time:381751ms step_avg:92.64ms +[2025-08-22 13:05:39] [Rank 0] step:4121/10000 train_time:381751ms step_avg:92.64ms +[2025-08-22 13:05:41] 
[Rank 0] step:4141/10000 train_time:383664ms step_avg:92.65ms +[2025-08-22 13:05:41] [Rank 0] step:4141/10000 train_time:383664ms step_avg:92.65ms +[2025-08-22 13:05:43] [Rank 0] step:4161/10000 train_time:385575ms step_avg:92.66ms +[2025-08-22 13:05:43] [Rank 0] step:4161/10000 train_time:385575ms step_avg:92.66ms +[2025-08-22 13:05:45] [Rank 0] step:4181/10000 train_time:387489ms step_avg:92.68ms +[2025-08-22 13:05:45] [Rank 0] step:4181/10000 train_time:387489ms step_avg:92.68ms +[2025-08-22 13:05:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:05:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:06:00] [Rank 0] PRINT: step:4200/10000 val_loss:3.9996 svd_entropy: attn_qk:H=0.4785,top10E=0.66,eRank=52.9,q75/q25=149.64 attn_vo:H=0.7656,top10E=0.26,eRank=212.1,q75/q25=25.64 mlp_w1:H=0.9215,top10E=0.07,eRank=472.0,q75/q25=9.45 mlp_w2:H=0.9301,top10E=0.07,eRank=498.3,q75/q25=10.05 vo_prod:H=0.6158,top10E=0.46,eRank=85.3,q75/q25=386.65 train_time:389408ms step_avg:92.72ms +[2025-08-22 13:06:00] [Rank 0] PRINT: step:4200/10000 val_loss:3.9996 svd_entropy: attn_qk:H=0.4785,top10E=0.66,eRank=52.9,q75/q25=149.64 attn_vo:H=0.7656,top10E=0.26,eRank=212.1,q75/q25=25.64 mlp_w1:H=0.9215,top10E=0.07,eRank=472.0,q75/q25=9.45 mlp_w2:H=0.9301,top10E=0.07,eRank=498.3,q75/q25=10.05 vo_prod:H=0.6158,top10E=0.46,eRank=85.3,q75/q25=386.65 train_time:389408ms step_avg:92.72ms +[2025-08-22 13:06:00] [Rank 0] step:4201/10000 train_time:389429ms step_avg:92.70ms +[2025-08-22 13:06:00] [Rank 0] step:4201/10000 train_time:389429ms step_avg:92.70ms +[2025-08-22 13:06:02] [Rank 0] step:4221/10000 train_time:391330ms step_avg:92.71ms +[2025-08-22 13:06:02] [Rank 0] step:4221/10000 train_time:391330ms step_avg:92.71ms +[2025-08-22 13:06:04] [Rank 0] step:4241/10000 train_time:393241ms step_avg:92.72ms 
+[2025-08-22 13:06:04] [Rank 0] step:4241/10000 train_time:393241ms step_avg:92.72ms +[2025-08-22 13:06:06] [Rank 0] step:4261/10000 train_time:395148ms step_avg:92.74ms +[2025-08-22 13:06:06] [Rank 0] step:4261/10000 train_time:395148ms step_avg:92.74ms +[2025-08-22 13:06:08] [Rank 0] step:4281/10000 train_time:397101ms step_avg:92.76ms +[2025-08-22 13:06:08] [Rank 0] step:4281/10000 train_time:397101ms step_avg:92.76ms +[2025-08-22 13:06:10] [Rank 0] step:4301/10000 train_time:399009ms step_avg:92.77ms +[2025-08-22 13:06:10] [Rank 0] step:4301/10000 train_time:399009ms step_avg:92.77ms +[2025-08-22 13:06:12] [Rank 0] step:4321/10000 train_time:400920ms step_avg:92.78ms +[2025-08-22 13:06:12] [Rank 0] step:4321/10000 train_time:400920ms step_avg:92.78ms +[2025-08-22 13:06:14] [Rank 0] step:4341/10000 train_time:402901ms step_avg:92.81ms +[2025-08-22 13:06:14] [Rank 0] step:4341/10000 train_time:402901ms step_avg:92.81ms +[2025-08-22 13:06:16] [Rank 0] step:4361/10000 train_time:404870ms step_avg:92.84ms +[2025-08-22 13:06:16] [Rank 0] step:4361/10000 train_time:404870ms step_avg:92.84ms +[2025-08-22 13:06:17] [Rank 0] step:4381/10000 train_time:406778ms step_avg:92.85ms +[2025-08-22 13:06:17] [Rank 0] step:4381/10000 train_time:406778ms step_avg:92.85ms +[2025-08-22 13:06:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:06:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:06:33] [Rank 0] PRINT: step:4400/10000 val_loss:3.9856 svd_entropy: attn_qk:H=0.4823,top10E=0.65,eRank=53.8,q75/q25=150.40 attn_vo:H=0.7681,top10E=0.25,eRank=213.8,q75/q25=25.77 mlp_w1:H=0.9221,top10E=0.07,eRank=473.9,q75/q25=9.44 mlp_w2:H=0.9307,top10E=0.07,eRank=500.1,q75/q25=9.95 vo_prod:H=0.6207,top10E=0.46,eRank=86.8,q75/q25=401.73 train_time:408696ms step_avg:92.89ms +[2025-08-22 13:06:33] [Rank 0] PRINT: step:4400/10000 val_loss:3.9856 svd_entropy: attn_qk:H=0.4823,top10E=0.65,eRank=53.8,q75/q25=150.40 attn_vo:H=0.7681,top10E=0.25,eRank=213.8,q75/q25=25.77 mlp_w1:H=0.9221,top10E=0.07,eRank=473.9,q75/q25=9.44 mlp_w2:H=0.9307,top10E=0.07,eRank=500.1,q75/q25=9.95 vo_prod:H=0.6207,top10E=0.46,eRank=86.8,q75/q25=401.73 train_time:408696ms step_avg:92.89ms +[2025-08-22 13:06:33] [Rank 0] step:4401/10000 train_time:408716ms step_avg:92.87ms +[2025-08-22 13:06:33] [Rank 0] step:4401/10000 train_time:408716ms step_avg:92.87ms +[2025-08-22 13:06:35] [Rank 0] step:4421/10000 train_time:410611ms step_avg:92.88ms +[2025-08-22 13:06:35] [Rank 0] step:4421/10000 train_time:410611ms step_avg:92.88ms +[2025-08-22 13:06:37] [Rank 0] step:4441/10000 train_time:412517ms step_avg:92.89ms +[2025-08-22 13:06:37] [Rank 0] step:4441/10000 train_time:412517ms step_avg:92.89ms +[2025-08-22 13:06:39] [Rank 0] step:4461/10000 train_time:414429ms step_avg:92.90ms +[2025-08-22 13:06:39] [Rank 0] step:4461/10000 train_time:414429ms step_avg:92.90ms +[2025-08-22 13:06:41] [Rank 0] step:4481/10000 train_time:416347ms step_avg:92.91ms +[2025-08-22 13:06:41] [Rank 0] step:4481/10000 train_time:416347ms step_avg:92.91ms +[2025-08-22 13:06:43] [Rank 0] step:4501/10000 train_time:418263ms step_avg:92.93ms +[2025-08-22 13:06:43] [Rank 0] step:4501/10000 train_time:418263ms step_avg:92.93ms +[2025-08-22 13:06:44] [Rank 0] step:4521/10000 train_time:420181ms step_avg:92.94ms +[2025-08-22 13:06:44] [Rank 0] step:4521/10000 train_time:420181ms step_avg:92.94ms +[2025-08-22 13:06:46] 
[Rank 0] step:4541/10000 train_time:422101ms step_avg:92.95ms +[2025-08-22 13:06:46] [Rank 0] step:4541/10000 train_time:422101ms step_avg:92.95ms +[2025-08-22 13:06:48] [Rank 0] step:4561/10000 train_time:424019ms step_avg:92.97ms +[2025-08-22 13:06:48] [Rank 0] step:4561/10000 train_time:424019ms step_avg:92.97ms +[2025-08-22 13:06:50] [Rank 0] step:4581/10000 train_time:425941ms step_avg:92.98ms +[2025-08-22 13:06:50] [Rank 0] step:4581/10000 train_time:425941ms step_avg:92.98ms +[2025-08-22 13:06:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:06:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:07:06] [Rank 0] PRINT: step:4600/10000 val_loss:3.9626 svd_entropy: attn_qk:H=0.4842,top10E=0.65,eRank=54.5,q75/q25=152.45 attn_vo:H=0.7707,top10E=0.25,eRank=215.7,q75/q25=25.85 mlp_w1:H=0.9228,top10E=0.07,eRank=475.8,q75/q25=9.41 mlp_w2:H=0.9313,top10E=0.07,eRank=501.7,q75/q25=9.76 vo_prod:H=0.6259,top10E=0.44,eRank=88.5,q75/q25=411.63 train_time:427866ms step_avg:93.01ms +[2025-08-22 13:07:06] [Rank 0] PRINT: step:4600/10000 val_loss:3.9626 svd_entropy: attn_qk:H=0.4842,top10E=0.65,eRank=54.5,q75/q25=152.45 attn_vo:H=0.7707,top10E=0.25,eRank=215.7,q75/q25=25.85 mlp_w1:H=0.9228,top10E=0.07,eRank=475.8,q75/q25=9.41 mlp_w2:H=0.9313,top10E=0.07,eRank=501.7,q75/q25=9.76 vo_prod:H=0.6259,top10E=0.44,eRank=88.5,q75/q25=411.63 train_time:427866ms step_avg:93.01ms +[2025-08-22 13:07:06] [Rank 0] step:4601/10000 train_time:427888ms step_avg:93.00ms +[2025-08-22 13:07:06] [Rank 0] step:4601/10000 train_time:427888ms step_avg:93.00ms +[2025-08-22 13:07:08] [Rank 0] step:4621/10000 train_time:429803ms step_avg:93.01ms +[2025-08-22 13:07:08] [Rank 0] step:4621/10000 train_time:429803ms step_avg:93.01ms +[2025-08-22 13:07:10] [Rank 0] step:4641/10000 train_time:431724ms step_avg:93.02ms 
+[2025-08-22 13:07:10] [Rank 0] step:4641/10000 train_time:431724ms step_avg:93.02ms +[2025-08-22 13:07:12] [Rank 0] step:4661/10000 train_time:433645ms step_avg:93.04ms +[2025-08-22 13:07:12] [Rank 0] step:4661/10000 train_time:433645ms step_avg:93.04ms +[2025-08-22 13:07:14] [Rank 0] step:4681/10000 train_time:435566ms step_avg:93.05ms +[2025-08-22 13:07:14] [Rank 0] step:4681/10000 train_time:435566ms step_avg:93.05ms +[2025-08-22 13:07:16] [Rank 0] step:4701/10000 train_time:437563ms step_avg:93.08ms +[2025-08-22 13:07:16] [Rank 0] step:4701/10000 train_time:437563ms step_avg:93.08ms +[2025-08-22 13:07:18] [Rank 0] step:4721/10000 train_time:439529ms step_avg:93.10ms +[2025-08-22 13:07:18] [Rank 0] step:4721/10000 train_time:439529ms step_avg:93.10ms +[2025-08-22 13:07:20] [Rank 0] step:4741/10000 train_time:441453ms step_avg:93.11ms +[2025-08-22 13:07:20] [Rank 0] step:4741/10000 train_time:441453ms step_avg:93.11ms +[2025-08-22 13:07:22] [Rank 0] step:4761/10000 train_time:443378ms step_avg:93.13ms +[2025-08-22 13:07:22] [Rank 0] step:4761/10000 train_time:443378ms step_avg:93.13ms +[2025-08-22 13:07:24] [Rank 0] step:4781/10000 train_time:445301ms step_avg:93.14ms +[2025-08-22 13:07:24] [Rank 0] step:4781/10000 train_time:445301ms step_avg:93.14ms +[2025-08-22 13:07:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:07:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:07:39] [Rank 0] PRINT: step:4800/10000 val_loss:3.9582 svd_entropy: attn_qk:H=0.4871,top10E=0.65,eRank=55.2,q75/q25=152.96 attn_vo:H=0.7733,top10E=0.24,eRank=217.6,q75/q25=25.94 mlp_w1:H=0.9234,top10E=0.07,eRank=477.8,q75/q25=9.35 mlp_w2:H=0.9319,top10E=0.07,eRank=503.4,q75/q25=9.58 vo_prod:H=0.6304,top10E=0.44,eRank=89.9,q75/q25=412.38 train_time:447231ms step_avg:93.17ms +[2025-08-22 13:07:39] [Rank 0] PRINT: step:4800/10000 val_loss:3.9582 svd_entropy: attn_qk:H=0.4871,top10E=0.65,eRank=55.2,q75/q25=152.96 attn_vo:H=0.7733,top10E=0.24,eRank=217.6,q75/q25=25.94 mlp_w1:H=0.9234,top10E=0.07,eRank=477.8,q75/q25=9.35 mlp_w2:H=0.9319,top10E=0.07,eRank=503.4,q75/q25=9.58 vo_prod:H=0.6304,top10E=0.44,eRank=89.9,q75/q25=412.38 train_time:447231ms step_avg:93.17ms +[2025-08-22 13:07:39] [Rank 0] step:4801/10000 train_time:447251ms step_avg:93.16ms +[2025-08-22 13:07:39] [Rank 0] step:4801/10000 train_time:447251ms step_avg:93.16ms +[2025-08-22 13:07:41] [Rank 0] step:4821/10000 train_time:449155ms step_avg:93.17ms +[2025-08-22 13:07:41] [Rank 0] step:4821/10000 train_time:449155ms step_avg:93.17ms +[2025-08-22 13:07:43] [Rank 0] step:4841/10000 train_time:451068ms step_avg:93.18ms +[2025-08-22 13:07:43] [Rank 0] step:4841/10000 train_time:451068ms step_avg:93.18ms +[2025-08-22 13:07:45] [Rank 0] step:4861/10000 train_time:452986ms step_avg:93.19ms +[2025-08-22 13:07:45] [Rank 0] step:4861/10000 train_time:452986ms step_avg:93.19ms +[2025-08-22 13:07:47] [Rank 0] step:4881/10000 train_time:454902ms step_avg:93.20ms +[2025-08-22 13:07:47] [Rank 0] step:4881/10000 train_time:454902ms step_avg:93.20ms +[2025-08-22 13:07:49] [Rank 0] step:4901/10000 train_time:456818ms step_avg:93.21ms +[2025-08-22 13:07:49] [Rank 0] step:4901/10000 train_time:456818ms step_avg:93.21ms +[2025-08-22 13:07:51] [Rank 0] step:4921/10000 train_time:458736ms step_avg:93.22ms +[2025-08-22 13:07:51] [Rank 0] step:4921/10000 train_time:458736ms step_avg:93.22ms +[2025-08-22 13:07:52] 
[Rank 0] step:4941/10000 train_time:460656ms step_avg:93.23ms +[2025-08-22 13:07:52] [Rank 0] step:4941/10000 train_time:460656ms step_avg:93.23ms +[2025-08-22 13:07:54] [Rank 0] step:4961/10000 train_time:462575ms step_avg:93.24ms +[2025-08-22 13:07:54] [Rank 0] step:4961/10000 train_time:462575ms step_avg:93.24ms +[2025-08-22 13:07:56] [Rank 0] step:4981/10000 train_time:464496ms step_avg:93.25ms +[2025-08-22 13:07:56] [Rank 0] step:4981/10000 train_time:464496ms step_avg:93.25ms +[2025-08-22 13:07:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:07:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:08:12] [Rank 0] PRINT: step:5000/10000 val_loss:3.9422 svd_entropy: attn_qk:H=0.4902,top10E=0.64,eRank=55.9,q75/q25=153.22 attn_vo:H=0.7754,top10E=0.24,eRank=219.3,q75/q25=26.15 mlp_w1:H=0.9240,top10E=0.07,eRank=479.5,q75/q25=9.27 mlp_w2:H=0.9324,top10E=0.07,eRank=504.9,q75/q25=9.46 vo_prod:H=0.6350,top10E=0.43,eRank=91.7,q75/q25=423.53 train_time:466423ms step_avg:93.28ms +[2025-08-22 13:08:12] [Rank 0] PRINT: step:5000/10000 val_loss:3.9422 svd_entropy: attn_qk:H=0.4902,top10E=0.64,eRank=55.9,q75/q25=153.22 attn_vo:H=0.7754,top10E=0.24,eRank=219.3,q75/q25=26.15 mlp_w1:H=0.9240,top10E=0.07,eRank=479.5,q75/q25=9.27 mlp_w2:H=0.9324,top10E=0.07,eRank=504.9,q75/q25=9.46 vo_prod:H=0.6350,top10E=0.43,eRank=91.7,q75/q25=423.53 train_time:466423ms step_avg:93.28ms +[2025-08-22 13:08:12] [Rank 0] step:5001/10000 train_time:466445ms step_avg:93.27ms +[2025-08-22 13:08:12] [Rank 0] step:5001/10000 train_time:466445ms step_avg:93.27ms +[2025-08-22 13:08:14] [Rank 0] step:5021/10000 train_time:468346ms step_avg:93.28ms +[2025-08-22 13:08:14] [Rank 0] step:5021/10000 train_time:468346ms step_avg:93.28ms +[2025-08-22 13:08:16] [Rank 0] step:5041/10000 train_time:470262ms step_avg:93.29ms 
+[2025-08-22 13:08:16] [Rank 0] step:5041/10000 train_time:470262ms step_avg:93.29ms +[2025-08-22 13:08:18] [Rank 0] step:5061/10000 train_time:472175ms step_avg:93.30ms +[2025-08-22 13:08:18] [Rank 0] step:5061/10000 train_time:472175ms step_avg:93.30ms +[2025-08-22 13:08:20] [Rank 0] step:5081/10000 train_time:474157ms step_avg:93.32ms +[2025-08-22 13:08:20] [Rank 0] step:5081/10000 train_time:474157ms step_avg:93.32ms +[2025-08-22 13:08:22] [Rank 0] step:5101/10000 train_time:476140ms step_avg:93.34ms +[2025-08-22 13:08:22] [Rank 0] step:5101/10000 train_time:476140ms step_avg:93.34ms +[2025-08-22 13:08:24] [Rank 0] step:5121/10000 train_time:478057ms step_avg:93.35ms +[2025-08-22 13:08:24] [Rank 0] step:5121/10000 train_time:478057ms step_avg:93.35ms +[2025-08-22 13:08:25] [Rank 0] step:5141/10000 train_time:479979ms step_avg:93.36ms +[2025-08-22 13:08:25] [Rank 0] step:5141/10000 train_time:479979ms step_avg:93.36ms +[2025-08-22 13:08:27] [Rank 0] step:5161/10000 train_time:481897ms step_avg:93.37ms +[2025-08-22 13:08:27] [Rank 0] step:5161/10000 train_time:481897ms step_avg:93.37ms +[2025-08-22 13:08:29] [Rank 0] step:5181/10000 train_time:483818ms step_avg:93.38ms +[2025-08-22 13:08:29] [Rank 0] step:5181/10000 train_time:483818ms step_avg:93.38ms +[2025-08-22 13:08:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:08:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:08:45] [Rank 0] PRINT: step:5200/10000 val_loss:3.9293 svd_entropy: attn_qk:H=0.4935,top10E=0.64,eRank=56.5,q75/q25=152.39 attn_vo:H=0.7774,top10E=0.23,eRank=220.8,q75/q25=26.13 mlp_w1:H=0.9245,top10E=0.07,eRank=481.1,q75/q25=9.18 mlp_w2:H=0.9328,top10E=0.07,eRank=506.2,q75/q25=9.25 vo_prod:H=0.6379,top10E=0.42,eRank=92.1,q75/q25=438.48 train_time:485768ms step_avg:93.42ms +[2025-08-22 13:08:45] [Rank 0] PRINT: step:5200/10000 val_loss:3.9293 svd_entropy: attn_qk:H=0.4935,top10E=0.64,eRank=56.5,q75/q25=152.39 attn_vo:H=0.7774,top10E=0.23,eRank=220.8,q75/q25=26.13 mlp_w1:H=0.9245,top10E=0.07,eRank=481.1,q75/q25=9.18 mlp_w2:H=0.9328,top10E=0.07,eRank=506.2,q75/q25=9.25 vo_prod:H=0.6379,top10E=0.42,eRank=92.1,q75/q25=438.48 train_time:485768ms step_avg:93.42ms +[2025-08-22 13:08:45] [Rank 0] step:5201/10000 train_time:485788ms step_avg:93.40ms +[2025-08-22 13:08:45] [Rank 0] step:5201/10000 train_time:485788ms step_avg:93.40ms +[2025-08-22 13:08:47] [Rank 0] step:5221/10000 train_time:487726ms step_avg:93.42ms +[2025-08-22 13:08:47] [Rank 0] step:5221/10000 train_time:487726ms step_avg:93.42ms +[2025-08-22 13:08:49] [Rank 0] step:5241/10000 train_time:489671ms step_avg:93.43ms +[2025-08-22 13:08:49] [Rank 0] step:5241/10000 train_time:489671ms step_avg:93.43ms +[2025-08-22 13:08:51] [Rank 0] step:5261/10000 train_time:491617ms step_avg:93.45ms +[2025-08-22 13:08:51] [Rank 0] step:5261/10000 train_time:491617ms step_avg:93.45ms +[2025-08-22 13:08:53] [Rank 0] step:5281/10000 train_time:493563ms step_avg:93.46ms +[2025-08-22 13:08:53] [Rank 0] step:5281/10000 train_time:493563ms step_avg:93.46ms +[2025-08-22 13:08:55] [Rank 0] step:5301/10000 train_time:495520ms step_avg:93.48ms +[2025-08-22 13:08:55] [Rank 0] step:5301/10000 train_time:495520ms step_avg:93.48ms +[2025-08-22 13:08:57] [Rank 0] step:5321/10000 train_time:497467ms step_avg:93.49ms +[2025-08-22 13:08:57] [Rank 0] step:5321/10000 train_time:497467ms step_avg:93.49ms +[2025-08-22 13:08:59] 
[Rank 0] step:5341/10000 train_time:499416ms step_avg:93.51ms +[2025-08-22 13:08:59] [Rank 0] step:5341/10000 train_time:499416ms step_avg:93.51ms +[2025-08-22 13:09:01] [Rank 0] step:5361/10000 train_time:501369ms step_avg:93.52ms +[2025-08-22 13:09:01] [Rank 0] step:5361/10000 train_time:501369ms step_avg:93.52ms +[2025-08-22 13:09:03] [Rank 0] step:5381/10000 train_time:503319ms step_avg:93.54ms +[2025-08-22 13:09:03] [Rank 0] step:5381/10000 train_time:503319ms step_avg:93.54ms +[2025-08-22 13:09:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:09:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:09:18] [Rank 0] PRINT: step:5400/10000 val_loss:3.9153 svd_entropy: attn_qk:H=0.4961,top10E=0.63,eRank=57.1,q75/q25=151.72 attn_vo:H=0.7797,top10E=0.23,eRank=222.8,q75/q25=26.32 mlp_w1:H=0.9250,top10E=0.07,eRank=482.8,q75/q25=9.05 mlp_w2:H=0.9333,top10E=0.07,eRank=507.5,q75/q25=9.15 vo_prod:H=0.6433,top10E=0.41,eRank=94.6,q75/q25=448.05 train_time:505274ms step_avg:93.57ms +[2025-08-22 13:09:18] [Rank 0] PRINT: step:5400/10000 val_loss:3.9153 svd_entropy: attn_qk:H=0.4961,top10E=0.63,eRank=57.1,q75/q25=151.72 attn_vo:H=0.7797,top10E=0.23,eRank=222.8,q75/q25=26.32 mlp_w1:H=0.9250,top10E=0.07,eRank=482.8,q75/q25=9.05 mlp_w2:H=0.9333,top10E=0.07,eRank=507.5,q75/q25=9.15 vo_prod:H=0.6433,top10E=0.41,eRank=94.6,q75/q25=448.05 train_time:505274ms step_avg:93.57ms +[2025-08-22 13:09:18] [Rank 0] step:5401/10000 train_time:505295ms step_avg:93.56ms +[2025-08-22 13:09:18] [Rank 0] step:5401/10000 train_time:505295ms step_avg:93.56ms +[2025-08-22 13:09:20] [Rank 0] step:5421/10000 train_time:507252ms step_avg:93.57ms +[2025-08-22 13:09:20] [Rank 0] step:5421/10000 train_time:507252ms step_avg:93.57ms +[2025-08-22 13:09:22] [Rank 0] step:5441/10000 train_time:509256ms step_avg:93.60ms 
+[2025-08-22 13:09:22] [Rank 0] step:5441/10000 train_time:509256ms step_avg:93.60ms +[2025-08-22 13:09:24] [Rank 0] step:5461/10000 train_time:511271ms step_avg:93.62ms +[2025-08-22 13:09:24] [Rank 0] step:5461/10000 train_time:511271ms step_avg:93.62ms +[2025-08-22 13:09:26] [Rank 0] step:5481/10000 train_time:513221ms step_avg:93.64ms +[2025-08-22 13:09:26] [Rank 0] step:5481/10000 train_time:513221ms step_avg:93.64ms +[2025-08-22 13:09:28] [Rank 0] step:5501/10000 train_time:515181ms step_avg:93.65ms +[2025-08-22 13:09:28] [Rank 0] step:5501/10000 train_time:515181ms step_avg:93.65ms +[2025-08-22 13:09:30] [Rank 0] step:5521/10000 train_time:517138ms step_avg:93.67ms +[2025-08-22 13:09:30] [Rank 0] step:5521/10000 train_time:517138ms step_avg:93.67ms +[2025-08-22 13:09:32] [Rank 0] step:5541/10000 train_time:519093ms step_avg:93.68ms +[2025-08-22 13:09:32] [Rank 0] step:5541/10000 train_time:519093ms step_avg:93.68ms +[2025-08-22 13:09:34] [Rank 0] step:5561/10000 train_time:521048ms step_avg:93.70ms +[2025-08-22 13:09:34] [Rank 0] step:5561/10000 train_time:521048ms step_avg:93.70ms +[2025-08-22 13:09:36] [Rank 0] step:5581/10000 train_time:523005ms step_avg:93.71ms +[2025-08-22 13:09:36] [Rank 0] step:5581/10000 train_time:523005ms step_avg:93.71ms +[2025-08-22 13:09:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:09:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:09:51] [Rank 0] PRINT: step:5600/10000 val_loss:3.9086 svd_entropy: attn_qk:H=0.4982,top10E=0.63,eRank=57.8,q75/q25=150.99 attn_vo:H=0.7820,top10E=0.23,eRank=224.6,q75/q25=26.46 mlp_w1:H=0.9255,top10E=0.07,eRank=484.4,q75/q25=8.93 mlp_w2:H=0.9337,top10E=0.07,eRank=508.7,q75/q25=9.04 vo_prod:H=0.6473,top10E=0.40,eRank=96.2,q75/q25=467.17 train_time:524969ms step_avg:93.74ms +[2025-08-22 13:09:51] [Rank 0] PRINT: step:5600/10000 val_loss:3.9086 svd_entropy: attn_qk:H=0.4982,top10E=0.63,eRank=57.8,q75/q25=150.99 attn_vo:H=0.7820,top10E=0.23,eRank=224.6,q75/q25=26.46 mlp_w1:H=0.9255,top10E=0.07,eRank=484.4,q75/q25=8.93 mlp_w2:H=0.9337,top10E=0.07,eRank=508.7,q75/q25=9.04 vo_prod:H=0.6473,top10E=0.40,eRank=96.2,q75/q25=467.17 train_time:524969ms step_avg:93.74ms +[2025-08-22 13:09:52] [Rank 0] step:5601/10000 train_time:524990ms step_avg:93.73ms +[2025-08-22 13:09:52] [Rank 0] step:5601/10000 train_time:524990ms step_avg:93.73ms +[2025-08-22 13:09:54] [Rank 0] step:5621/10000 train_time:526939ms step_avg:93.74ms +[2025-08-22 13:09:54] [Rank 0] step:5621/10000 train_time:526939ms step_avg:93.74ms +[2025-08-22 13:09:55] [Rank 0] step:5641/10000 train_time:528886ms step_avg:93.76ms +[2025-08-22 13:09:55] [Rank 0] step:5641/10000 train_time:528886ms step_avg:93.76ms +[2025-08-22 13:09:57] [Rank 0] step:5661/10000 train_time:530832ms step_avg:93.77ms +[2025-08-22 13:09:57] [Rank 0] step:5661/10000 train_time:530832ms step_avg:93.77ms +[2025-08-22 13:09:59] [Rank 0] step:5681/10000 train_time:532781ms step_avg:93.78ms +[2025-08-22 13:09:59] [Rank 0] step:5681/10000 train_time:532781ms step_avg:93.78ms +[2025-08-22 13:10:01] [Rank 0] step:5701/10000 train_time:534730ms step_avg:93.80ms +[2025-08-22 13:10:01] [Rank 0] step:5701/10000 train_time:534730ms step_avg:93.80ms +[2025-08-22 13:10:03] [Rank 0] step:5721/10000 train_time:536685ms step_avg:93.81ms +[2025-08-22 13:10:03] [Rank 0] step:5721/10000 train_time:536685ms step_avg:93.81ms +[2025-08-22 13:10:05] 
[Rank 0] step:5741/10000 train_time:538634ms step_avg:93.82ms +[2025-08-22 13:10:05] [Rank 0] step:5741/10000 train_time:538634ms step_avg:93.82ms +[2025-08-22 13:10:07] [Rank 0] step:5761/10000 train_time:540588ms step_avg:93.84ms +[2025-08-22 13:10:07] [Rank 0] step:5761/10000 train_time:540588ms step_avg:93.84ms +[2025-08-22 13:10:09] [Rank 0] step:5781/10000 train_time:542540ms step_avg:93.85ms +[2025-08-22 13:10:09] [Rank 0] step:5781/10000 train_time:542540ms step_avg:93.85ms +[2025-08-22 13:10:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:10:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:10:25] [Rank 0] PRINT: step:5800/10000 val_loss:3.9047 svd_entropy: attn_qk:H=0.5019,top10E=0.63,eRank=58.4,q75/q25=150.29 attn_vo:H=0.7840,top10E=0.22,eRank=226.3,q75/q25=26.50 mlp_w1:H=0.9260,top10E=0.07,eRank=485.9,q75/q25=8.82 mlp_w2:H=0.9341,top10E=0.07,eRank=509.8,q75/q25=8.86 vo_prod:H=0.6515,top10E=0.40,eRank=97.9,q75/q25=476.30 train_time:544499ms step_avg:93.88ms +[2025-08-22 13:10:25] [Rank 0] PRINT: step:5800/10000 val_loss:3.9047 svd_entropy: attn_qk:H=0.5019,top10E=0.63,eRank=58.4,q75/q25=150.29 attn_vo:H=0.7840,top10E=0.22,eRank=226.3,q75/q25=26.50 mlp_w1:H=0.9260,top10E=0.07,eRank=485.9,q75/q25=8.82 mlp_w2:H=0.9341,top10E=0.07,eRank=509.8,q75/q25=8.86 vo_prod:H=0.6515,top10E=0.40,eRank=97.9,q75/q25=476.30 train_time:544499ms step_avg:93.88ms +[2025-08-22 13:10:25] [Rank 0] step:5801/10000 train_time:544519ms step_avg:93.87ms +[2025-08-22 13:10:25] [Rank 0] step:5801/10000 train_time:544519ms step_avg:93.87ms +[2025-08-22 13:10:27] [Rank 0] step:5821/10000 train_time:546505ms step_avg:93.88ms +[2025-08-22 13:10:27] [Rank 0] step:5821/10000 train_time:546505ms step_avg:93.88ms +[2025-08-22 13:10:29] [Rank 0] step:5841/10000 train_time:548522ms step_avg:93.91ms 
+[2025-08-22 13:10:29] [Rank 0] step:5841/10000 train_time:548522ms step_avg:93.91ms +[2025-08-22 13:10:31] [Rank 0] step:5861/10000 train_time:550475ms step_avg:93.92ms +[2025-08-22 13:10:31] [Rank 0] step:5861/10000 train_time:550475ms step_avg:93.92ms +[2025-08-22 13:10:33] [Rank 0] step:5881/10000 train_time:552424ms step_avg:93.93ms +[2025-08-22 13:10:33] [Rank 0] step:5881/10000 train_time:552424ms step_avg:93.93ms +[2025-08-22 13:10:35] [Rank 0] step:5901/10000 train_time:554372ms step_avg:93.95ms +[2025-08-22 13:10:35] [Rank 0] step:5901/10000 train_time:554372ms step_avg:93.95ms +[2025-08-22 13:10:37] [Rank 0] step:5921/10000 train_time:556320ms step_avg:93.96ms +[2025-08-22 13:10:37] [Rank 0] step:5921/10000 train_time:556320ms step_avg:93.96ms +[2025-08-22 13:10:39] [Rank 0] step:5941/10000 train_time:558274ms step_avg:93.97ms +[2025-08-22 13:10:39] [Rank 0] step:5941/10000 train_time:558274ms step_avg:93.97ms +[2025-08-22 13:10:41] [Rank 0] step:5961/10000 train_time:560227ms step_avg:93.98ms +[2025-08-22 13:10:41] [Rank 0] step:5961/10000 train_time:560227ms step_avg:93.98ms +[2025-08-22 13:10:42] [Rank 0] step:5981/10000 train_time:562178ms step_avg:93.99ms +[2025-08-22 13:10:42] [Rank 0] step:5981/10000 train_time:562178ms step_avg:93.99ms +[2025-08-22 13:10:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:10:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:10:58] [Rank 0] PRINT: step:6000/10000 val_loss:3.8836 svd_entropy: attn_qk:H=0.5045,top10E=0.62,eRank=59.0,q75/q25=152.58 attn_vo:H=0.7860,top10E=0.22,eRank=228.2,q75/q25=26.62 mlp_w1:H=0.9264,top10E=0.07,eRank=487.3,q75/q25=8.68 mlp_w2:H=0.9344,top10E=0.07,eRank=510.8,q75/q25=8.78 vo_prod:H=0.6552,top10E=0.39,eRank=99.4,q75/q25=493.13 train_time:564133ms step_avg:94.02ms +[2025-08-22 13:10:58] [Rank 0] PRINT: step:6000/10000 val_loss:3.8836 svd_entropy: attn_qk:H=0.5045,top10E=0.62,eRank=59.0,q75/q25=152.58 attn_vo:H=0.7860,top10E=0.22,eRank=228.2,q75/q25=26.62 mlp_w1:H=0.9264,top10E=0.07,eRank=487.3,q75/q25=8.68 mlp_w2:H=0.9344,top10E=0.07,eRank=510.8,q75/q25=8.78 vo_prod:H=0.6552,top10E=0.39,eRank=99.4,q75/q25=493.13 train_time:564133ms step_avg:94.02ms +[2025-08-22 13:10:58] [Rank 0] step:6001/10000 train_time:564153ms step_avg:94.01ms +[2025-08-22 13:10:58] [Rank 0] step:6001/10000 train_time:564153ms step_avg:94.01ms +[2025-08-22 13:11:00] [Rank 0] step:6021/10000 train_time:566101ms step_avg:94.02ms +[2025-08-22 13:11:00] [Rank 0] step:6021/10000 train_time:566101ms step_avg:94.02ms +[2025-08-22 13:11:02] [Rank 0] step:6041/10000 train_time:568054ms step_avg:94.03ms +[2025-08-22 13:11:02] [Rank 0] step:6041/10000 train_time:568054ms step_avg:94.03ms +[2025-08-22 13:11:04] [Rank 0] step:6061/10000 train_time:570006ms step_avg:94.04ms +[2025-08-22 13:11:04] [Rank 0] step:6061/10000 train_time:570006ms step_avg:94.04ms +[2025-08-22 13:11:06] [Rank 0] step:6081/10000 train_time:571956ms step_avg:94.06ms +[2025-08-22 13:11:06] [Rank 0] step:6081/10000 train_time:571956ms step_avg:94.06ms +[2025-08-22 13:11:08] [Rank 0] step:6101/10000 train_time:573915ms step_avg:94.07ms +[2025-08-22 13:11:08] [Rank 0] step:6101/10000 train_time:573915ms step_avg:94.07ms +[2025-08-22 13:11:10] [Rank 0] step:6121/10000 train_time:576135ms step_avg:94.12ms +[2025-08-22 13:11:10] [Rank 0] step:6121/10000 train_time:576135ms step_avg:94.12ms +[2025-08-22 13:11:12] 
[Rank 0] step:6141/10000 train_time:578100ms step_avg:94.14ms +[2025-08-22 13:11:12] [Rank 0] step:6141/10000 train_time:578100ms step_avg:94.14ms +[2025-08-22 13:11:14] [Rank 0] step:6161/10000 train_time:580056ms step_avg:94.15ms +[2025-08-22 13:11:14] [Rank 0] step:6161/10000 train_time:580056ms step_avg:94.15ms +[2025-08-22 13:11:16] [Rank 0] step:6181/10000 train_time:582011ms step_avg:94.16ms +[2025-08-22 13:11:16] [Rank 0] step:6181/10000 train_time:582011ms step_avg:94.16ms +[2025-08-22 13:11:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:11:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:11:32] [Rank 0] PRINT: step:6200/10000 val_loss:3.8731 svd_entropy: attn_qk:H=0.5071,top10E=0.62,eRank=59.6,q75/q25=151.24 attn_vo:H=0.7881,top10E=0.21,eRank=230.1,q75/q25=26.77 mlp_w1:H=0.9268,top10E=0.07,eRank=488.5,q75/q25=8.61 mlp_w2:H=0.9347,top10E=0.07,eRank=511.7,q75/q25=8.69 vo_prod:H=0.6591,top10E=0.38,eRank=101.1,q75/q25=507.13 train_time:583973ms step_avg:94.19ms +[2025-08-22 13:11:32] [Rank 0] PRINT: step:6200/10000 val_loss:3.8731 svd_entropy: attn_qk:H=0.5071,top10E=0.62,eRank=59.6,q75/q25=151.24 attn_vo:H=0.7881,top10E=0.21,eRank=230.1,q75/q25=26.77 mlp_w1:H=0.9268,top10E=0.07,eRank=488.5,q75/q25=8.61 mlp_w2:H=0.9347,top10E=0.07,eRank=511.7,q75/q25=8.69 vo_prod:H=0.6591,top10E=0.38,eRank=101.1,q75/q25=507.13 train_time:583973ms step_avg:94.19ms +[2025-08-22 13:11:32] [Rank 0] step:6201/10000 train_time:583995ms step_avg:94.18ms +[2025-08-22 13:11:32] [Rank 0] step:6201/10000 train_time:583995ms step_avg:94.18ms +[2025-08-22 13:11:34] [Rank 0] step:6221/10000 train_time:585934ms step_avg:94.19ms +[2025-08-22 13:11:34] [Rank 0] step:6221/10000 train_time:585934ms step_avg:94.19ms +[2025-08-22 13:11:36] [Rank 0] step:6241/10000 train_time:587885ms step_avg:94.20ms 
+[2025-08-22 13:11:36] [Rank 0] step:6241/10000 train_time:587885ms step_avg:94.20ms +[2025-08-22 13:11:38] [Rank 0] step:6261/10000 train_time:589840ms step_avg:94.21ms +[2025-08-22 13:11:38] [Rank 0] step:6261/10000 train_time:589840ms step_avg:94.21ms +[2025-08-22 13:11:40] [Rank 0] step:6281/10000 train_time:591799ms step_avg:94.22ms +[2025-08-22 13:11:40] [Rank 0] step:6281/10000 train_time:591799ms step_avg:94.22ms +[2025-08-22 13:11:42] [Rank 0] step:6301/10000 train_time:593755ms step_avg:94.23ms +[2025-08-22 13:11:42] [Rank 0] step:6301/10000 train_time:593755ms step_avg:94.23ms +[2025-08-22 13:11:44] [Rank 0] step:6321/10000 train_time:595713ms step_avg:94.24ms +[2025-08-22 13:11:44] [Rank 0] step:6321/10000 train_time:595713ms step_avg:94.24ms +[2025-08-22 13:11:46] [Rank 0] step:6341/10000 train_time:597671ms step_avg:94.26ms +[2025-08-22 13:11:46] [Rank 0] step:6341/10000 train_time:597671ms step_avg:94.26ms +[2025-08-22 13:11:48] [Rank 0] step:6361/10000 train_time:599636ms step_avg:94.27ms +[2025-08-22 13:11:48] [Rank 0] step:6361/10000 train_time:599636ms step_avg:94.27ms +[2025-08-22 13:11:50] [Rank 0] step:6381/10000 train_time:601598ms step_avg:94.28ms +[2025-08-22 13:11:50] [Rank 0] step:6381/10000 train_time:601598ms step_avg:94.28ms +[2025-08-22 13:11:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:11:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:12:05] [Rank 0] PRINT: step:6400/10000 val_loss:3.8585 svd_entropy: attn_qk:H=0.5097,top10E=0.62,eRank=60.0,q75/q25=151.70 attn_vo:H=0.7900,top10E=0.21,eRank=231.9,q75/q25=26.97 mlp_w1:H=0.9272,top10E=0.07,eRank=489.7,q75/q25=8.51 mlp_w2:H=0.9350,top10E=0.07,eRank=512.6,q75/q25=8.53 vo_prod:H=0.6628,top10E=0.37,eRank=102.7,q75/q25=519.64 train_time:603563ms step_avg:94.31ms +[2025-08-22 13:12:05] [Rank 0] PRINT: step:6400/10000 val_loss:3.8585 svd_entropy: attn_qk:H=0.5097,top10E=0.62,eRank=60.0,q75/q25=151.70 attn_vo:H=0.7900,top10E=0.21,eRank=231.9,q75/q25=26.97 mlp_w1:H=0.9272,top10E=0.07,eRank=489.7,q75/q25=8.51 mlp_w2:H=0.9350,top10E=0.07,eRank=512.6,q75/q25=8.53 vo_prod:H=0.6628,top10E=0.37,eRank=102.7,q75/q25=519.64 train_time:603563ms step_avg:94.31ms +[2025-08-22 13:12:05] [Rank 0] step:6401/10000 train_time:603585ms step_avg:94.30ms +[2025-08-22 13:12:05] [Rank 0] step:6401/10000 train_time:603585ms step_avg:94.30ms +[2025-08-22 13:12:07] [Rank 0] step:6421/10000 train_time:605533ms step_avg:94.31ms +[2025-08-22 13:12:07] [Rank 0] step:6421/10000 train_time:605533ms step_avg:94.31ms +[2025-08-22 13:12:09] [Rank 0] step:6441/10000 train_time:607484ms step_avg:94.32ms +[2025-08-22 13:12:09] [Rank 0] step:6441/10000 train_time:607484ms step_avg:94.32ms +[2025-08-22 13:12:11] [Rank 0] step:6461/10000 train_time:609440ms step_avg:94.33ms +[2025-08-22 13:12:11] [Rank 0] step:6461/10000 train_time:609440ms step_avg:94.33ms +[2025-08-22 13:12:13] [Rank 0] step:6481/10000 train_time:611401ms step_avg:94.34ms +[2025-08-22 13:12:13] [Rank 0] step:6481/10000 train_time:611401ms step_avg:94.34ms +[2025-08-22 13:12:15] [Rank 0] step:6501/10000 train_time:613352ms step_avg:94.35ms +[2025-08-22 13:12:15] [Rank 0] step:6501/10000 train_time:613352ms step_avg:94.35ms +[2025-08-22 13:12:17] [Rank 0] step:6521/10000 train_time:615305ms step_avg:94.36ms +[2025-08-22 13:12:17] [Rank 0] step:6521/10000 train_time:615305ms step_avg:94.36ms +[2025-08-22 13:12:19] 
[Rank 0] step:6541/10000 train_time:617261ms step_avg:94.37ms +[2025-08-22 13:12:19] [Rank 0] step:6541/10000 train_time:617261ms step_avg:94.37ms +[2025-08-22 13:12:21] [Rank 0] step:6561/10000 train_time:619218ms step_avg:94.38ms +[2025-08-22 13:12:21] [Rank 0] step:6561/10000 train_time:619218ms step_avg:94.38ms +[2025-08-22 13:12:23] [Rank 0] step:6581/10000 train_time:621169ms step_avg:94.39ms +[2025-08-22 13:12:23] [Rank 0] step:6581/10000 train_time:621169ms step_avg:94.39ms +[2025-08-22 13:12:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:12:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:12:39] [Rank 0] PRINT: step:6600/10000 val_loss:3.8452 svd_entropy: attn_qk:H=0.5116,top10E=0.61,eRank=60.6,q75/q25=151.08 attn_vo:H=0.7917,top10E=0.21,eRank=233.4,q75/q25=27.15 mlp_w1:H=0.9275,top10E=0.07,eRank=490.9,q75/q25=8.41 mlp_w2:H=0.9353,top10E=0.07,eRank=513.4,q75/q25=8.47 vo_prod:H=0.6654,top10E=0.37,eRank=103.9,q75/q25=529.89 train_time:623132ms step_avg:94.41ms +[2025-08-22 13:12:39] [Rank 0] PRINT: step:6600/10000 val_loss:3.8452 svd_entropy: attn_qk:H=0.5116,top10E=0.61,eRank=60.6,q75/q25=151.08 attn_vo:H=0.7917,top10E=0.21,eRank=233.4,q75/q25=27.15 mlp_w1:H=0.9275,top10E=0.07,eRank=490.9,q75/q25=8.41 mlp_w2:H=0.9353,top10E=0.07,eRank=513.4,q75/q25=8.47 vo_prod:H=0.6654,top10E=0.37,eRank=103.9,q75/q25=529.89 train_time:623132ms step_avg:94.41ms +[2025-08-22 13:12:39] [Rank 0] step:6601/10000 train_time:623153ms step_avg:94.40ms +[2025-08-22 13:12:39] [Rank 0] step:6601/10000 train_time:623153ms step_avg:94.40ms +[2025-08-22 13:12:41] [Rank 0] step:6621/10000 train_time:625110ms step_avg:94.41ms +[2025-08-22 13:12:41] [Rank 0] step:6621/10000 train_time:625110ms step_avg:94.41ms +[2025-08-22 13:12:43] [Rank 0] step:6641/10000 train_time:627069ms step_avg:94.42ms 
+[2025-08-22 13:12:43] [Rank 0] step:6641/10000 train_time:627069ms step_avg:94.42ms +[2025-08-22 13:12:45] [Rank 0] step:6661/10000 train_time:629021ms step_avg:94.43ms +[2025-08-22 13:12:45] [Rank 0] step:6661/10000 train_time:629021ms step_avg:94.43ms +[2025-08-22 13:12:47] [Rank 0] step:6681/10000 train_time:630991ms step_avg:94.45ms +[2025-08-22 13:12:47] [Rank 0] step:6681/10000 train_time:630991ms step_avg:94.45ms +[2025-08-22 13:12:49] [Rank 0] step:6701/10000 train_time:632981ms step_avg:94.46ms +[2025-08-22 13:12:49] [Rank 0] step:6701/10000 train_time:632981ms step_avg:94.46ms +[2025-08-22 13:12:51] [Rank 0] step:6721/10000 train_time:634965ms step_avg:94.47ms +[2025-08-22 13:12:51] [Rank 0] step:6721/10000 train_time:634965ms step_avg:94.47ms +[2025-08-22 13:12:53] [Rank 0] step:6741/10000 train_time:636947ms step_avg:94.49ms +[2025-08-22 13:12:53] [Rank 0] step:6741/10000 train_time:636947ms step_avg:94.49ms +[2025-08-22 13:12:55] [Rank 0] step:6761/10000 train_time:638926ms step_avg:94.50ms +[2025-08-22 13:12:55] [Rank 0] step:6761/10000 train_time:638926ms step_avg:94.50ms +[2025-08-22 13:12:57] [Rank 0] step:6781/10000 train_time:640914ms step_avg:94.52ms +[2025-08-22 13:12:57] [Rank 0] step:6781/10000 train_time:640914ms step_avg:94.52ms +[2025-08-22 13:12:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:12:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:13:12] [Rank 0] PRINT: step:6800/10000 val_loss:3.8296 svd_entropy: attn_qk:H=0.5137,top10E=0.61,eRank=61.1,q75/q25=151.84 attn_vo:H=0.7932,top10E=0.21,eRank=234.9,q75/q25=27.29 mlp_w1:H=0.9278,top10E=0.07,eRank=491.9,q75/q25=8.33 mlp_w2:H=0.9356,top10E=0.07,eRank=514.1,q75/q25=8.43 vo_prod:H=0.6681,top10E=0.37,eRank=105.0,q75/q25=553.68 train_time:642908ms step_avg:94.55ms +[2025-08-22 13:13:12] [Rank 0] PRINT: step:6800/10000 val_loss:3.8296 svd_entropy: attn_qk:H=0.5137,top10E=0.61,eRank=61.1,q75/q25=151.84 attn_vo:H=0.7932,top10E=0.21,eRank=234.9,q75/q25=27.29 mlp_w1:H=0.9278,top10E=0.07,eRank=491.9,q75/q25=8.33 mlp_w2:H=0.9356,top10E=0.07,eRank=514.1,q75/q25=8.43 vo_prod:H=0.6681,top10E=0.37,eRank=105.0,q75/q25=553.68 train_time:642908ms step_avg:94.55ms +[2025-08-22 13:13:13] [Rank 0] step:6801/10000 train_time:642929ms step_avg:94.53ms +[2025-08-22 13:13:13] [Rank 0] step:6801/10000 train_time:642929ms step_avg:94.53ms +[2025-08-22 13:13:14] [Rank 0] step:6821/10000 train_time:644897ms step_avg:94.55ms +[2025-08-22 13:13:14] [Rank 0] step:6821/10000 train_time:644897ms step_avg:94.55ms +[2025-08-22 13:13:16] [Rank 0] step:6841/10000 train_time:646878ms step_avg:94.56ms +[2025-08-22 13:13:16] [Rank 0] step:6841/10000 train_time:646878ms step_avg:94.56ms +[2025-08-22 13:13:18] [Rank 0] step:6861/10000 train_time:648853ms step_avg:94.57ms +[2025-08-22 13:13:18] [Rank 0] step:6861/10000 train_time:648853ms step_avg:94.57ms +[2025-08-22 13:13:20] [Rank 0] step:6881/10000 train_time:650838ms step_avg:94.58ms +[2025-08-22 13:13:20] [Rank 0] step:6881/10000 train_time:650838ms step_avg:94.58ms +[2025-08-22 13:13:22] [Rank 0] step:6901/10000 train_time:652816ms step_avg:94.60ms +[2025-08-22 13:13:22] [Rank 0] step:6901/10000 train_time:652816ms step_avg:94.60ms +[2025-08-22 13:13:24] [Rank 0] step:6921/10000 train_time:654792ms step_avg:94.61ms +[2025-08-22 13:13:24] [Rank 0] step:6921/10000 train_time:654792ms step_avg:94.61ms +[2025-08-22 13:13:26] 
[Rank 0] step:6941/10000 train_time:656782ms step_avg:94.62ms +[2025-08-22 13:13:26] [Rank 0] step:6941/10000 train_time:656782ms step_avg:94.62ms +[2025-08-22 13:13:28] [Rank 0] step:6961/10000 train_time:658783ms step_avg:94.64ms +[2025-08-22 13:13:28] [Rank 0] step:6961/10000 train_time:658783ms step_avg:94.64ms +[2025-08-22 13:13:30] [Rank 0] step:6981/10000 train_time:660774ms step_avg:94.65ms +[2025-08-22 13:13:30] [Rank 0] step:6981/10000 train_time:660774ms step_avg:94.65ms +[2025-08-22 13:13:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:13:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:13:46] [Rank 0] PRINT: step:7000/10000 val_loss:3.8139 svd_entropy: attn_qk:H=0.5160,top10E=0.61,eRank=61.6,q75/q25=151.83 attn_vo:H=0.7947,top10E=0.20,eRank=236.3,q75/q25=27.47 mlp_w1:H=0.9281,top10E=0.07,eRank=492.7,q75/q25=8.28 mlp_w2:H=0.9358,top10E=0.07,eRank=514.8,q75/q25=8.38 vo_prod:H=0.6708,top10E=0.36,eRank=106.4,q75/q25=573.29 train_time:662769ms step_avg:94.68ms +[2025-08-22 13:13:46] [Rank 0] PRINT: step:7000/10000 val_loss:3.8139 svd_entropy: attn_qk:H=0.5160,top10E=0.61,eRank=61.6,q75/q25=151.83 attn_vo:H=0.7947,top10E=0.20,eRank=236.3,q75/q25=27.47 mlp_w1:H=0.9281,top10E=0.07,eRank=492.7,q75/q25=8.28 mlp_w2:H=0.9358,top10E=0.07,eRank=514.8,q75/q25=8.38 vo_prod:H=0.6708,top10E=0.36,eRank=106.4,q75/q25=573.29 train_time:662769ms step_avg:94.68ms +[2025-08-22 13:13:46] [Rank 0] step:7001/10000 train_time:662789ms step_avg:94.67ms +[2025-08-22 13:13:46] [Rank 0] step:7001/10000 train_time:662789ms step_avg:94.67ms +[2025-08-22 13:13:48] [Rank 0] step:7021/10000 train_time:664781ms step_avg:94.68ms +[2025-08-22 13:13:48] [Rank 0] step:7021/10000 train_time:664781ms step_avg:94.68ms +[2025-08-22 13:13:50] [Rank 0] step:7041/10000 train_time:666764ms step_avg:94.70ms 
+[2025-08-22 13:13:50] [Rank 0] step:7041/10000 train_time:666764ms step_avg:94.70ms +[2025-08-22 13:13:52] [Rank 0] step:7061/10000 train_time:668748ms step_avg:94.71ms +[2025-08-22 13:13:52] [Rank 0] step:7061/10000 train_time:668748ms step_avg:94.71ms +[2025-08-22 13:13:54] [Rank 0] step:7081/10000 train_time:670732ms step_avg:94.72ms +[2025-08-22 13:13:54] [Rank 0] step:7081/10000 train_time:670732ms step_avg:94.72ms +[2025-08-22 13:13:56] [Rank 0] step:7101/10000 train_time:672727ms step_avg:94.74ms +[2025-08-22 13:13:56] [Rank 0] step:7101/10000 train_time:672727ms step_avg:94.74ms +[2025-08-22 13:13:58] [Rank 0] step:7121/10000 train_time:674710ms step_avg:94.75ms +[2025-08-22 13:13:58] [Rank 0] step:7121/10000 train_time:674710ms step_avg:94.75ms +[2025-08-22 13:14:00] [Rank 0] step:7141/10000 train_time:676700ms step_avg:94.76ms +[2025-08-22 13:14:00] [Rank 0] step:7141/10000 train_time:676700ms step_avg:94.76ms +[2025-08-22 13:14:02] [Rank 0] step:7161/10000 train_time:678693ms step_avg:94.78ms +[2025-08-22 13:14:02] [Rank 0] step:7161/10000 train_time:678693ms step_avg:94.78ms +[2025-08-22 13:14:04] [Rank 0] step:7181/10000 train_time:680684ms step_avg:94.79ms +[2025-08-22 13:14:04] [Rank 0] step:7181/10000 train_time:680684ms step_avg:94.79ms +[2025-08-22 13:14:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:14:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:14:20] [Rank 0] PRINT: step:7200/10000 val_loss:3.8046 svd_entropy: attn_qk:H=0.5182,top10E=0.60,eRank=62.0,q75/q25=151.93 attn_vo:H=0.7960,top10E=0.20,eRank=237.5,q75/q25=27.57 mlp_w1:H=0.9283,top10E=0.07,eRank=493.5,q75/q25=8.22 mlp_w2:H=0.9360,top10E=0.07,eRank=515.5,q75/q25=8.31 vo_prod:H=0.6732,top10E=0.36,eRank=107.5,q75/q25=587.00 train_time:682685ms step_avg:94.82ms +[2025-08-22 13:14:20] [Rank 0] PRINT: step:7200/10000 val_loss:3.8046 svd_entropy: attn_qk:H=0.5182,top10E=0.60,eRank=62.0,q75/q25=151.93 attn_vo:H=0.7960,top10E=0.20,eRank=237.5,q75/q25=27.57 mlp_w1:H=0.9283,top10E=0.07,eRank=493.5,q75/q25=8.22 mlp_w2:H=0.9360,top10E=0.07,eRank=515.5,q75/q25=8.31 vo_prod:H=0.6732,top10E=0.36,eRank=107.5,q75/q25=587.00 train_time:682685ms step_avg:94.82ms +[2025-08-22 13:14:20] [Rank 0] step:7201/10000 train_time:682705ms step_avg:94.81ms +[2025-08-22 13:14:20] [Rank 0] step:7201/10000 train_time:682705ms step_avg:94.81ms +[2025-08-22 13:14:22] [Rank 0] step:7221/10000 train_time:684700ms step_avg:94.82ms +[2025-08-22 13:14:22] [Rank 0] step:7221/10000 train_time:684700ms step_avg:94.82ms +[2025-08-22 13:14:24] [Rank 0] step:7241/10000 train_time:686682ms step_avg:94.83ms +[2025-08-22 13:14:24] [Rank 0] step:7241/10000 train_time:686682ms step_avg:94.83ms +[2025-08-22 13:14:26] [Rank 0] step:7261/10000 train_time:688660ms step_avg:94.84ms +[2025-08-22 13:14:26] [Rank 0] step:7261/10000 train_time:688660ms step_avg:94.84ms +[2025-08-22 13:14:28] [Rank 0] step:7281/10000 train_time:690658ms step_avg:94.86ms +[2025-08-22 13:14:28] [Rank 0] step:7281/10000 train_time:690658ms step_avg:94.86ms +[2025-08-22 13:14:30] [Rank 0] step:7301/10000 train_time:692640ms step_avg:94.87ms +[2025-08-22 13:14:30] [Rank 0] step:7301/10000 train_time:692640ms step_avg:94.87ms +[2025-08-22 13:14:32] [Rank 0] step:7321/10000 train_time:694639ms step_avg:94.88ms +[2025-08-22 13:14:32] [Rank 0] step:7321/10000 train_time:694639ms step_avg:94.88ms +[2025-08-22 13:14:34] 
[Rank 0] step:7341/10000 train_time:696626ms step_avg:94.90ms +[2025-08-22 13:14:34] [Rank 0] step:7341/10000 train_time:696626ms step_avg:94.90ms +[2025-08-22 13:14:36] [Rank 0] step:7361/10000 train_time:698622ms step_avg:94.91ms +[2025-08-22 13:14:36] [Rank 0] step:7361/10000 train_time:698622ms step_avg:94.91ms +[2025-08-22 13:14:38] [Rank 0] step:7381/10000 train_time:700691ms step_avg:94.93ms +[2025-08-22 13:14:38] [Rank 0] step:7381/10000 train_time:700691ms step_avg:94.93ms +[2025-08-22 13:14:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:14:40] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:14:54] [Rank 0] PRINT: step:7400/10000 val_loss:3.7862 svd_entropy: attn_qk:H=0.5195,top10E=0.60,eRank=62.4,q75/q25=150.54 attn_vo:H=0.7972,top10E=0.20,eRank=238.7,q75/q25=27.80 mlp_w1:H=0.9285,top10E=0.07,eRank=494.1,q75/q25=8.16 mlp_w2:H=0.9362,top10E=0.07,eRank=516.0,q75/q25=8.29 vo_prod:H=0.6756,top10E=0.35,eRank=108.8,q75/q25=599.74 train_time:702735ms step_avg:94.96ms +[2025-08-22 13:14:54] [Rank 0] PRINT: step:7400/10000 val_loss:3.7862 svd_entropy: attn_qk:H=0.5195,top10E=0.60,eRank=62.4,q75/q25=150.54 attn_vo:H=0.7972,top10E=0.20,eRank=238.7,q75/q25=27.80 mlp_w1:H=0.9285,top10E=0.07,eRank=494.1,q75/q25=8.16 mlp_w2:H=0.9362,top10E=0.07,eRank=516.0,q75/q25=8.29 vo_prod:H=0.6756,top10E=0.35,eRank=108.8,q75/q25=599.74 train_time:702735ms step_avg:94.96ms +[2025-08-22 13:14:54] [Rank 0] step:7401/10000 train_time:702756ms step_avg:94.95ms +[2025-08-22 13:14:54] [Rank 0] step:7401/10000 train_time:702756ms step_avg:94.95ms +[2025-08-22 13:14:56] [Rank 0] step:7421/10000 train_time:704745ms step_avg:94.97ms +[2025-08-22 13:14:56] [Rank 0] step:7421/10000 train_time:704745ms step_avg:94.97ms +[2025-08-22 13:14:58] [Rank 0] step:7441/10000 train_time:706724ms step_avg:94.98ms 
+[2025-08-22 13:14:58] [Rank 0] step:7441/10000 train_time:706724ms step_avg:94.98ms +[2025-08-22 13:15:00] [Rank 0] step:7461/10000 train_time:708707ms step_avg:94.99ms +[2025-08-22 13:15:00] [Rank 0] step:7461/10000 train_time:708707ms step_avg:94.99ms +[2025-08-22 13:15:02] [Rank 0] step:7481/10000 train_time:710702ms step_avg:95.00ms +[2025-08-22 13:15:02] [Rank 0] step:7481/10000 train_time:710702ms step_avg:95.00ms +[2025-08-22 13:15:04] [Rank 0] step:7501/10000 train_time:712691ms step_avg:95.01ms +[2025-08-22 13:15:04] [Rank 0] step:7501/10000 train_time:712691ms step_avg:95.01ms +[2025-08-22 13:15:06] [Rank 0] step:7521/10000 train_time:714684ms step_avg:95.03ms +[2025-08-22 13:15:06] [Rank 0] step:7521/10000 train_time:714684ms step_avg:95.03ms +[2025-08-22 13:15:08] [Rank 0] step:7541/10000 train_time:716682ms step_avg:95.04ms +[2025-08-22 13:15:08] [Rank 0] step:7541/10000 train_time:716682ms step_avg:95.04ms +[2025-08-22 13:15:10] [Rank 0] step:7561/10000 train_time:718662ms step_avg:95.05ms +[2025-08-22 13:15:10] [Rank 0] step:7561/10000 train_time:718662ms step_avg:95.05ms +[2025-08-22 13:15:12] [Rank 0] step:7581/10000 train_time:720660ms step_avg:95.06ms +[2025-08-22 13:15:12] [Rank 0] step:7581/10000 train_time:720660ms step_avg:95.06ms +[2025-08-22 13:15:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:15:14] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:15:28] [Rank 0] PRINT: step:7600/10000 val_loss:3.7785 svd_entropy: attn_qk:H=0.5208,top10E=0.60,eRank=62.8,q75/q25=149.04 attn_vo:H=0.7982,top10E=0.20,eRank=239.7,q75/q25=27.95 mlp_w1:H=0.9287,top10E=0.07,eRank=494.7,q75/q25=8.10 mlp_w2:H=0.9364,top10E=0.07,eRank=516.5,q75/q25=8.28 vo_prod:H=0.6774,top10E=0.35,eRank=109.7,q75/q25=605.72 train_time:722666ms step_avg:95.09ms +[2025-08-22 13:15:28] [Rank 0] PRINT: step:7600/10000 val_loss:3.7785 svd_entropy: attn_qk:H=0.5208,top10E=0.60,eRank=62.8,q75/q25=149.04 attn_vo:H=0.7982,top10E=0.20,eRank=239.7,q75/q25=27.95 mlp_w1:H=0.9287,top10E=0.07,eRank=494.7,q75/q25=8.10 mlp_w2:H=0.9364,top10E=0.07,eRank=516.5,q75/q25=8.28 vo_prod:H=0.6774,top10E=0.35,eRank=109.7,q75/q25=605.72 train_time:722666ms step_avg:95.09ms +[2025-08-22 13:15:28] [Rank 0] step:7601/10000 train_time:722686ms step_avg:95.08ms +[2025-08-22 13:15:28] [Rank 0] step:7601/10000 train_time:722686ms step_avg:95.08ms +[2025-08-22 13:15:30] [Rank 0] step:7621/10000 train_time:724655ms step_avg:95.09ms +[2025-08-22 13:15:30] [Rank 0] step:7621/10000 train_time:724655ms step_avg:95.09ms +[2025-08-22 13:15:32] [Rank 0] step:7641/10000 train_time:726639ms step_avg:95.10ms +[2025-08-22 13:15:32] [Rank 0] step:7641/10000 train_time:726639ms step_avg:95.10ms +[2025-08-22 13:15:34] [Rank 0] step:7661/10000 train_time:728626ms step_avg:95.11ms +[2025-08-22 13:15:34] [Rank 0] step:7661/10000 train_time:728626ms step_avg:95.11ms +[2025-08-22 13:15:36] [Rank 0] step:7681/10000 train_time:730609ms step_avg:95.12ms +[2025-08-22 13:15:36] [Rank 0] step:7681/10000 train_time:730609ms step_avg:95.12ms +[2025-08-22 13:15:38] [Rank 0] step:7701/10000 train_time:732596ms step_avg:95.13ms +[2025-08-22 13:15:38] [Rank 0] step:7701/10000 train_time:732596ms step_avg:95.13ms +[2025-08-22 13:15:40] [Rank 0] step:7721/10000 train_time:734597ms step_avg:95.14ms +[2025-08-22 13:15:40] [Rank 0] step:7721/10000 train_time:734597ms step_avg:95.14ms +[2025-08-22 13:15:42] 
[Rank 0] step:7741/10000 train_time:736668ms step_avg:95.16ms +[2025-08-22 13:15:42] [Rank 0] step:7741/10000 train_time:736668ms step_avg:95.16ms +[2025-08-22 13:15:44] [Rank 0] step:7761/10000 train_time:738742ms step_avg:95.19ms +[2025-08-22 13:15:44] [Rank 0] step:7761/10000 train_time:738742ms step_avg:95.19ms +[2025-08-22 13:15:46] [Rank 0] step:7781/10000 train_time:740735ms step_avg:95.20ms +[2025-08-22 13:15:46] [Rank 0] step:7781/10000 train_time:740735ms step_avg:95.20ms +[2025-08-22 13:15:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:15:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:16:02] [Rank 0] PRINT: step:7800/10000 val_loss:3.7656 svd_entropy: attn_qk:H=0.5225,top10E=0.60,eRank=63.1,q75/q25=148.28 attn_vo:H=0.7991,top10E=0.19,eRank=240.6,q75/q25=27.98 mlp_w1:H=0.9289,top10E=0.07,eRank=495.3,q75/q25=8.08 mlp_w2:H=0.9365,top10E=0.07,eRank=517.0,q75/q25=8.18 vo_prod:H=0.6792,top10E=0.34,eRank=110.7,q75/q25=622.86 train_time:742744ms step_avg:95.22ms +[2025-08-22 13:16:02] [Rank 0] PRINT: step:7800/10000 val_loss:3.7656 svd_entropy: attn_qk:H=0.5225,top10E=0.60,eRank=63.1,q75/q25=148.28 attn_vo:H=0.7991,top10E=0.19,eRank=240.6,q75/q25=27.98 mlp_w1:H=0.9289,top10E=0.07,eRank=495.3,q75/q25=8.08 mlp_w2:H=0.9365,top10E=0.07,eRank=517.0,q75/q25=8.18 vo_prod:H=0.6792,top10E=0.34,eRank=110.7,q75/q25=622.86 train_time:742744ms step_avg:95.22ms +[2025-08-22 13:16:02] [Rank 0] step:7801/10000 train_time:742764ms step_avg:95.21ms +[2025-08-22 13:16:02] [Rank 0] step:7801/10000 train_time:742764ms step_avg:95.21ms +[2025-08-22 13:16:04] [Rank 0] step:7821/10000 train_time:744766ms step_avg:95.23ms +[2025-08-22 13:16:04] [Rank 0] step:7821/10000 train_time:744766ms step_avg:95.23ms +[2025-08-22 13:16:06] [Rank 0] step:7841/10000 train_time:746750ms step_avg:95.24ms 
+[2025-08-22 13:16:06] [Rank 0] step:7841/10000 train_time:746750ms step_avg:95.24ms +[2025-08-22 13:16:08] [Rank 0] step:7861/10000 train_time:748744ms step_avg:95.25ms +[2025-08-22 13:16:08] [Rank 0] step:7861/10000 train_time:748744ms step_avg:95.25ms +[2025-08-22 13:16:10] [Rank 0] step:7881/10000 train_time:750744ms step_avg:95.26ms +[2025-08-22 13:16:10] [Rank 0] step:7881/10000 train_time:750744ms step_avg:95.26ms +[2025-08-22 13:16:12] [Rank 0] step:7901/10000 train_time:752732ms step_avg:95.27ms +[2025-08-22 13:16:12] [Rank 0] step:7901/10000 train_time:752732ms step_avg:95.27ms +[2025-08-22 13:16:14] [Rank 0] step:7921/10000 train_time:754728ms step_avg:95.28ms +[2025-08-22 13:16:14] [Rank 0] step:7921/10000 train_time:754728ms step_avg:95.28ms +[2025-08-22 13:16:16] [Rank 0] step:7941/10000 train_time:756730ms step_avg:95.29ms +[2025-08-22 13:16:16] [Rank 0] step:7941/10000 train_time:756730ms step_avg:95.29ms +[2025-08-22 13:16:18] [Rank 0] step:7961/10000 train_time:758725ms step_avg:95.31ms +[2025-08-22 13:16:18] [Rank 0] step:7961/10000 train_time:758725ms step_avg:95.31ms +[2025-08-22 13:16:20] [Rank 0] step:7981/10000 train_time:760711ms step_avg:95.32ms +[2025-08-22 13:16:20] [Rank 0] step:7981/10000 train_time:760711ms step_avg:95.32ms +[2025-08-22 13:16:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:16:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:16:36] [Rank 0] PRINT: step:8000/10000 val_loss:3.7482 svd_entropy: attn_qk:H=0.5236,top10E=0.60,eRank=63.4,q75/q25=148.06 attn_vo:H=0.8000,top10E=0.19,eRank=241.5,q75/q25=28.02 mlp_w1:H=0.9291,top10E=0.07,eRank=495.8,q75/q25=8.04 mlp_w2:H=0.9367,top10E=0.07,eRank=517.5,q75/q25=8.21 vo_prod:H=0.6808,top10E=0.34,eRank=111.5,q75/q25=638.41 train_time:762711ms step_avg:95.34ms +[2025-08-22 13:16:36] [Rank 0] PRINT: step:8000/10000 val_loss:3.7482 svd_entropy: attn_qk:H=0.5236,top10E=0.60,eRank=63.4,q75/q25=148.06 attn_vo:H=0.8000,top10E=0.19,eRank=241.5,q75/q25=28.02 mlp_w1:H=0.9291,top10E=0.07,eRank=495.8,q75/q25=8.04 mlp_w2:H=0.9367,top10E=0.07,eRank=517.5,q75/q25=8.21 vo_prod:H=0.6808,top10E=0.34,eRank=111.5,q75/q25=638.41 train_time:762711ms step_avg:95.34ms +[2025-08-22 13:16:36] [Rank 0] step:8001/10000 train_time:762732ms step_avg:95.33ms +[2025-08-22 13:16:36] [Rank 0] step:8001/10000 train_time:762732ms step_avg:95.33ms +[2025-08-22 13:16:38] [Rank 0] step:8021/10000 train_time:764720ms step_avg:95.34ms +[2025-08-22 13:16:38] [Rank 0] step:8021/10000 train_time:764720ms step_avg:95.34ms +[2025-08-22 13:16:40] [Rank 0] step:8041/10000 train_time:766714ms step_avg:95.35ms +[2025-08-22 13:16:40] [Rank 0] step:8041/10000 train_time:766714ms step_avg:95.35ms +[2025-08-22 13:16:42] [Rank 0] step:8061/10000 train_time:768702ms step_avg:95.36ms +[2025-08-22 13:16:42] [Rank 0] step:8061/10000 train_time:768702ms step_avg:95.36ms +[2025-08-22 13:16:44] [Rank 0] step:8081/10000 train_time:770678ms step_avg:95.37ms +[2025-08-22 13:16:44] [Rank 0] step:8081/10000 train_time:770678ms step_avg:95.37ms +[2025-08-22 13:16:46] [Rank 0] step:8101/10000 train_time:772748ms step_avg:95.39ms +[2025-08-22 13:16:46] [Rank 0] step:8101/10000 train_time:772748ms step_avg:95.39ms +[2025-08-22 13:16:48] [Rank 0] step:8121/10000 train_time:774776ms step_avg:95.40ms +[2025-08-22 13:16:48] [Rank 0] step:8121/10000 train_time:774776ms step_avg:95.40ms +[2025-08-22 13:16:50] 
[Rank 0] step:8141/10000 train_time:776995ms step_avg:95.44ms +[2025-08-22 13:16:50] [Rank 0] step:8141/10000 train_time:776995ms step_avg:95.44ms +[2025-08-22 13:16:52] [Rank 0] step:8161/10000 train_time:779000ms step_avg:95.45ms +[2025-08-22 13:16:52] [Rank 0] step:8161/10000 train_time:779000ms step_avg:95.45ms +[2025-08-22 13:16:54] [Rank 0] step:8181/10000 train_time:781015ms step_avg:95.47ms +[2025-08-22 13:16:54] [Rank 0] step:8181/10000 train_time:781015ms step_avg:95.47ms +[2025-08-22 13:16:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:16:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:17:10] [Rank 0] PRINT: step:8200/10000 val_loss:3.7370 svd_entropy: attn_qk:H=0.5243,top10E=0.60,eRank=63.8,q75/q25=149.04 attn_vo:H=0.8008,top10E=0.19,eRank=242.3,q75/q25=28.19 mlp_w1:H=0.9292,top10E=0.07,eRank=496.3,q75/q25=8.02 mlp_w2:H=0.9369,top10E=0.07,eRank=518.0,q75/q25=8.16 vo_prod:H=0.6822,top10E=0.34,eRank=112.3,q75/q25=641.31 train_time:783060ms step_avg:95.50ms +[2025-08-22 13:17:10] [Rank 0] PRINT: step:8200/10000 val_loss:3.7370 svd_entropy: attn_qk:H=0.5243,top10E=0.60,eRank=63.8,q75/q25=149.04 attn_vo:H=0.8008,top10E=0.19,eRank=242.3,q75/q25=28.19 mlp_w1:H=0.9292,top10E=0.07,eRank=496.3,q75/q25=8.02 mlp_w2:H=0.9369,top10E=0.07,eRank=518.0,q75/q25=8.16 vo_prod:H=0.6822,top10E=0.34,eRank=112.3,q75/q25=641.31 train_time:783060ms step_avg:95.50ms +[2025-08-22 13:17:10] [Rank 0] step:8201/10000 train_time:783081ms step_avg:95.49ms +[2025-08-22 13:17:10] [Rank 0] step:8201/10000 train_time:783081ms step_avg:95.49ms +[2025-08-22 13:17:12] [Rank 0] step:8221/10000 train_time:785099ms step_avg:95.50ms +[2025-08-22 13:17:12] [Rank 0] step:8221/10000 train_time:785099ms step_avg:95.50ms +[2025-08-22 13:17:14] [Rank 0] step:8241/10000 train_time:787117ms step_avg:95.51ms 
+[2025-08-22 13:17:14] [Rank 0] step:8241/10000 train_time:787117ms step_avg:95.51ms +[2025-08-22 13:17:16] [Rank 0] step:8261/10000 train_time:789138ms step_avg:95.53ms +[2025-08-22 13:17:16] [Rank 0] step:8261/10000 train_time:789138ms step_avg:95.53ms +[2025-08-22 13:17:18] [Rank 0] step:8281/10000 train_time:791150ms step_avg:95.54ms +[2025-08-22 13:17:18] [Rank 0] step:8281/10000 train_time:791150ms step_avg:95.54ms +[2025-08-22 13:17:20] [Rank 0] step:8301/10000 train_time:793163ms step_avg:95.55ms +[2025-08-22 13:17:20] [Rank 0] step:8301/10000 train_time:793163ms step_avg:95.55ms +[2025-08-22 13:17:22] [Rank 0] step:8321/10000 train_time:795176ms step_avg:95.56ms +[2025-08-22 13:17:22] [Rank 0] step:8321/10000 train_time:795176ms step_avg:95.56ms +[2025-08-22 13:17:24] [Rank 0] step:8341/10000 train_time:797199ms step_avg:95.58ms +[2025-08-22 13:17:24] [Rank 0] step:8341/10000 train_time:797199ms step_avg:95.58ms +[2025-08-22 13:17:26] [Rank 0] step:8361/10000 train_time:799216ms step_avg:95.59ms +[2025-08-22 13:17:26] [Rank 0] step:8361/10000 train_time:799216ms step_avg:95.59ms +[2025-08-22 13:17:28] [Rank 0] step:8381/10000 train_time:801232ms step_avg:95.60ms +[2025-08-22 13:17:28] [Rank 0] step:8381/10000 train_time:801232ms step_avg:95.60ms +[2025-08-22 13:17:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:17:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:17:44] [Rank 0] PRINT: step:8400/10000 val_loss:3.7259 svd_entropy: attn_qk:H=0.5254,top10E=0.59,eRank=64.0,q75/q25=148.49 attn_vo:H=0.8015,top10E=0.19,eRank=243.1,q75/q25=28.15 mlp_w1:H=0.9294,top10E=0.07,eRank=496.7,q75/q25=7.98 mlp_w2:H=0.9370,top10E=0.07,eRank=518.4,q75/q25=8.17 vo_prod:H=0.6836,top10E=0.34,eRank=113.1,q75/q25=654.89 train_time:803255ms step_avg:95.63ms +[2025-08-22 13:17:44] [Rank 0] PRINT: step:8400/10000 val_loss:3.7259 svd_entropy: attn_qk:H=0.5254,top10E=0.59,eRank=64.0,q75/q25=148.49 attn_vo:H=0.8015,top10E=0.19,eRank=243.1,q75/q25=28.15 mlp_w1:H=0.9294,top10E=0.07,eRank=496.7,q75/q25=7.98 mlp_w2:H=0.9370,top10E=0.07,eRank=518.4,q75/q25=8.17 vo_prod:H=0.6836,top10E=0.34,eRank=113.1,q75/q25=654.89 train_time:803255ms step_avg:95.63ms +[2025-08-22 13:17:44] [Rank 0] step:8401/10000 train_time:803275ms step_avg:95.62ms +[2025-08-22 13:17:44] [Rank 0] step:8401/10000 train_time:803275ms step_avg:95.62ms +[2025-08-22 13:17:46] [Rank 0] step:8421/10000 train_time:805281ms step_avg:95.63ms +[2025-08-22 13:17:46] [Rank 0] step:8421/10000 train_time:805281ms step_avg:95.63ms +[2025-08-22 13:17:48] [Rank 0] step:8441/10000 train_time:807294ms step_avg:95.64ms +[2025-08-22 13:17:48] [Rank 0] step:8441/10000 train_time:807294ms step_avg:95.64ms +[2025-08-22 13:17:50] [Rank 0] step:8461/10000 train_time:809434ms step_avg:95.67ms +[2025-08-22 13:17:50] [Rank 0] step:8461/10000 train_time:809434ms step_avg:95.67ms +[2025-08-22 13:17:52] [Rank 0] step:8481/10000 train_time:811504ms step_avg:95.69ms +[2025-08-22 13:17:52] [Rank 0] step:8481/10000 train_time:811504ms step_avg:95.69ms +[2025-08-22 13:17:54] [Rank 0] step:8501/10000 train_time:813543ms step_avg:95.70ms +[2025-08-22 13:17:54] [Rank 0] step:8501/10000 train_time:813543ms step_avg:95.70ms +[2025-08-22 13:17:56] [Rank 0] step:8521/10000 train_time:815561ms step_avg:95.71ms +[2025-08-22 13:17:56] [Rank 0] step:8521/10000 train_time:815561ms step_avg:95.71ms +[2025-08-22 13:17:58] 
[Rank 0] step:8541/10000 train_time:817591ms step_avg:95.73ms +[2025-08-22 13:17:58] [Rank 0] step:8541/10000 train_time:817591ms step_avg:95.73ms +[2025-08-22 13:18:00] [Rank 0] step:8561/10000 train_time:819617ms step_avg:95.74ms +[2025-08-22 13:18:00] [Rank 0] step:8561/10000 train_time:819617ms step_avg:95.74ms +[2025-08-22 13:18:02] [Rank 0] step:8581/10000 train_time:821639ms step_avg:95.75ms +[2025-08-22 13:18:02] [Rank 0] step:8581/10000 train_time:821639ms step_avg:95.75ms +[2025-08-22 13:18:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:18:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:18:18] [Rank 0] PRINT: step:8600/10000 val_loss:3.7158 svd_entropy: attn_qk:H=0.5263,top10E=0.59,eRank=64.3,q75/q25=147.77 attn_vo:H=0.8022,top10E=0.19,eRank=243.7,q75/q25=28.20 mlp_w1:H=0.9295,top10E=0.07,eRank=497.1,q75/q25=7.95 mlp_w2:H=0.9371,top10E=0.07,eRank=518.8,q75/q25=8.11 vo_prod:H=0.6847,top10E=0.33,eRank=113.7,q75/q25=658.91 train_time:823660ms step_avg:95.77ms +[2025-08-22 13:18:18] [Rank 0] PRINT: step:8600/10000 val_loss:3.7158 svd_entropy: attn_qk:H=0.5263,top10E=0.59,eRank=64.3,q75/q25=147.77 attn_vo:H=0.8022,top10E=0.19,eRank=243.7,q75/q25=28.20 mlp_w1:H=0.9295,top10E=0.07,eRank=497.1,q75/q25=7.95 mlp_w2:H=0.9371,top10E=0.07,eRank=518.8,q75/q25=8.11 vo_prod:H=0.6847,top10E=0.33,eRank=113.7,q75/q25=658.91 train_time:823660ms step_avg:95.77ms +[2025-08-22 13:18:18] [Rank 0] step:8601/10000 train_time:823680ms step_avg:95.77ms +[2025-08-22 13:18:18] [Rank 0] step:8601/10000 train_time:823680ms step_avg:95.77ms +[2025-08-22 13:18:20] [Rank 0] step:8621/10000 train_time:825704ms step_avg:95.78ms +[2025-08-22 13:18:20] [Rank 0] step:8621/10000 train_time:825704ms step_avg:95.78ms +[2025-08-22 13:18:22] [Rank 0] step:8641/10000 train_time:827720ms step_avg:95.79ms 
+[2025-08-22 13:18:22] [Rank 0] step:8641/10000 train_time:827720ms step_avg:95.79ms +[2025-08-22 13:18:24] [Rank 0] step:8661/10000 train_time:829737ms step_avg:95.80ms +[2025-08-22 13:18:24] [Rank 0] step:8661/10000 train_time:829737ms step_avg:95.80ms +[2025-08-22 13:18:26] [Rank 0] step:8681/10000 train_time:831765ms step_avg:95.81ms +[2025-08-22 13:18:26] [Rank 0] step:8681/10000 train_time:831765ms step_avg:95.81ms +[2025-08-22 13:18:28] [Rank 0] step:8701/10000 train_time:833778ms step_avg:95.83ms +[2025-08-22 13:18:28] [Rank 0] step:8701/10000 train_time:833778ms step_avg:95.83ms +[2025-08-22 13:18:30] [Rank 0] step:8721/10000 train_time:835800ms step_avg:95.84ms +[2025-08-22 13:18:30] [Rank 0] step:8721/10000 train_time:835800ms step_avg:95.84ms +[2025-08-22 13:18:32] [Rank 0] step:8741/10000 train_time:837811ms step_avg:95.85ms +[2025-08-22 13:18:32] [Rank 0] step:8741/10000 train_time:837811ms step_avg:95.85ms +[2025-08-22 13:18:34] [Rank 0] step:8761/10000 train_time:839836ms step_avg:95.86ms +[2025-08-22 13:18:34] [Rank 0] step:8761/10000 train_time:839836ms step_avg:95.86ms +[2025-08-22 13:18:36] [Rank 0] step:8781/10000 train_time:841859ms step_avg:95.87ms +[2025-08-22 13:18:36] [Rank 0] step:8781/10000 train_time:841859ms step_avg:95.87ms +[2025-08-22 13:18:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:18:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:18:52] [Rank 0] PRINT: step:8800/10000 val_loss:3.7040 svd_entropy: attn_qk:H=0.5272,top10E=0.59,eRank=64.5,q75/q25=146.78 attn_vo:H=0.8028,top10E=0.19,eRank=244.3,q75/q25=28.27 mlp_w1:H=0.9296,top10E=0.07,eRank=497.4,q75/q25=7.93 mlp_w2:H=0.9372,top10E=0.07,eRank=519.1,q75/q25=8.09 vo_prod:H=0.6858,top10E=0.33,eRank=114.3,q75/q25=671.61 train_time:843885ms step_avg:95.90ms +[2025-08-22 13:18:52] [Rank 0] PRINT: step:8800/10000 val_loss:3.7040 svd_entropy: attn_qk:H=0.5272,top10E=0.59,eRank=64.5,q75/q25=146.78 attn_vo:H=0.8028,top10E=0.19,eRank=244.3,q75/q25=28.27 mlp_w1:H=0.9296,top10E=0.07,eRank=497.4,q75/q25=7.93 mlp_w2:H=0.9372,top10E=0.07,eRank=519.1,q75/q25=8.09 vo_prod:H=0.6858,top10E=0.33,eRank=114.3,q75/q25=671.61 train_time:843885ms step_avg:95.90ms +[2025-08-22 13:18:52] [Rank 0] step:8801/10000 train_time:843905ms step_avg:95.89ms +[2025-08-22 13:18:52] [Rank 0] step:8801/10000 train_time:843905ms step_avg:95.89ms +[2025-08-22 13:18:54] [Rank 0] step:8821/10000 train_time:845979ms step_avg:95.91ms +[2025-08-22 13:18:54] [Rank 0] step:8821/10000 train_time:845979ms step_avg:95.91ms +[2025-08-22 13:18:56] [Rank 0] step:8841/10000 train_time:848014ms step_avg:95.92ms +[2025-08-22 13:18:56] [Rank 0] step:8841/10000 train_time:848014ms step_avg:95.92ms +[2025-08-22 13:18:58] [Rank 0] step:8861/10000 train_time:850024ms step_avg:95.93ms +[2025-08-22 13:18:58] [Rank 0] step:8861/10000 train_time:850024ms step_avg:95.93ms +[2025-08-22 13:19:00] [Rank 0] step:8881/10000 train_time:852038ms step_avg:95.94ms +[2025-08-22 13:19:00] [Rank 0] step:8881/10000 train_time:852038ms step_avg:95.94ms +[2025-08-22 13:19:02] [Rank 0] step:8901/10000 train_time:854056ms step_avg:95.95ms +[2025-08-22 13:19:02] [Rank 0] step:8901/10000 train_time:854056ms step_avg:95.95ms +[2025-08-22 13:19:04] [Rank 0] step:8921/10000 train_time:856087ms step_avg:95.96ms +[2025-08-22 13:19:04] [Rank 0] step:8921/10000 train_time:856087ms step_avg:95.96ms +[2025-08-22 13:19:06] 
[Rank 0] step:8941/10000 train_time:858110ms step_avg:95.97ms +[2025-08-22 13:19:06] [Rank 0] step:8941/10000 train_time:858110ms step_avg:95.97ms +[2025-08-22 13:19:08] [Rank 0] step:8961/10000 train_time:860127ms step_avg:95.99ms +[2025-08-22 13:19:08] [Rank 0] step:8961/10000 train_time:860127ms step_avg:95.99ms +[2025-08-22 13:19:10] [Rank 0] step:8981/10000 train_time:862148ms step_avg:96.00ms +[2025-08-22 13:19:10] [Rank 0] step:8981/10000 train_time:862148ms step_avg:96.00ms +[2025-08-22 13:19:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:19:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:19:26] [Rank 0] PRINT: step:9000/10000 val_loss:3.6947 svd_entropy: attn_qk:H=0.5279,top10E=0.59,eRank=64.8,q75/q25=146.94 attn_vo:H=0.8032,top10E=0.19,eRank=244.8,q75/q25=28.35 mlp_w1:H=0.9297,top10E=0.07,eRank=497.8,q75/q25=7.92 mlp_w2:H=0.9373,top10E=0.07,eRank=519.4,q75/q25=8.05 vo_prod:H=0.6867,top10E=0.33,eRank=114.8,q75/q25=672.33 train_time:864168ms step_avg:96.02ms +[2025-08-22 13:19:26] [Rank 0] PRINT: step:9000/10000 val_loss:3.6947 svd_entropy: attn_qk:H=0.5279,top10E=0.59,eRank=64.8,q75/q25=146.94 attn_vo:H=0.8032,top10E=0.19,eRank=244.8,q75/q25=28.35 mlp_w1:H=0.9297,top10E=0.07,eRank=497.8,q75/q25=7.92 mlp_w2:H=0.9373,top10E=0.07,eRank=519.4,q75/q25=8.05 vo_prod:H=0.6867,top10E=0.33,eRank=114.8,q75/q25=672.33 train_time:864168ms step_avg:96.02ms +[2025-08-22 13:19:26] [Rank 0] step:9001/10000 train_time:864189ms step_avg:96.01ms +[2025-08-22 13:19:26] [Rank 0] step:9001/10000 train_time:864189ms step_avg:96.01ms +[2025-08-22 13:19:28] [Rank 0] step:9021/10000 train_time:866208ms step_avg:96.02ms +[2025-08-22 13:19:28] [Rank 0] step:9021/10000 train_time:866208ms step_avg:96.02ms +[2025-08-22 13:19:30] [Rank 0] step:9041/10000 train_time:868220ms step_avg:96.03ms 
+[2025-08-22 13:19:30] [Rank 0] step:9041/10000 train_time:868220ms step_avg:96.03ms +[2025-08-22 13:19:32] [Rank 0] step:9061/10000 train_time:870242ms step_avg:96.04ms +[2025-08-22 13:19:32] [Rank 0] step:9061/10000 train_time:870242ms step_avg:96.04ms +[2025-08-22 13:19:34] [Rank 0] step:9081/10000 train_time:872264ms step_avg:96.05ms +[2025-08-22 13:19:34] [Rank 0] step:9081/10000 train_time:872264ms step_avg:96.05ms +[2025-08-22 13:19:36] [Rank 0] step:9101/10000 train_time:874299ms step_avg:96.07ms +[2025-08-22 13:19:36] [Rank 0] step:9101/10000 train_time:874299ms step_avg:96.07ms +[2025-08-22 13:19:38] [Rank 0] step:9121/10000 train_time:876324ms step_avg:96.08ms +[2025-08-22 13:19:38] [Rank 0] step:9121/10000 train_time:876324ms step_avg:96.08ms +[2025-08-22 13:19:40] [Rank 0] step:9141/10000 train_time:878330ms step_avg:96.09ms +[2025-08-22 13:19:40] [Rank 0] step:9141/10000 train_time:878330ms step_avg:96.09ms +[2025-08-22 13:19:42] [Rank 0] step:9161/10000 train_time:880342ms step_avg:96.10ms +[2025-08-22 13:19:42] [Rank 0] step:9161/10000 train_time:880342ms step_avg:96.10ms +[2025-08-22 13:19:44] [Rank 0] step:9181/10000 train_time:882395ms step_avg:96.11ms +[2025-08-22 13:19:44] [Rank 0] step:9181/10000 train_time:882395ms step_avg:96.11ms +[2025-08-22 13:19:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:19:46] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:20:00] [Rank 0] PRINT: step:9200/10000 val_loss:3.6854 svd_entropy: attn_qk:H=0.5286,top10E=0.59,eRank=64.9,q75/q25=147.07 attn_vo:H=0.8037,top10E=0.19,eRank=245.3,q75/q25=28.43 mlp_w1:H=0.9298,top10E=0.07,eRank=498.1,q75/q25=7.92 mlp_w2:H=0.9374,top10E=0.07,eRank=519.7,q75/q25=8.06 vo_prod:H=0.6876,top10E=0.33,eRank=115.3,q75/q25=676.57 train_time:884420ms step_avg:96.13ms +[2025-08-22 13:20:00] [Rank 0] PRINT: step:9200/10000 val_loss:3.6854 svd_entropy: attn_qk:H=0.5286,top10E=0.59,eRank=64.9,q75/q25=147.07 attn_vo:H=0.8037,top10E=0.19,eRank=245.3,q75/q25=28.43 mlp_w1:H=0.9298,top10E=0.07,eRank=498.1,q75/q25=7.92 mlp_w2:H=0.9374,top10E=0.07,eRank=519.7,q75/q25=8.06 vo_prod:H=0.6876,top10E=0.33,eRank=115.3,q75/q25=676.57 train_time:884420ms step_avg:96.13ms +[2025-08-22 13:20:00] [Rank 0] step:9201/10000 train_time:884441ms step_avg:96.12ms +[2025-08-22 13:20:00] [Rank 0] step:9201/10000 train_time:884441ms step_avg:96.12ms +[2025-08-22 13:20:02] [Rank 0] step:9221/10000 train_time:886477ms step_avg:96.14ms +[2025-08-22 13:20:02] [Rank 0] step:9221/10000 train_time:886477ms step_avg:96.14ms +[2025-08-22 13:20:04] [Rank 0] step:9241/10000 train_time:888505ms step_avg:96.15ms +[2025-08-22 13:20:04] [Rank 0] step:9241/10000 train_time:888505ms step_avg:96.15ms +[2025-08-22 13:20:06] [Rank 0] step:9261/10000 train_time:890530ms step_avg:96.16ms +[2025-08-22 13:20:06] [Rank 0] step:9261/10000 train_time:890530ms step_avg:96.16ms +[2025-08-22 13:20:08] [Rank 0] step:9281/10000 train_time:892542ms step_avg:96.17ms +[2025-08-22 13:20:08] [Rank 0] step:9281/10000 train_time:892542ms step_avg:96.17ms +[2025-08-22 13:20:10] [Rank 0] step:9301/10000 train_time:894556ms step_avg:96.18ms +[2025-08-22 13:20:10] [Rank 0] step:9301/10000 train_time:894556ms step_avg:96.18ms +[2025-08-22 13:20:12] [Rank 0] step:9321/10000 train_time:896579ms step_avg:96.19ms +[2025-08-22 13:20:12] [Rank 0] step:9321/10000 train_time:896579ms step_avg:96.19ms +[2025-08-22 13:20:14] 
[Rank 0] step:9341/10000 train_time:898601ms step_avg:96.20ms +[2025-08-22 13:20:14] [Rank 0] step:9341/10000 train_time:898601ms step_avg:96.20ms +[2025-08-22 13:20:16] [Rank 0] step:9361/10000 train_time:900629ms step_avg:96.21ms +[2025-08-22 13:20:16] [Rank 0] step:9361/10000 train_time:900629ms step_avg:96.21ms +[2025-08-22 13:20:18] [Rank 0] step:9381/10000 train_time:902668ms step_avg:96.22ms +[2025-08-22 13:20:18] [Rank 0] step:9381/10000 train_time:902668ms step_avg:96.22ms +[2025-08-22 13:20:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:20:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:20:34] [Rank 0] PRINT: step:9400/10000 val_loss:3.6764 svd_entropy: attn_qk:H=0.5291,top10E=0.59,eRank=65.1,q75/q25=146.71 attn_vo:H=0.8040,top10E=0.19,eRank=245.6,q75/q25=28.43 mlp_w1:H=0.9299,top10E=0.06,eRank=498.3,q75/q25=7.89 mlp_w2:H=0.9375,top10E=0.07,eRank=520.0,q75/q25=8.03 vo_prod:H=0.6882,top10E=0.33,eRank=115.7,q75/q25=680.17 train_time:904702ms step_avg:96.24ms +[2025-08-22 13:20:34] [Rank 0] PRINT: step:9400/10000 val_loss:3.6764 svd_entropy: attn_qk:H=0.5291,top10E=0.59,eRank=65.1,q75/q25=146.71 attn_vo:H=0.8040,top10E=0.19,eRank=245.6,q75/q25=28.43 mlp_w1:H=0.9299,top10E=0.06,eRank=498.3,q75/q25=7.89 mlp_w2:H=0.9375,top10E=0.07,eRank=520.0,q75/q25=8.03 vo_prod:H=0.6882,top10E=0.33,eRank=115.7,q75/q25=680.17 train_time:904702ms step_avg:96.24ms +[2025-08-22 13:20:34] [Rank 0] step:9401/10000 train_time:904723ms step_avg:96.24ms +[2025-08-22 13:20:34] [Rank 0] step:9401/10000 train_time:904723ms step_avg:96.24ms +[2025-08-22 13:20:36] [Rank 0] step:9421/10000 train_time:906737ms step_avg:96.25ms +[2025-08-22 13:20:36] [Rank 0] step:9421/10000 train_time:906737ms step_avg:96.25ms +[2025-08-22 13:20:38] [Rank 0] step:9441/10000 train_time:908765ms step_avg:96.26ms 
+[2025-08-22 13:20:38] [Rank 0] step:9441/10000 train_time:908765ms step_avg:96.26ms +[2025-08-22 13:20:40] [Rank 0] step:9461/10000 train_time:910792ms step_avg:96.27ms +[2025-08-22 13:20:40] [Rank 0] step:9461/10000 train_time:910792ms step_avg:96.27ms +[2025-08-22 13:20:42] [Rank 0] step:9481/10000 train_time:912823ms step_avg:96.28ms +[2025-08-22 13:20:42] [Rank 0] step:9481/10000 train_time:912823ms step_avg:96.28ms +[2025-08-22 13:20:44] [Rank 0] step:9501/10000 train_time:914858ms step_avg:96.29ms +[2025-08-22 13:20:44] [Rank 0] step:9501/10000 train_time:914858ms step_avg:96.29ms +[2025-08-22 13:20:47] [Rank 0] step:9521/10000 train_time:916876ms step_avg:96.30ms +[2025-08-22 13:20:47] [Rank 0] step:9521/10000 train_time:916876ms step_avg:96.30ms +[2025-08-22 13:20:49] [Rank 0] step:9541/10000 train_time:918901ms step_avg:96.31ms +[2025-08-22 13:20:49] [Rank 0] step:9541/10000 train_time:918901ms step_avg:96.31ms +[2025-08-22 13:20:51] [Rank 0] step:9561/10000 train_time:920919ms step_avg:96.32ms +[2025-08-22 13:20:51] [Rank 0] step:9561/10000 train_time:920919ms step_avg:96.32ms +[2025-08-22 13:20:53] [Rank 0] step:9581/10000 train_time:922945ms step_avg:96.33ms +[2025-08-22 13:20:53] [Rank 0] step:9581/10000 train_time:922945ms step_avg:96.33ms +[2025-08-22 13:20:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:20:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:21:08] [Rank 0] PRINT: step:9600/10000 val_loss:3.6687 svd_entropy: attn_qk:H=0.5294,top10E=0.59,eRank=65.2,q75/q25=147.35 attn_vo:H=0.8043,top10E=0.19,eRank=245.9,q75/q25=28.50 mlp_w1:H=0.9300,top10E=0.06,eRank=498.5,q75/q25=7.88 mlp_w2:H=0.9376,top10E=0.07,eRank=520.2,q75/q25=8.00 vo_prod:H=0.6888,top10E=0.33,eRank=116.0,q75/q25=689.73 train_time:924987ms step_avg:96.35ms +[2025-08-22 13:21:08] [Rank 0] PRINT: step:9600/10000 val_loss:3.6687 svd_entropy: attn_qk:H=0.5294,top10E=0.59,eRank=65.2,q75/q25=147.35 attn_vo:H=0.8043,top10E=0.19,eRank=245.9,q75/q25=28.50 mlp_w1:H=0.9300,top10E=0.06,eRank=498.5,q75/q25=7.88 mlp_w2:H=0.9376,top10E=0.07,eRank=520.2,q75/q25=8.00 vo_prod:H=0.6888,top10E=0.33,eRank=116.0,q75/q25=689.73 train_time:924987ms step_avg:96.35ms +[2025-08-22 13:21:09] [Rank 0] step:9601/10000 train_time:925008ms step_avg:96.34ms +[2025-08-22 13:21:09] [Rank 0] step:9601/10000 train_time:925008ms step_avg:96.34ms +[2025-08-22 13:21:11] [Rank 0] step:9621/10000 train_time:927024ms step_avg:96.35ms +[2025-08-22 13:21:11] [Rank 0] step:9621/10000 train_time:927024ms step_avg:96.35ms +[2025-08-22 13:21:13] [Rank 0] step:9641/10000 train_time:929049ms step_avg:96.36ms +[2025-08-22 13:21:13] [Rank 0] step:9641/10000 train_time:929049ms step_avg:96.36ms +[2025-08-22 13:21:15] [Rank 0] step:9661/10000 train_time:931098ms step_avg:96.38ms +[2025-08-22 13:21:15] [Rank 0] step:9661/10000 train_time:931098ms step_avg:96.38ms +[2025-08-22 13:21:17] [Rank 0] step:9681/10000 train_time:933142ms step_avg:96.39ms +[2025-08-22 13:21:17] [Rank 0] step:9681/10000 train_time:933142ms step_avg:96.39ms +[2025-08-22 13:21:19] [Rank 0] step:9701/10000 train_time:935199ms step_avg:96.40ms +[2025-08-22 13:21:19] [Rank 0] step:9701/10000 train_time:935199ms step_avg:96.40ms +[2025-08-22 13:21:21] [Rank 0] step:9721/10000 train_time:937243ms step_avg:96.41ms +[2025-08-22 13:21:21] [Rank 0] step:9721/10000 train_time:937243ms step_avg:96.41ms +[2025-08-22 13:21:23] 
[Rank 0] step:9741/10000 train_time:939304ms step_avg:96.43ms +[2025-08-22 13:21:23] [Rank 0] step:9741/10000 train_time:939304ms step_avg:96.43ms +[2025-08-22 13:21:25] [Rank 0] step:9761/10000 train_time:941355ms step_avg:96.44ms +[2025-08-22 13:21:25] [Rank 0] step:9761/10000 train_time:941355ms step_avg:96.44ms +[2025-08-22 13:21:27] [Rank 0] step:9781/10000 train_time:943411ms step_avg:96.45ms +[2025-08-22 13:21:27] [Rank 0] step:9781/10000 train_time:943411ms step_avg:96.45ms +[2025-08-22 13:21:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:21:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:21:43] [Rank 0] PRINT: step:9800/10000 val_loss:3.6609 svd_entropy: attn_qk:H=0.5298,top10E=0.59,eRank=65.3,q75/q25=146.97 attn_vo:H=0.8045,top10E=0.19,eRank=246.2,q75/q25=28.54 mlp_w1:H=0.9300,top10E=0.06,eRank=498.7,q75/q25=7.86 mlp_w2:H=0.9377,top10E=0.06,eRank=520.4,q75/q25=8.00 vo_prod:H=0.6893,top10E=0.33,eRank=116.3,q75/q25=692.44 train_time:945482ms step_avg:96.48ms +[2025-08-22 13:21:43] [Rank 0] PRINT: step:9800/10000 val_loss:3.6609 svd_entropy: attn_qk:H=0.5298,top10E=0.59,eRank=65.3,q75/q25=146.97 attn_vo:H=0.8045,top10E=0.19,eRank=246.2,q75/q25=28.54 mlp_w1:H=0.9300,top10E=0.06,eRank=498.7,q75/q25=7.86 mlp_w2:H=0.9377,top10E=0.06,eRank=520.4,q75/q25=8.00 vo_prod:H=0.6893,top10E=0.33,eRank=116.3,q75/q25=692.44 train_time:945482ms step_avg:96.48ms +[2025-08-22 13:21:43] [Rank 0] step:9801/10000 train_time:945502ms step_avg:96.47ms +[2025-08-22 13:21:43] [Rank 0] step:9801/10000 train_time:945502ms step_avg:96.47ms +[2025-08-22 13:21:45] [Rank 0] step:9821/10000 train_time:947533ms step_avg:96.48ms +[2025-08-22 13:21:45] [Rank 0] step:9821/10000 train_time:947533ms step_avg:96.48ms +[2025-08-22 13:21:47] [Rank 0] step:9841/10000 train_time:949587ms step_avg:96.49ms 
+[2025-08-22 13:21:47] [Rank 0] step:9841/10000 train_time:949587ms step_avg:96.49ms +[2025-08-22 13:21:49] [Rank 0] step:9861/10000 train_time:951620ms step_avg:96.50ms +[2025-08-22 13:21:49] [Rank 0] step:9861/10000 train_time:951620ms step_avg:96.50ms +[2025-08-22 13:21:51] [Rank 0] step:9881/10000 train_time:953655ms step_avg:96.51ms +[2025-08-22 13:21:51] [Rank 0] step:9881/10000 train_time:953655ms step_avg:96.51ms +[2025-08-22 13:21:53] [Rank 0] step:9901/10000 train_time:955713ms step_avg:96.53ms +[2025-08-22 13:21:53] [Rank 0] step:9901/10000 train_time:955713ms step_avg:96.53ms +[2025-08-22 13:21:55] [Rank 0] step:9921/10000 train_time:957751ms step_avg:96.54ms +[2025-08-22 13:21:55] [Rank 0] step:9921/10000 train_time:957751ms step_avg:96.54ms +[2025-08-22 13:21:57] [Rank 0] step:9941/10000 train_time:959806ms step_avg:96.55ms +[2025-08-22 13:21:57] [Rank 0] step:9941/10000 train_time:959806ms step_avg:96.55ms +[2025-08-22 13:21:59] [Rank 0] step:9961/10000 train_time:961846ms step_avg:96.56ms +[2025-08-22 13:21:59] [Rank 0] step:9961/10000 train_time:961846ms step_avg:96.56ms +[2025-08-22 13:22:02] [Rank 0] step:9981/10000 train_time:963974ms step_avg:96.58ms +[2025-08-22 13:22:02] [Rank 0] step:9981/10000 train_time:963974ms step_avg:96.58ms +[2025-08-22 13:22:04] [Rank 0] step:10000/10000 train_time:965994ms step_avg:96.60ms +[2025-08-22 13:22:04] [Rank 0] step:10000/10000 train_time:965994ms step_avg:96.60ms +[2025-08-22 13:22:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 13:22:04] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 13:22:17] [Rank 0] PRINT: step:10000/10000 val_loss:3.6543 svd_entropy: attn_qk:H=0.5300,top10E=0.59,eRank=65.4,q75/q25=146.99 attn_vo:H=0.8047,top10E=0.19,eRank=246.3,q75/q25=28.52 mlp_w1:H=0.9301,top10E=0.06,eRank=498.8,q75/q25=7.85 mlp_w2:H=0.9377,top10E=0.06,eRank=520.5,q75/q25=8.01 vo_prod:H=0.6896,top10E=0.33,eRank=116.4,q75/q25=700.56 train_time:966105ms step_avg:96.61ms +[2025-08-22 13:22:17] [Rank 0] PRINT: step:10000/10000 val_loss:3.6543 svd_entropy: attn_qk:H=0.5300,top10E=0.59,eRank=65.4,q75/q25=146.99 attn_vo:H=0.8047,top10E=0.19,eRank=246.3,q75/q25=28.52 mlp_w1:H=0.9301,top10E=0.06,eRank=498.8,q75/q25=7.85 mlp_w2:H=0.9377,top10E=0.06,eRank=520.5,q75/q25=8.01 vo_prod:H=0.6896,top10E=0.33,eRank=116.4,q75/q25=700.56 train_time:966105ms step_avg:96.61ms +[2025-08-22 13:22:17] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 13:22:17 2025 --- +[2025-08-22 13:22:17] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 13:22:17 2025 --- +[2025-08-22 13:22:17] [Rank 0] PRINT: Peak memory allocated: 11166 MiB reserved: 16976 MiB +[2025-08-22 13:22:17] [Rank 0] PRINT: Peak memory allocated: 11166 MiB reserved: 16976 MiB diff --git a/logs_svd_gated/mode_9_param_gated_seed_42/config.json b/logs_svd_gated/mode_9_param_gated_seed_42/config.json new file mode 100644 index 0000000000000000000000000000000000000000..1b6b4c5e5f1f8d82b298cd58a6b381b26d9c5e5a --- /dev/null +++ b/logs_svd_gated/mode_9_param_gated_seed_42/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 42, + "optimizer_mode": 9, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "2ef6219a-70ab-421d-8abf-382e626308b3", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_9_param_gated_seed_42/training_log_2ef6219a-70ab-421d-8abf-382e626308b3.txt b/logs_svd_gated/mode_9_param_gated_seed_42/training_log_2ef6219a-70ab-421d-8abf-382e626308b3.txt new file mode 100644 index 0000000000000000000000000000000000000000..ca39745a29dd675e96aa93dd857f153c821ca2a5 --- /dev/null +++ b/logs_svd_gated/mode_9_param_gated_seed_42/training_log_2ef6219a-70ab-421d-8abf-382e626308b3.txt @@ -0,0 +1,2926 @@ +[2025-08-22 18:07:05] [Rank 0] PRINT: --- Script Start: Fri Aug 22 18:07:05 2025 --- +[2025-08-22 18:07:05] [Rank 0] PRINT: --- Script Start: Fri Aug 22 18:07:05 2025 --- +[2025-08-22 18:07:05] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=9, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 18:07:05] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=42, optimizer_mode=9, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 18:07:05] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 18:07:05] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 18:07:05] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 18:07:05] [Rank 0] PRINT: Using fixed seed: 42 +[2025-08-22 18:07:05] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_9_param_gated_seed_42 +[2025-08-22 18:07:05] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_9_param_gated_seed_42 +[2025-08-22 18:07:05] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
(other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + 
mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 18:07:05] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 18:07:05] [Rank 0] PRINT: Constructing model... +[2025-08-22 18:07:05] [Rank 0] PRINT: Constructing model... +[2025-08-22 18:07:07] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 18:07:07] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 18:07:07] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 18:07:07] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 18:07:07] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 18:07:07] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 18:07:07] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 9 +[2025-08-22 18:07:07] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 9 +[2025-08-22 18:07:07] [Rank 0] PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: 0.05). +[2025-08-22 18:07:07] [Rank 0] PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: 0.05). +[2025-08-22 18:07:07] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 18:07:07] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 18:07:07] [Rank 0] PRINT: Muon optimizer is active with 47 parameters. +[2025-08-22 18:07:07] [Rank 0] PRINT: Muon optimizer is active with 47 parameters. +[2025-08-22 18:07:07] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 18:07:07] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 18:07:07] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 18:07:07] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 18:07:07] [Rank 0] PRINT: Starting warmup... +[2025-08-22 18:07:07] [Rank 0] PRINT: Starting warmup... +[2025-08-22 18:07:51] [Rank 0] PRINT: Warmup complete. +[2025-08-22 18:07:51] [Rank 0] PRINT: Warmup complete. +[2025-08-22 18:07:51] [Rank 0] PRINT: Starting training... +[2025-08-22 18:07:51] [Rank 0] PRINT: Starting training... 
+[2025-08-22 18:07:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:07:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:08:09] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 18:08:09] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.29 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.8,q75/q25=2.36 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 18:08:11] [Rank 0] step:21/10000 train_time:1838ms step_avg:87.52ms +[2025-08-22 18:08:11] [Rank 0] step:21/10000 train_time:1838ms step_avg:87.52ms +[2025-08-22 18:08:13] [Rank 0] step:41/10000 train_time:3631ms step_avg:88.55ms +[2025-08-22 18:08:13] [Rank 0] step:41/10000 train_time:3631ms step_avg:88.55ms +[2025-08-22 18:08:15] [Rank 0] step:61/10000 train_time:5424ms step_avg:88.92ms +[2025-08-22 18:08:15] [Rank 0] step:61/10000 train_time:5424ms step_avg:88.92ms +[2025-08-22 18:08:16] [Rank 0] step:81/10000 train_time:7220ms step_avg:89.13ms +[2025-08-22 18:08:16] [Rank 0] step:81/10000 train_time:7220ms step_avg:89.13ms +[2025-08-22 18:08:18] [Rank 0] step:101/10000 train_time:9019ms step_avg:89.30ms +[2025-08-22 18:08:18] [Rank 0] step:101/10000 train_time:9019ms step_avg:89.30ms +[2025-08-22 18:08:20] [Rank 0] step:121/10000 train_time:10818ms step_avg:89.40ms +[2025-08-22 18:08:20] [Rank 0] step:121/10000 
train_time:10818ms step_avg:89.40ms +[2025-08-22 18:08:22] [Rank 0] step:141/10000 train_time:12617ms step_avg:89.48ms +[2025-08-22 18:08:22] [Rank 0] step:141/10000 train_time:12617ms step_avg:89.48ms +[2025-08-22 18:08:24] [Rank 0] step:161/10000 train_time:14418ms step_avg:89.55ms +[2025-08-22 18:08:24] [Rank 0] step:161/10000 train_time:14418ms step_avg:89.55ms +[2025-08-22 18:08:25] [Rank 0] step:181/10000 train_time:16219ms step_avg:89.61ms +[2025-08-22 18:08:25] [Rank 0] step:181/10000 train_time:16219ms step_avg:89.61ms +[2025-08-22 18:08:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:08:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:08:41] [Rank 0] PRINT: step:200/10000 val_loss:5.6513 svd_entropy: attn_qk:H=0.4748,top10E=0.72,eRank=37.3,q75/q25=27.40 attn_vo:H=0.6447,top10E=0.43,eRank=150.1,q75/q25=20.62 mlp_w1:H=0.9346,top10E=0.06,eRank=507.6,q75/q25=6.14 mlp_w2:H=0.9121,top10E=0.08,eRank=447.9,q75/q25=8.81 vo_prod:H=0.3804,top10E=0.78,eRank=27.3,q75/q25=186.37 train_time:18026ms step_avg:90.13ms +[2025-08-22 18:08:41] [Rank 0] PRINT: step:200/10000 val_loss:5.6513 svd_entropy: attn_qk:H=0.4748,top10E=0.72,eRank=37.3,q75/q25=27.40 attn_vo:H=0.6447,top10E=0.43,eRank=150.1,q75/q25=20.62 mlp_w1:H=0.9346,top10E=0.06,eRank=507.6,q75/q25=6.14 mlp_w2:H=0.9121,top10E=0.08,eRank=447.9,q75/q25=8.81 vo_prod:H=0.3804,top10E=0.78,eRank=27.3,q75/q25=186.37 train_time:18026ms step_avg:90.13ms +[2025-08-22 18:08:41] [Rank 0] step:201/10000 train_time:18047ms step_avg:89.79ms +[2025-08-22 18:08:41] [Rank 0] step:201/10000 train_time:18047ms step_avg:89.79ms +[2025-08-22 18:08:43] [Rank 0] step:221/10000 train_time:19836ms step_avg:89.76ms +[2025-08-22 18:08:43] [Rank 0] step:221/10000 train_time:19836ms step_avg:89.76ms +[2025-08-22 18:08:45] [Rank 0] step:241/10000 
train_time:21721ms step_avg:90.13ms +[2025-08-22 18:08:45] [Rank 0] step:241/10000 train_time:21721ms step_avg:90.13ms +[2025-08-22 18:08:46] [Rank 0] step:261/10000 train_time:23605ms step_avg:90.44ms +[2025-08-22 18:08:46] [Rank 0] step:261/10000 train_time:23605ms step_avg:90.44ms +[2025-08-22 18:08:48] [Rank 0] step:281/10000 train_time:25400ms step_avg:90.39ms +[2025-08-22 18:08:48] [Rank 0] step:281/10000 train_time:25400ms step_avg:90.39ms +[2025-08-22 18:08:50] [Rank 0] step:301/10000 train_time:27198ms step_avg:90.36ms +[2025-08-22 18:08:50] [Rank 0] step:301/10000 train_time:27198ms step_avg:90.36ms +[2025-08-22 18:08:52] [Rank 0] step:321/10000 train_time:28997ms step_avg:90.33ms +[2025-08-22 18:08:52] [Rank 0] step:321/10000 train_time:28997ms step_avg:90.33ms +[2025-08-22 18:08:54] [Rank 0] step:341/10000 train_time:30796ms step_avg:90.31ms +[2025-08-22 18:08:54] [Rank 0] step:341/10000 train_time:30796ms step_avg:90.31ms +[2025-08-22 18:08:55] [Rank 0] step:361/10000 train_time:32596ms step_avg:90.29ms +[2025-08-22 18:08:55] [Rank 0] step:361/10000 train_time:32596ms step_avg:90.29ms +[2025-08-22 18:08:57] [Rank 0] step:381/10000 train_time:34395ms step_avg:90.28ms +[2025-08-22 18:08:57] [Rank 0] step:381/10000 train_time:34395ms step_avg:90.28ms +[2025-08-22 18:08:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:08:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:09:13] [Rank 0] PRINT: step:400/10000 val_loss:5.2231 svd_entropy: attn_qk:H=0.4959,top10E=0.69,eRank=41.4,q75/q25=34.07 attn_vo:H=0.6783,top10E=0.39,eRank=176.5,q75/q25=20.55 mlp_w1:H=0.9300,top10E=0.07,eRank=497.1,q75/q25=6.04 mlp_w2:H=0.9205,top10E=0.07,eRank=473.7,q75/q25=9.07 vo_prod:H=0.4443,top10E=0.71,eRank=46.2,q75/q25=217.55 train_time:36201ms step_avg:90.50ms +[2025-08-22 18:09:13] [Rank 0] PRINT: step:400/10000 val_loss:5.2231 svd_entropy: attn_qk:H=0.4959,top10E=0.69,eRank=41.4,q75/q25=34.07 attn_vo:H=0.6783,top10E=0.39,eRank=176.5,q75/q25=20.55 mlp_w1:H=0.9300,top10E=0.07,eRank=497.1,q75/q25=6.04 mlp_w2:H=0.9205,top10E=0.07,eRank=473.7,q75/q25=9.07 vo_prod:H=0.4443,top10E=0.71,eRank=46.2,q75/q25=217.55 train_time:36201ms step_avg:90.50ms +[2025-08-22 18:09:13] [Rank 0] step:401/10000 train_time:36223ms step_avg:90.33ms +[2025-08-22 18:09:13] [Rank 0] step:401/10000 train_time:36223ms step_avg:90.33ms +[2025-08-22 18:09:15] [Rank 0] step:421/10000 train_time:38015ms step_avg:90.30ms +[2025-08-22 18:09:15] [Rank 0] step:421/10000 train_time:38015ms step_avg:90.30ms +[2025-08-22 18:09:16] [Rank 0] step:441/10000 train_time:39808ms step_avg:90.27ms +[2025-08-22 18:09:16] [Rank 0] step:441/10000 train_time:39808ms step_avg:90.27ms +[2025-08-22 18:09:18] [Rank 0] step:461/10000 train_time:41602ms step_avg:90.24ms +[2025-08-22 18:09:18] [Rank 0] step:461/10000 train_time:41602ms step_avg:90.24ms +[2025-08-22 18:09:20] [Rank 0] step:481/10000 train_time:43398ms step_avg:90.22ms +[2025-08-22 18:09:20] [Rank 0] step:481/10000 train_time:43398ms step_avg:90.22ms +[2025-08-22 18:09:22] [Rank 0] step:501/10000 train_time:45193ms step_avg:90.21ms +[2025-08-22 18:09:22] [Rank 0] step:501/10000 train_time:45193ms step_avg:90.21ms +[2025-08-22 18:09:24] [Rank 0] step:521/10000 train_time:46991ms step_avg:90.19ms +[2025-08-22 18:09:24] [Rank 0] step:521/10000 train_time:46991ms step_avg:90.19ms +[2025-08-22 18:09:25] [Rank 0] step:541/10000 
train_time:48789ms step_avg:90.18ms +[2025-08-22 18:09:25] [Rank 0] step:541/10000 train_time:48789ms step_avg:90.18ms +[2025-08-22 18:09:27] [Rank 0] step:561/10000 train_time:50588ms step_avg:90.17ms +[2025-08-22 18:09:27] [Rank 0] step:561/10000 train_time:50588ms step_avg:90.17ms +[2025-08-22 18:09:29] [Rank 0] step:581/10000 train_time:52388ms step_avg:90.17ms +[2025-08-22 18:09:29] [Rank 0] step:581/10000 train_time:52388ms step_avg:90.17ms +[2025-08-22 18:09:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:09:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:09:45] [Rank 0] PRINT: step:600/10000 val_loss:5.0038 svd_entropy: attn_qk:H=0.4951,top10E=0.67,eRank=46.9,q75/q25=54.50 attn_vo:H=0.6950,top10E=0.38,eRank=182.4,q75/q25=21.80 mlp_w1:H=0.9272,top10E=0.07,eRank=490.3,q75/q25=6.14 mlp_w2:H=0.9172,top10E=0.08,eRank=467.5,q75/q25=9.51 vo_prod:H=0.4616,top10E=0.68,eRank=51.0,q75/q25=247.81 train_time:54194ms step_avg:90.32ms +[2025-08-22 18:09:45] [Rank 0] PRINT: step:600/10000 val_loss:5.0038 svd_entropy: attn_qk:H=0.4951,top10E=0.67,eRank=46.9,q75/q25=54.50 attn_vo:H=0.6950,top10E=0.38,eRank=182.4,q75/q25=21.80 mlp_w1:H=0.9272,top10E=0.07,eRank=490.3,q75/q25=6.14 mlp_w2:H=0.9172,top10E=0.08,eRank=467.5,q75/q25=9.51 vo_prod:H=0.4616,top10E=0.68,eRank=51.0,q75/q25=247.81 train_time:54194ms step_avg:90.32ms +[2025-08-22 18:09:45] [Rank 0] step:601/10000 train_time:54214ms step_avg:90.21ms +[2025-08-22 18:09:45] [Rank 0] step:601/10000 train_time:54214ms step_avg:90.21ms +[2025-08-22 18:09:47] [Rank 0] step:621/10000 train_time:55995ms step_avg:90.17ms +[2025-08-22 18:09:47] [Rank 0] step:621/10000 train_time:55995ms step_avg:90.17ms +[2025-08-22 18:09:48] [Rank 0] step:641/10000 train_time:57839ms step_avg:90.23ms +[2025-08-22 18:09:48] [Rank 0] step:641/10000 
train_time:57839ms step_avg:90.23ms +[2025-08-22 18:09:50] [Rank 0] step:661/10000 train_time:59734ms step_avg:90.37ms +[2025-08-22 18:09:50] [Rank 0] step:661/10000 train_time:59734ms step_avg:90.37ms +[2025-08-22 18:09:52] [Rank 0] step:681/10000 train_time:61529ms step_avg:90.35ms +[2025-08-22 18:09:52] [Rank 0] step:681/10000 train_time:61529ms step_avg:90.35ms +[2025-08-22 18:09:54] [Rank 0] step:701/10000 train_time:63325ms step_avg:90.34ms +[2025-08-22 18:09:54] [Rank 0] step:701/10000 train_time:63325ms step_avg:90.34ms +[2025-08-22 18:09:56] [Rank 0] step:721/10000 train_time:65122ms step_avg:90.32ms +[2025-08-22 18:09:56] [Rank 0] step:721/10000 train_time:65122ms step_avg:90.32ms +[2025-08-22 18:09:57] [Rank 0] step:741/10000 train_time:66920ms step_avg:90.31ms +[2025-08-22 18:09:57] [Rank 0] step:741/10000 train_time:66920ms step_avg:90.31ms +[2025-08-22 18:09:59] [Rank 0] step:761/10000 train_time:68732ms step_avg:90.32ms +[2025-08-22 18:09:59] [Rank 0] step:761/10000 train_time:68732ms step_avg:90.32ms +[2025-08-22 18:10:01] [Rank 0] step:781/10000 train_time:70605ms step_avg:90.40ms +[2025-08-22 18:10:01] [Rank 0] step:781/10000 train_time:70605ms step_avg:90.40ms +[2025-08-22 18:10:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:10:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:10:17] [Rank 0] PRINT: step:800/10000 val_loss:4.7118 svd_entropy: attn_qk:H=0.5109,top10E=0.65,eRank=51.8,q75/q25=64.53 attn_vo:H=0.6924,top10E=0.38,eRank=181.2,q75/q25=22.53 mlp_w1:H=0.9219,top10E=0.08,eRank=476.4,q75/q25=6.22 mlp_w2:H=0.9089,top10E=0.10,eRank=449.0,q75/q25=9.75 vo_prod:H=0.4691,top10E=0.68,eRank=53.3,q75/q25=260.91 train_time:72363ms step_avg:90.45ms +[2025-08-22 18:10:17] [Rank 0] PRINT: step:800/10000 val_loss:4.7118 svd_entropy: attn_qk:H=0.5109,top10E=0.65,eRank=51.8,q75/q25=64.53 attn_vo:H=0.6924,top10E=0.38,eRank=181.2,q75/q25=22.53 mlp_w1:H=0.9219,top10E=0.08,eRank=476.4,q75/q25=6.22 mlp_w2:H=0.9089,top10E=0.10,eRank=449.0,q75/q25=9.75 vo_prod:H=0.4691,top10E=0.68,eRank=53.3,q75/q25=260.91 train_time:72363ms step_avg:90.45ms +[2025-08-22 18:10:17] [Rank 0] step:801/10000 train_time:72383ms step_avg:90.37ms +[2025-08-22 18:10:17] [Rank 0] step:801/10000 train_time:72383ms step_avg:90.37ms +[2025-08-22 18:10:19] [Rank 0] step:821/10000 train_time:74199ms step_avg:90.38ms +[2025-08-22 18:10:19] [Rank 0] step:821/10000 train_time:74199ms step_avg:90.38ms +[2025-08-22 18:10:21] [Rank 0] step:841/10000 train_time:76007ms step_avg:90.38ms +[2025-08-22 18:10:21] [Rank 0] step:841/10000 train_time:76007ms step_avg:90.38ms +[2025-08-22 18:10:22] [Rank 0] step:861/10000 train_time:77816ms step_avg:90.38ms +[2025-08-22 18:10:22] [Rank 0] step:861/10000 train_time:77816ms step_avg:90.38ms +[2025-08-22 18:10:24] [Rank 0] step:881/10000 train_time:79628ms step_avg:90.38ms +[2025-08-22 18:10:24] [Rank 0] step:881/10000 train_time:79628ms step_avg:90.38ms +[2025-08-22 18:10:26] [Rank 0] step:901/10000 train_time:81438ms step_avg:90.39ms +[2025-08-22 18:10:26] [Rank 0] step:901/10000 train_time:81438ms step_avg:90.39ms +[2025-08-22 18:10:28] [Rank 0] step:921/10000 train_time:83251ms step_avg:90.39ms +[2025-08-22 18:10:28] [Rank 0] step:921/10000 train_time:83251ms step_avg:90.39ms +[2025-08-22 18:10:30] [Rank 0] step:941/10000 
train_time:85066ms step_avg:90.40ms +[2025-08-22 18:10:30] [Rank 0] step:941/10000 train_time:85066ms step_avg:90.40ms +[2025-08-22 18:10:31] [Rank 0] step:961/10000 train_time:86882ms step_avg:90.41ms +[2025-08-22 18:10:31] [Rank 0] step:961/10000 train_time:86882ms step_avg:90.41ms +[2025-08-22 18:10:33] [Rank 0] step:981/10000 train_time:88698ms step_avg:90.42ms +[2025-08-22 18:10:33] [Rank 0] step:981/10000 train_time:88698ms step_avg:90.42ms +[2025-08-22 18:10:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:10:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:10:49] [Rank 0] PRINT: step:1000/10000 val_loss:4.5888 svd_entropy: attn_qk:H=0.5133,top10E=0.65,eRank=53.1,q75/q25=69.04 attn_vo:H=0.6944,top10E=0.38,eRank=182.5,q75/q25=22.80 mlp_w1:H=0.9197,top10E=0.09,eRank=471.0,q75/q25=6.31 mlp_w2:H=0.9057,top10E=0.10,eRank=441.8,q75/q25=9.94 vo_prod:H=0.4675,top10E=0.68,eRank=54.3,q75/q25=262.18 train_time:90520ms step_avg:90.52ms +[2025-08-22 18:10:49] [Rank 0] PRINT: step:1000/10000 val_loss:4.5888 svd_entropy: attn_qk:H=0.5133,top10E=0.65,eRank=53.1,q75/q25=69.04 attn_vo:H=0.6944,top10E=0.38,eRank=182.5,q75/q25=22.80 mlp_w1:H=0.9197,top10E=0.09,eRank=471.0,q75/q25=6.31 mlp_w2:H=0.9057,top10E=0.10,eRank=441.8,q75/q25=9.94 vo_prod:H=0.4675,top10E=0.68,eRank=54.3,q75/q25=262.18 train_time:90520ms step_avg:90.52ms +[2025-08-22 18:10:49] [Rank 0] step:1001/10000 train_time:90541ms step_avg:90.45ms +[2025-08-22 18:10:49] [Rank 0] step:1001/10000 train_time:90541ms step_avg:90.45ms +[2025-08-22 18:10:51] [Rank 0] step:1021/10000 train_time:92426ms step_avg:90.53ms +[2025-08-22 18:10:51] [Rank 0] step:1021/10000 train_time:92426ms step_avg:90.53ms +[2025-08-22 18:10:53] [Rank 0] step:1041/10000 train_time:94235ms step_avg:90.52ms +[2025-08-22 18:10:53] [Rank 0] step:1041/10000 
train_time:94235ms step_avg:90.52ms +[2025-08-22 18:10:55] [Rank 0] step:1061/10000 train_time:96140ms step_avg:90.61ms +[2025-08-22 18:10:55] [Rank 0] step:1061/10000 train_time:96140ms step_avg:90.61ms +[2025-08-22 18:10:56] [Rank 0] step:1081/10000 train_time:97950ms step_avg:90.61ms +[2025-08-22 18:10:56] [Rank 0] step:1081/10000 train_time:97950ms step_avg:90.61ms +[2025-08-22 18:10:58] [Rank 0] step:1101/10000 train_time:99764ms step_avg:90.61ms +[2025-08-22 18:10:58] [Rank 0] step:1101/10000 train_time:99764ms step_avg:90.61ms +[2025-08-22 18:11:00] [Rank 0] step:1121/10000 train_time:101578ms step_avg:90.61ms +[2025-08-22 18:11:00] [Rank 0] step:1121/10000 train_time:101578ms step_avg:90.61ms +[2025-08-22 18:11:02] [Rank 0] step:1141/10000 train_time:103392ms step_avg:90.61ms +[2025-08-22 18:11:02] [Rank 0] step:1141/10000 train_time:103392ms step_avg:90.61ms +[2025-08-22 18:11:04] [Rank 0] step:1161/10000 train_time:105207ms step_avg:90.62ms +[2025-08-22 18:11:04] [Rank 0] step:1161/10000 train_time:105207ms step_avg:90.62ms +[2025-08-22 18:11:05] [Rank 0] step:1181/10000 train_time:107021ms step_avg:90.62ms +[2025-08-22 18:11:05] [Rank 0] step:1181/10000 train_time:107021ms step_avg:90.62ms +[2025-08-22 18:11:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:11:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:11:21] [Rank 0] PRINT: step:1200/10000 val_loss:4.4937 svd_entropy: attn_qk:H=0.5150,top10E=0.64,eRank=53.8,q75/q25=74.30 attn_vo:H=0.6941,top10E=0.39,eRank=181.6,q75/q25=23.40 mlp_w1:H=0.9153,top10E=0.10,eRank=460.6,q75/q25=6.37 mlp_w2:H=0.8996,top10E=0.12,eRank=429.9,q75/q25=9.98 vo_prod:H=0.4648,top10E=0.69,eRank=56.3,q75/q25=268.30 train_time:108841ms step_avg:90.70ms +[2025-08-22 18:11:21] [Rank 0] PRINT: step:1200/10000 val_loss:4.4937 svd_entropy: attn_qk:H=0.5150,top10E=0.64,eRank=53.8,q75/q25=74.30 attn_vo:H=0.6941,top10E=0.39,eRank=181.6,q75/q25=23.40 mlp_w1:H=0.9153,top10E=0.10,eRank=460.6,q75/q25=6.37 mlp_w2:H=0.8996,top10E=0.12,eRank=429.9,q75/q25=9.98 vo_prod:H=0.4648,top10E=0.69,eRank=56.3,q75/q25=268.30 train_time:108841ms step_avg:90.70ms +[2025-08-22 18:11:21] [Rank 0] step:1201/10000 train_time:108862ms step_avg:90.64ms +[2025-08-22 18:11:21] [Rank 0] step:1201/10000 train_time:108862ms step_avg:90.64ms +[2025-08-22 18:11:23] [Rank 0] step:1221/10000 train_time:110662ms step_avg:90.63ms +[2025-08-22 18:11:23] [Rank 0] step:1221/10000 train_time:110662ms step_avg:90.63ms +[2025-08-22 18:11:25] [Rank 0] step:1241/10000 train_time:112470ms step_avg:90.63ms +[2025-08-22 18:11:25] [Rank 0] step:1241/10000 train_time:112470ms step_avg:90.63ms +[2025-08-22 18:11:27] [Rank 0] step:1261/10000 train_time:114279ms step_avg:90.63ms +[2025-08-22 18:11:27] [Rank 0] step:1261/10000 train_time:114279ms step_avg:90.63ms +[2025-08-22 18:11:28] [Rank 0] step:1281/10000 train_time:116090ms step_avg:90.62ms +[2025-08-22 18:11:28] [Rank 0] step:1281/10000 train_time:116090ms step_avg:90.62ms +[2025-08-22 18:11:30] [Rank 0] step:1301/10000 train_time:117901ms step_avg:90.62ms +[2025-08-22 18:11:30] [Rank 0] step:1301/10000 train_time:117901ms step_avg:90.62ms +[2025-08-22 18:11:32] [Rank 0] step:1321/10000 train_time:119716ms step_avg:90.63ms +[2025-08-22 18:11:32] [Rank 0] step:1321/10000 train_time:119716ms step_avg:90.63ms +[2025-08-22 18:11:34] 
[Rank 0] step:1341/10000 train_time:121530ms step_avg:90.63ms +[2025-08-22 18:11:34] [Rank 0] step:1341/10000 train_time:121530ms step_avg:90.63ms +[2025-08-22 18:11:36] [Rank 0] step:1361/10000 train_time:123345ms step_avg:90.63ms +[2025-08-22 18:11:36] [Rank 0] step:1361/10000 train_time:123345ms step_avg:90.63ms +[2025-08-22 18:11:38] [Rank 0] step:1381/10000 train_time:125162ms step_avg:90.63ms +[2025-08-22 18:11:38] [Rank 0] step:1381/10000 train_time:125162ms step_avg:90.63ms +[2025-08-22 18:11:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:11:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:11:53] [Rank 0] PRINT: step:1400/10000 val_loss:4.4430 svd_entropy: attn_qk:H=0.5055,top10E=0.65,eRank=53.3,q75/q25=78.77 attn_vo:H=0.6929,top10E=0.39,eRank=181.5,q75/q25=23.50 mlp_w1:H=0.9138,top10E=0.10,eRank=457.0,q75/q25=6.45 mlp_w2:H=0.8971,top10E=0.12,eRank=424.7,q75/q25=10.10 vo_prod:H=0.4633,top10E=0.69,eRank=56.9,q75/q25=268.81 train_time:126985ms step_avg:90.70ms +[2025-08-22 18:11:53] [Rank 0] PRINT: step:1400/10000 val_loss:4.4430 svd_entropy: attn_qk:H=0.5055,top10E=0.65,eRank=53.3,q75/q25=78.77 attn_vo:H=0.6929,top10E=0.39,eRank=181.5,q75/q25=23.50 mlp_w1:H=0.9138,top10E=0.10,eRank=457.0,q75/q25=6.45 mlp_w2:H=0.8971,top10E=0.12,eRank=424.7,q75/q25=10.10 vo_prod:H=0.4633,top10E=0.69,eRank=56.9,q75/q25=268.81 train_time:126985ms step_avg:90.70ms +[2025-08-22 18:11:53] [Rank 0] step:1401/10000 train_time:127005ms step_avg:90.65ms +[2025-08-22 18:11:53] [Rank 0] step:1401/10000 train_time:127005ms step_avg:90.65ms +[2025-08-22 18:11:55] [Rank 0] step:1421/10000 train_time:128886ms step_avg:90.70ms +[2025-08-22 18:11:55] [Rank 0] step:1421/10000 train_time:128886ms step_avg:90.70ms +[2025-08-22 18:11:57] [Rank 0] step:1441/10000 train_time:130749ms step_avg:90.73ms 
+[2025-08-22 18:11:57] [Rank 0] step:1441/10000 train_time:130749ms step_avg:90.73ms +[2025-08-22 18:11:59] [Rank 0] step:1461/10000 train_time:132562ms step_avg:90.73ms +[2025-08-22 18:11:59] [Rank 0] step:1461/10000 train_time:132562ms step_avg:90.73ms +[2025-08-22 18:12:00] [Rank 0] step:1481/10000 train_time:134380ms step_avg:90.74ms +[2025-08-22 18:12:00] [Rank 0] step:1481/10000 train_time:134380ms step_avg:90.74ms +[2025-08-22 18:12:02] [Rank 0] step:1501/10000 train_time:136206ms step_avg:90.74ms +[2025-08-22 18:12:02] [Rank 0] step:1501/10000 train_time:136206ms step_avg:90.74ms +[2025-08-22 18:12:04] [Rank 0] step:1521/10000 train_time:138036ms step_avg:90.75ms +[2025-08-22 18:12:04] [Rank 0] step:1521/10000 train_time:138036ms step_avg:90.75ms +[2025-08-22 18:12:06] [Rank 0] step:1541/10000 train_time:139863ms step_avg:90.76ms +[2025-08-22 18:12:06] [Rank 0] step:1541/10000 train_time:139863ms step_avg:90.76ms +[2025-08-22 18:12:08] [Rank 0] step:1561/10000 train_time:141693ms step_avg:90.77ms +[2025-08-22 18:12:08] [Rank 0] step:1561/10000 train_time:141693ms step_avg:90.77ms +[2025-08-22 18:12:10] [Rank 0] step:1581/10000 train_time:143566ms step_avg:90.81ms +[2025-08-22 18:12:10] [Rank 0] step:1581/10000 train_time:143566ms step_avg:90.81ms +[2025-08-22 18:12:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:12:11] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:12:25] [Rank 0] PRINT: step:1600/10000 val_loss:4.3590 svd_entropy: attn_qk:H=0.5036,top10E=0.65,eRank=53.4,q75/q25=61.91 attn_vo:H=0.6923,top10E=0.39,eRank=181.3,q75/q25=23.37 mlp_w1:H=0.9114,top10E=0.10,eRank=451.5,q75/q25=6.56 mlp_w2:H=0.8944,top10E=0.13,eRank=419.6,q75/q25=10.24 vo_prod:H=0.4605,top10E=0.69,eRank=57.7,q75/q25=260.39 train_time:145403ms step_avg:90.88ms +[2025-08-22 18:12:25] [Rank 0] PRINT: step:1600/10000 val_loss:4.3590 svd_entropy: attn_qk:H=0.5036,top10E=0.65,eRank=53.4,q75/q25=61.91 attn_vo:H=0.6923,top10E=0.39,eRank=181.3,q75/q25=23.37 mlp_w1:H=0.9114,top10E=0.10,eRank=451.5,q75/q25=6.56 mlp_w2:H=0.8944,top10E=0.13,eRank=419.6,q75/q25=10.24 vo_prod:H=0.4605,top10E=0.69,eRank=57.7,q75/q25=260.39 train_time:145403ms step_avg:90.88ms +[2025-08-22 18:12:25] [Rank 0] step:1601/10000 train_time:145423ms step_avg:90.83ms +[2025-08-22 18:12:25] [Rank 0] step:1601/10000 train_time:145423ms step_avg:90.83ms +[2025-08-22 18:12:27] [Rank 0] step:1621/10000 train_time:147248ms step_avg:90.84ms +[2025-08-22 18:12:27] [Rank 0] step:1621/10000 train_time:147248ms step_avg:90.84ms +[2025-08-22 18:12:29] [Rank 0] step:1641/10000 train_time:149069ms step_avg:90.84ms +[2025-08-22 18:12:29] [Rank 0] step:1641/10000 train_time:149069ms step_avg:90.84ms +[2025-08-22 18:12:31] [Rank 0] step:1661/10000 train_time:150890ms step_avg:90.84ms +[2025-08-22 18:12:31] [Rank 0] step:1661/10000 train_time:150890ms step_avg:90.84ms +[2025-08-22 18:12:32] [Rank 0] step:1681/10000 train_time:152715ms step_avg:90.85ms +[2025-08-22 18:12:32] [Rank 0] step:1681/10000 train_time:152715ms step_avg:90.85ms +[2025-08-22 18:12:34] [Rank 0] step:1701/10000 train_time:154539ms step_avg:90.85ms +[2025-08-22 18:12:34] [Rank 0] step:1701/10000 train_time:154539ms step_avg:90.85ms +[2025-08-22 18:12:36] [Rank 0] step:1721/10000 train_time:156363ms step_avg:90.86ms +[2025-08-22 18:12:36] [Rank 0] step:1721/10000 train_time:156363ms step_avg:90.86ms +[2025-08-22 18:12:38] 
[Rank 0] step:1741/10000 train_time:158188ms step_avg:90.86ms +[2025-08-22 18:12:38] [Rank 0] step:1741/10000 train_time:158188ms step_avg:90.86ms +[2025-08-22 18:12:40] [Rank 0] step:1761/10000 train_time:160014ms step_avg:90.87ms +[2025-08-22 18:12:40] [Rank 0] step:1761/10000 train_time:160014ms step_avg:90.87ms +[2025-08-22 18:12:42] [Rank 0] step:1781/10000 train_time:161841ms step_avg:90.87ms +[2025-08-22 18:12:42] [Rank 0] step:1781/10000 train_time:161841ms step_avg:90.87ms +[2025-08-22 18:12:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:12:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:12:57] [Rank 0] PRINT: step:1800/10000 val_loss:4.3031 svd_entropy: attn_qk:H=0.5104,top10E=0.64,eRank=54.7,q75/q25=63.16 attn_vo:H=0.6903,top10E=0.40,eRank=179.8,q75/q25=23.66 mlp_w1:H=0.9028,top10E=0.11,eRank=432.0,q75/q25=6.83 mlp_w2:H=0.8866,top10E=0.14,eRank=404.8,q75/q25=10.58 vo_prod:H=0.4655,top10E=0.69,eRank=59.1,q75/q25=265.94 train_time:163673ms step_avg:90.93ms +[2025-08-22 18:12:57] [Rank 0] PRINT: step:1800/10000 val_loss:4.3031 svd_entropy: attn_qk:H=0.5104,top10E=0.64,eRank=54.7,q75/q25=63.16 attn_vo:H=0.6903,top10E=0.40,eRank=179.8,q75/q25=23.66 mlp_w1:H=0.9028,top10E=0.11,eRank=432.0,q75/q25=6.83 mlp_w2:H=0.8866,top10E=0.14,eRank=404.8,q75/q25=10.58 vo_prod:H=0.4655,top10E=0.69,eRank=59.1,q75/q25=265.94 train_time:163673ms step_avg:90.93ms +[2025-08-22 18:12:57] [Rank 0] step:1801/10000 train_time:163693ms step_avg:90.89ms +[2025-08-22 18:12:57] [Rank 0] step:1801/10000 train_time:163693ms step_avg:90.89ms +[2025-08-22 18:12:59] [Rank 0] step:1821/10000 train_time:165513ms step_avg:90.89ms +[2025-08-22 18:12:59] [Rank 0] step:1821/10000 train_time:165513ms step_avg:90.89ms +[2025-08-22 18:13:01] [Rank 0] step:1841/10000 train_time:167384ms step_avg:90.92ms 
+[2025-08-22 18:13:01] [Rank 0] step:1841/10000 train_time:167384ms step_avg:90.92ms +[2025-08-22 18:13:03] [Rank 0] step:1861/10000 train_time:169205ms step_avg:90.92ms +[2025-08-22 18:13:03] [Rank 0] step:1861/10000 train_time:169205ms step_avg:90.92ms +[2025-08-22 18:13:04] [Rank 0] step:1881/10000 train_time:171028ms step_avg:90.92ms +[2025-08-22 18:13:04] [Rank 0] step:1881/10000 train_time:171028ms step_avg:90.92ms +[2025-08-22 18:13:06] [Rank 0] step:1901/10000 train_time:172852ms step_avg:90.93ms +[2025-08-22 18:13:06] [Rank 0] step:1901/10000 train_time:172852ms step_avg:90.93ms +[2025-08-22 18:13:08] [Rank 0] step:1921/10000 train_time:174676ms step_avg:90.93ms +[2025-08-22 18:13:08] [Rank 0] step:1921/10000 train_time:174676ms step_avg:90.93ms +[2025-08-22 18:13:10] [Rank 0] step:1941/10000 train_time:176499ms step_avg:90.93ms +[2025-08-22 18:13:10] [Rank 0] step:1941/10000 train_time:176499ms step_avg:90.93ms +[2025-08-22 18:13:12] [Rank 0] step:1961/10000 train_time:178324ms step_avg:90.94ms +[2025-08-22 18:13:12] [Rank 0] step:1961/10000 train_time:178324ms step_avg:90.94ms +[2025-08-22 18:13:13] [Rank 0] step:1981/10000 train_time:180149ms step_avg:90.94ms +[2025-08-22 18:13:13] [Rank 0] step:1981/10000 train_time:180149ms step_avg:90.94ms +[2025-08-22 18:13:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:13:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:13:29] [Rank 0] PRINT: step:2000/10000 val_loss:4.2749 svd_entropy: attn_qk:H=0.5135,top10E=0.64,eRank=55.5,q75/q25=64.73 attn_vo:H=0.6905,top10E=0.40,eRank=179.6,q75/q25=23.80 mlp_w1:H=0.9018,top10E=0.11,eRank=429.6,q75/q25=6.89 mlp_w2:H=0.8849,top10E=0.14,eRank=401.4,q75/q25=10.61 vo_prod:H=0.4687,top10E=0.69,eRank=59.7,q75/q25=265.99 train_time:181978ms step_avg:90.99ms +[2025-08-22 18:13:29] [Rank 0] PRINT: step:2000/10000 val_loss:4.2749 svd_entropy: attn_qk:H=0.5135,top10E=0.64,eRank=55.5,q75/q25=64.73 attn_vo:H=0.6905,top10E=0.40,eRank=179.6,q75/q25=23.80 mlp_w1:H=0.9018,top10E=0.11,eRank=429.6,q75/q25=6.89 mlp_w2:H=0.8849,top10E=0.14,eRank=401.4,q75/q25=10.61 vo_prod:H=0.4687,top10E=0.69,eRank=59.7,q75/q25=265.99 train_time:181978ms step_avg:90.99ms +[2025-08-22 18:13:29] [Rank 0] step:2001/10000 train_time:181999ms step_avg:90.95ms +[2025-08-22 18:13:29] [Rank 0] step:2001/10000 train_time:181999ms step_avg:90.95ms +[2025-08-22 18:13:31] [Rank 0] step:2021/10000 train_time:183809ms step_avg:90.95ms +[2025-08-22 18:13:31] [Rank 0] step:2021/10000 train_time:183809ms step_avg:90.95ms +[2025-08-22 18:13:33] [Rank 0] step:2041/10000 train_time:186293ms step_avg:91.28ms +[2025-08-22 18:13:33] [Rank 0] step:2041/10000 train_time:186293ms step_avg:91.28ms +[2025-08-22 18:13:35] [Rank 0] step:2061/10000 train_time:188113ms step_avg:91.27ms +[2025-08-22 18:13:35] [Rank 0] step:2061/10000 train_time:188113ms step_avg:91.27ms +[2025-08-22 18:13:37] [Rank 0] step:2081/10000 train_time:189936ms step_avg:91.27ms +[2025-08-22 18:13:37] [Rank 0] step:2081/10000 train_time:189936ms step_avg:91.27ms +[2025-08-22 18:13:39] [Rank 0] step:2101/10000 train_time:191759ms step_avg:91.27ms +[2025-08-22 18:13:39] [Rank 0] step:2101/10000 train_time:191759ms step_avg:91.27ms +[2025-08-22 18:13:41] [Rank 0] step:2121/10000 train_time:193585ms step_avg:91.27ms +[2025-08-22 18:13:41] [Rank 0] step:2121/10000 train_time:193585ms step_avg:91.27ms +[2025-08-22 18:13:42] 
[Rank 0] step:2141/10000 train_time:195411ms step_avg:91.27ms +[2025-08-22 18:13:42] [Rank 0] step:2141/10000 train_time:195411ms step_avg:91.27ms +[2025-08-22 18:13:44] [Rank 0] step:2161/10000 train_time:197238ms step_avg:91.27ms +[2025-08-22 18:13:44] [Rank 0] step:2161/10000 train_time:197238ms step_avg:91.27ms +[2025-08-22 18:13:46] [Rank 0] step:2181/10000 train_time:199077ms step_avg:91.28ms +[2025-08-22 18:13:46] [Rank 0] step:2181/10000 train_time:199077ms step_avg:91.28ms +[2025-08-22 18:13:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:13:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:14:01] [Rank 0] PRINT: step:2200/10000 val_loss:4.2325 svd_entropy: attn_qk:H=0.5097,top10E=0.64,eRank=55.6,q75/q25=65.85 attn_vo:H=0.6903,top10E=0.40,eRank=179.9,q75/q25=23.96 mlp_w1:H=0.9010,top10E=0.12,eRank=427.6,q75/q25=6.92 mlp_w2:H=0.8833,top10E=0.14,eRank=398.6,q75/q25=10.74 vo_prod:H=0.4760,top10E=0.68,eRank=61.1,q75/q25=267.26 train_time:200909ms step_avg:91.32ms +[2025-08-22 18:14:01] [Rank 0] PRINT: step:2200/10000 val_loss:4.2325 svd_entropy: attn_qk:H=0.5097,top10E=0.64,eRank=55.6,q75/q25=65.85 attn_vo:H=0.6903,top10E=0.40,eRank=179.9,q75/q25=23.96 mlp_w1:H=0.9010,top10E=0.12,eRank=427.6,q75/q25=6.92 mlp_w2:H=0.8833,top10E=0.14,eRank=398.6,q75/q25=10.74 vo_prod:H=0.4760,top10E=0.68,eRank=61.1,q75/q25=267.26 train_time:200909ms step_avg:91.32ms +[2025-08-22 18:14:02] [Rank 0] step:2201/10000 train_time:200930ms step_avg:91.29ms +[2025-08-22 18:14:02] [Rank 0] step:2201/10000 train_time:200930ms step_avg:91.29ms +[2025-08-22 18:14:04] [Rank 0] step:2221/10000 train_time:202863ms step_avg:91.34ms +[2025-08-22 18:14:04] [Rank 0] step:2221/10000 train_time:202863ms step_avg:91.34ms +[2025-08-22 18:14:05] [Rank 0] step:2241/10000 train_time:204725ms step_avg:91.35ms 
+[2025-08-22 18:14:05] [Rank 0] step:2241/10000 train_time:204725ms step_avg:91.35ms +[2025-08-22 18:14:07] [Rank 0] step:2261/10000 train_time:206594ms step_avg:91.37ms +[2025-08-22 18:14:07] [Rank 0] step:2261/10000 train_time:206594ms step_avg:91.37ms +[2025-08-22 18:14:09] [Rank 0] step:2281/10000 train_time:208467ms step_avg:91.39ms +[2025-08-22 18:14:09] [Rank 0] step:2281/10000 train_time:208467ms step_avg:91.39ms +[2025-08-22 18:14:11] [Rank 0] step:2301/10000 train_time:210337ms step_avg:91.41ms +[2025-08-22 18:14:11] [Rank 0] step:2301/10000 train_time:210337ms step_avg:91.41ms +[2025-08-22 18:14:13] [Rank 0] step:2321/10000 train_time:212210ms step_avg:91.43ms +[2025-08-22 18:14:13] [Rank 0] step:2321/10000 train_time:212210ms step_avg:91.43ms +[2025-08-22 18:14:15] [Rank 0] step:2341/10000 train_time:214085ms step_avg:91.45ms +[2025-08-22 18:14:15] [Rank 0] step:2341/10000 train_time:214085ms step_avg:91.45ms +[2025-08-22 18:14:17] [Rank 0] step:2361/10000 train_time:215961ms step_avg:91.47ms +[2025-08-22 18:14:17] [Rank 0] step:2361/10000 train_time:215961ms step_avg:91.47ms +[2025-08-22 18:14:18] [Rank 0] step:2381/10000 train_time:217839ms step_avg:91.49ms +[2025-08-22 18:14:18] [Rank 0] step:2381/10000 train_time:217839ms step_avg:91.49ms +[2025-08-22 18:14:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:14:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:14:34] [Rank 0] PRINT: step:2400/10000 val_loss:4.1750 svd_entropy: attn_qk:H=0.5140,top10E=0.64,eRank=56.4,q75/q25=62.45 attn_vo:H=0.6903,top10E=0.40,eRank=179.0,q75/q25=24.09 mlp_w1:H=0.8987,top10E=0.12,eRank=423.0,q75/q25=6.97 mlp_w2:H=0.8808,top10E=0.15,eRank=394.3,q75/q25=10.77 vo_prod:H=0.4735,top10E=0.69,eRank=60.3,q75/q25=265.88 train_time:219723ms step_avg:91.55ms +[2025-08-22 18:14:34] [Rank 0] PRINT: step:2400/10000 val_loss:4.1750 svd_entropy: attn_qk:H=0.5140,top10E=0.64,eRank=56.4,q75/q25=62.45 attn_vo:H=0.6903,top10E=0.40,eRank=179.0,q75/q25=24.09 mlp_w1:H=0.8987,top10E=0.12,eRank=423.0,q75/q25=6.97 mlp_w2:H=0.8808,top10E=0.15,eRank=394.3,q75/q25=10.77 vo_prod:H=0.4735,top10E=0.69,eRank=60.3,q75/q25=265.88 train_time:219723ms step_avg:91.55ms +[2025-08-22 18:14:34] [Rank 0] step:2401/10000 train_time:219743ms step_avg:91.52ms +[2025-08-22 18:14:34] [Rank 0] step:2401/10000 train_time:219743ms step_avg:91.52ms +[2025-08-22 18:14:36] [Rank 0] step:2421/10000 train_time:221594ms step_avg:91.53ms +[2025-08-22 18:14:36] [Rank 0] step:2421/10000 train_time:221594ms step_avg:91.53ms +[2025-08-22 18:14:38] [Rank 0] step:2441/10000 train_time:223459ms step_avg:91.54ms +[2025-08-22 18:14:38] [Rank 0] step:2441/10000 train_time:223459ms step_avg:91.54ms +[2025-08-22 18:14:40] [Rank 0] step:2461/10000 train_time:225327ms step_avg:91.56ms +[2025-08-22 18:14:40] [Rank 0] step:2461/10000 train_time:225327ms step_avg:91.56ms +[2025-08-22 18:14:42] [Rank 0] step:2481/10000 train_time:227194ms step_avg:91.57ms +[2025-08-22 18:14:42] [Rank 0] step:2481/10000 train_time:227194ms step_avg:91.57ms +[2025-08-22 18:14:43] [Rank 0] step:2501/10000 train_time:229062ms step_avg:91.59ms +[2025-08-22 18:14:43] [Rank 0] step:2501/10000 train_time:229062ms step_avg:91.59ms +[2025-08-22 18:14:45] [Rank 0] step:2521/10000 train_time:230932ms step_avg:91.60ms +[2025-08-22 18:14:45] [Rank 0] step:2521/10000 train_time:230932ms step_avg:91.60ms +[2025-08-22 18:14:47] 
[Rank 0] step:2541/10000 train_time:232800ms step_avg:91.62ms +[2025-08-22 18:14:47] [Rank 0] step:2541/10000 train_time:232800ms step_avg:91.62ms +[2025-08-22 18:14:49] [Rank 0] step:2561/10000 train_time:234669ms step_avg:91.63ms +[2025-08-22 18:14:49] [Rank 0] step:2561/10000 train_time:234669ms step_avg:91.63ms +[2025-08-22 18:14:51] [Rank 0] step:2581/10000 train_time:236539ms step_avg:91.65ms +[2025-08-22 18:14:51] [Rank 0] step:2581/10000 train_time:236539ms step_avg:91.65ms +[2025-08-22 18:14:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:14:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:15:06] [Rank 0] PRINT: step:2600/10000 val_loss:4.1470 svd_entropy: attn_qk:H=0.5182,top10E=0.63,eRank=57.2,q75/q25=63.07 attn_vo:H=0.6903,top10E=0.40,eRank=178.6,q75/q25=24.19 mlp_w1:H=0.8957,top10E=0.12,eRank=417.3,q75/q25=7.06 mlp_w2:H=0.8788,top10E=0.15,eRank=390.8,q75/q25=10.80 vo_prod:H=0.4751,top10E=0.69,eRank=60.8,q75/q25=266.25 train_time:238415ms step_avg:91.70ms +[2025-08-22 18:15:06] [Rank 0] PRINT: step:2600/10000 val_loss:4.1470 svd_entropy: attn_qk:H=0.5182,top10E=0.63,eRank=57.2,q75/q25=63.07 attn_vo:H=0.6903,top10E=0.40,eRank=178.6,q75/q25=24.19 mlp_w1:H=0.8957,top10E=0.12,eRank=417.3,q75/q25=7.06 mlp_w2:H=0.8788,top10E=0.15,eRank=390.8,q75/q25=10.80 vo_prod:H=0.4751,top10E=0.69,eRank=60.8,q75/q25=266.25 train_time:238415ms step_avg:91.70ms +[2025-08-22 18:15:06] [Rank 0] step:2601/10000 train_time:238436ms step_avg:91.67ms +[2025-08-22 18:15:06] [Rank 0] step:2601/10000 train_time:238436ms step_avg:91.67ms +[2025-08-22 18:15:08] [Rank 0] step:2621/10000 train_time:240302ms step_avg:91.68ms +[2025-08-22 18:15:08] [Rank 0] step:2621/10000 train_time:240302ms step_avg:91.68ms +[2025-08-22 18:15:10] [Rank 0] step:2641/10000 train_time:242165ms step_avg:91.69ms 
+[2025-08-22 18:15:10] [Rank 0] step:2641/10000 train_time:242165ms step_avg:91.69ms +[2025-08-22 18:15:12] [Rank 0] step:2661/10000 train_time:244029ms step_avg:91.71ms +[2025-08-22 18:15:12] [Rank 0] step:2661/10000 train_time:244029ms step_avg:91.71ms +[2025-08-22 18:15:14] [Rank 0] step:2681/10000 train_time:245896ms step_avg:91.72ms +[2025-08-22 18:15:14] [Rank 0] step:2681/10000 train_time:245896ms step_avg:91.72ms +[2025-08-22 18:15:16] [Rank 0] step:2701/10000 train_time:247763ms step_avg:91.73ms +[2025-08-22 18:15:16] [Rank 0] step:2701/10000 train_time:247763ms step_avg:91.73ms +[2025-08-22 18:15:18] [Rank 0] step:2721/10000 train_time:249630ms step_avg:91.74ms +[2025-08-22 18:15:18] [Rank 0] step:2721/10000 train_time:249630ms step_avg:91.74ms +[2025-08-22 18:15:19] [Rank 0] step:2741/10000 train_time:251496ms step_avg:91.75ms +[2025-08-22 18:15:19] [Rank 0] step:2741/10000 train_time:251496ms step_avg:91.75ms +[2025-08-22 18:15:21] [Rank 0] step:2761/10000 train_time:253365ms step_avg:91.77ms +[2025-08-22 18:15:21] [Rank 0] step:2761/10000 train_time:253365ms step_avg:91.77ms +[2025-08-22 18:15:23] [Rank 0] step:2781/10000 train_time:255235ms step_avg:91.78ms +[2025-08-22 18:15:23] [Rank 0] step:2781/10000 train_time:255235ms step_avg:91.78ms +[2025-08-22 18:15:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:15:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:15:39] [Rank 0] PRINT: step:2800/10000 val_loss:4.1315 svd_entropy: attn_qk:H=0.5239,top10E=0.63,eRank=58.1,q75/q25=57.51 attn_vo:H=0.6932,top10E=0.39,eRank=180.2,q75/q25=23.99 mlp_w1:H=0.8938,top10E=0.13,eRank=413.5,q75/q25=7.14 mlp_w2:H=0.8774,top10E=0.15,eRank=387.8,q75/q25=10.87 vo_prod:H=0.4765,top10E=0.68,eRank=61.1,q75/q25=260.75 train_time:257111ms step_avg:91.83ms +[2025-08-22 18:15:39] [Rank 0] PRINT: step:2800/10000 val_loss:4.1315 svd_entropy: attn_qk:H=0.5239,top10E=0.63,eRank=58.1,q75/q25=57.51 attn_vo:H=0.6932,top10E=0.39,eRank=180.2,q75/q25=23.99 mlp_w1:H=0.8938,top10E=0.13,eRank=413.5,q75/q25=7.14 mlp_w2:H=0.8774,top10E=0.15,eRank=387.8,q75/q25=10.87 vo_prod:H=0.4765,top10E=0.68,eRank=61.1,q75/q25=260.75 train_time:257111ms step_avg:91.83ms +[2025-08-22 18:15:39] [Rank 0] step:2801/10000 train_time:257131ms step_avg:91.80ms +[2025-08-22 18:15:39] [Rank 0] step:2801/10000 train_time:257131ms step_avg:91.80ms +[2025-08-22 18:15:41] [Rank 0] step:2821/10000 train_time:258986ms step_avg:91.81ms +[2025-08-22 18:15:41] [Rank 0] step:2821/10000 train_time:258986ms step_avg:91.81ms +[2025-08-22 18:15:43] [Rank 0] step:2841/10000 train_time:260850ms step_avg:91.82ms +[2025-08-22 18:15:43] [Rank 0] step:2841/10000 train_time:260850ms step_avg:91.82ms +[2025-08-22 18:15:45] [Rank 0] step:2861/10000 train_time:262718ms step_avg:91.83ms +[2025-08-22 18:15:45] [Rank 0] step:2861/10000 train_time:262718ms step_avg:91.83ms +[2025-08-22 18:15:46] [Rank 0] step:2881/10000 train_time:264586ms step_avg:91.84ms +[2025-08-22 18:15:46] [Rank 0] step:2881/10000 train_time:264586ms step_avg:91.84ms +[2025-08-22 18:15:48] [Rank 0] step:2901/10000 train_time:266453ms step_avg:91.85ms +[2025-08-22 18:15:48] [Rank 0] step:2901/10000 train_time:266453ms step_avg:91.85ms +[2025-08-22 18:15:50] [Rank 0] step:2921/10000 train_time:268323ms step_avg:91.86ms +[2025-08-22 18:15:50] [Rank 0] step:2921/10000 train_time:268323ms step_avg:91.86ms +[2025-08-22 18:15:52] 
[Rank 0] step:2941/10000 train_time:270193ms step_avg:91.87ms +[2025-08-22 18:15:52] [Rank 0] step:2941/10000 train_time:270193ms step_avg:91.87ms +[2025-08-22 18:15:54] [Rank 0] step:2961/10000 train_time:272062ms step_avg:91.88ms +[2025-08-22 18:15:54] [Rank 0] step:2961/10000 train_time:272062ms step_avg:91.88ms +[2025-08-22 18:15:56] [Rank 0] step:2981/10000 train_time:273939ms step_avg:91.89ms +[2025-08-22 18:15:56] [Rank 0] step:2981/10000 train_time:273939ms step_avg:91.89ms +[2025-08-22 18:15:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:15:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:16:11] [Rank 0] PRINT: step:3000/10000 val_loss:4.1009 svd_entropy: attn_qk:H=0.5250,top10E=0.63,eRank=58.4,q75/q25=57.58 attn_vo:H=0.6961,top10E=0.39,eRank=183.3,q75/q25=25.35 mlp_w1:H=0.8902,top10E=0.13,eRank=406.1,q75/q25=7.28 mlp_w2:H=0.8736,top10E=0.16,eRank=380.8,q75/q25=11.02 vo_prod:H=0.4801,top10E=0.68,eRank=62.4,q75/q25=290.67 train_time:275823ms step_avg:91.94ms +[2025-08-22 18:16:11] [Rank 0] PRINT: step:3000/10000 val_loss:4.1009 svd_entropy: attn_qk:H=0.5250,top10E=0.63,eRank=58.4,q75/q25=57.58 attn_vo:H=0.6961,top10E=0.39,eRank=183.3,q75/q25=25.35 mlp_w1:H=0.8902,top10E=0.13,eRank=406.1,q75/q25=7.28 mlp_w2:H=0.8736,top10E=0.16,eRank=380.8,q75/q25=11.02 vo_prod:H=0.4801,top10E=0.68,eRank=62.4,q75/q25=290.67 train_time:275823ms step_avg:91.94ms +[2025-08-22 18:16:11] [Rank 0] step:3001/10000 train_time:275843ms step_avg:91.92ms +[2025-08-22 18:16:11] [Rank 0] step:3001/10000 train_time:275843ms step_avg:91.92ms +[2025-08-22 18:16:13] [Rank 0] step:3021/10000 train_time:277715ms step_avg:91.93ms +[2025-08-22 18:16:13] [Rank 0] step:3021/10000 train_time:277715ms step_avg:91.93ms +[2025-08-22 18:16:15] [Rank 0] step:3041/10000 train_time:279593ms step_avg:91.94ms 
+[2025-08-22 18:16:15] [Rank 0] step:3041/10000 train_time:279593ms step_avg:91.94ms +[2025-08-22 18:16:17] [Rank 0] step:3061/10000 train_time:281469ms step_avg:91.95ms +[2025-08-22 18:16:17] [Rank 0] step:3061/10000 train_time:281469ms step_avg:91.95ms +[2025-08-22 18:16:19] [Rank 0] step:3081/10000 train_time:283347ms step_avg:91.97ms +[2025-08-22 18:16:19] [Rank 0] step:3081/10000 train_time:283347ms step_avg:91.97ms +[2025-08-22 18:16:21] [Rank 0] step:3101/10000 train_time:285225ms step_avg:91.98ms +[2025-08-22 18:16:21] [Rank 0] step:3101/10000 train_time:285225ms step_avg:91.98ms +[2025-08-22 18:16:23] [Rank 0] step:3121/10000 train_time:287104ms step_avg:91.99ms +[2025-08-22 18:16:23] [Rank 0] step:3121/10000 train_time:287104ms step_avg:91.99ms +[2025-08-22 18:16:25] [Rank 0] step:3141/10000 train_time:288984ms step_avg:92.00ms +[2025-08-22 18:16:25] [Rank 0] step:3141/10000 train_time:288984ms step_avg:92.00ms +[2025-08-22 18:16:27] [Rank 0] step:3161/10000 train_time:290867ms step_avg:92.02ms +[2025-08-22 18:16:27] [Rank 0] step:3161/10000 train_time:290867ms step_avg:92.02ms +[2025-08-22 18:16:28] [Rank 0] step:3181/10000 train_time:292749ms step_avg:92.03ms +[2025-08-22 18:16:28] [Rank 0] step:3181/10000 train_time:292749ms step_avg:92.03ms +[2025-08-22 18:16:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:16:30] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:16:44] [Rank 0] PRINT: step:3200/10000 val_loss:4.0806 svd_entropy: attn_qk:H=0.5255,top10E=0.62,eRank=59.0,q75/q25=57.55 attn_vo:H=0.6962,top10E=0.39,eRank=183.6,q75/q25=25.31 mlp_w1:H=0.8890,top10E=0.13,eRank=403.5,q75/q25=7.35 mlp_w2:H=0.8724,top10E=0.16,eRank=378.6,q75/q25=11.10 vo_prod:H=0.4844,top10E=0.67,eRank=63.2,q75/q25=291.13 train_time:294639ms step_avg:92.07ms +[2025-08-22 18:16:44] [Rank 0] PRINT: step:3200/10000 val_loss:4.0806 svd_entropy: attn_qk:H=0.5255,top10E=0.62,eRank=59.0,q75/q25=57.55 attn_vo:H=0.6962,top10E=0.39,eRank=183.6,q75/q25=25.31 mlp_w1:H=0.8890,top10E=0.13,eRank=403.5,q75/q25=7.35 mlp_w2:H=0.8724,top10E=0.16,eRank=378.6,q75/q25=11.10 vo_prod:H=0.4844,top10E=0.67,eRank=63.2,q75/q25=291.13 train_time:294639ms step_avg:92.07ms +[2025-08-22 18:16:44] [Rank 0] step:3201/10000 train_time:294659ms step_avg:92.05ms +[2025-08-22 18:16:44] [Rank 0] step:3201/10000 train_time:294659ms step_avg:92.05ms +[2025-08-22 18:16:46] [Rank 0] step:3221/10000 train_time:296530ms step_avg:92.06ms +[2025-08-22 18:16:46] [Rank 0] step:3221/10000 train_time:296530ms step_avg:92.06ms +[2025-08-22 18:16:48] [Rank 0] step:3241/10000 train_time:298403ms step_avg:92.07ms +[2025-08-22 18:16:48] [Rank 0] step:3241/10000 train_time:298403ms step_avg:92.07ms +[2025-08-22 18:16:50] [Rank 0] step:3261/10000 train_time:300279ms step_avg:92.08ms +[2025-08-22 18:16:50] [Rank 0] step:3261/10000 train_time:300279ms step_avg:92.08ms +[2025-08-22 18:16:52] [Rank 0] step:3281/10000 train_time:302157ms step_avg:92.09ms +[2025-08-22 18:16:52] [Rank 0] step:3281/10000 train_time:302157ms step_avg:92.09ms +[2025-08-22 18:16:54] [Rank 0] step:3301/10000 train_time:304035ms step_avg:92.10ms +[2025-08-22 18:16:54] [Rank 0] step:3301/10000 train_time:304035ms step_avg:92.10ms +[2025-08-22 18:16:55] [Rank 0] step:3321/10000 train_time:305914ms step_avg:92.11ms +[2025-08-22 18:16:55] [Rank 0] step:3321/10000 train_time:305914ms step_avg:92.11ms +[2025-08-22 18:16:57] 
[Rank 0] step:3341/10000 train_time:307791ms step_avg:92.13ms +[2025-08-22 18:16:57] [Rank 0] step:3341/10000 train_time:307791ms step_avg:92.13ms +[2025-08-22 18:16:59] [Rank 0] step:3361/10000 train_time:309671ms step_avg:92.14ms +[2025-08-22 18:16:59] [Rank 0] step:3361/10000 train_time:309671ms step_avg:92.14ms +[2025-08-22 18:17:01] [Rank 0] step:3381/10000 train_time:311549ms step_avg:92.15ms +[2025-08-22 18:17:01] [Rank 0] step:3381/10000 train_time:311549ms step_avg:92.15ms +[2025-08-22 18:17:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:17:03] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:17:17] [Rank 0] PRINT: step:3400/10000 val_loss:4.0642 svd_entropy: attn_qk:H=0.5286,top10E=0.62,eRank=59.5,q75/q25=56.40 attn_vo:H=0.6953,top10E=0.39,eRank=182.3,q75/q25=25.18 mlp_w1:H=0.8863,top10E=0.14,eRank=399.3,q75/q25=7.45 mlp_w2:H=0.8695,top10E=0.16,eRank=374.3,q75/q25=11.27 vo_prod:H=0.4873,top10E=0.67,eRank=63.7,q75/q25=289.79 train_time:313433ms step_avg:92.19ms +[2025-08-22 18:17:17] [Rank 0] PRINT: step:3400/10000 val_loss:4.0642 svd_entropy: attn_qk:H=0.5286,top10E=0.62,eRank=59.5,q75/q25=56.40 attn_vo:H=0.6953,top10E=0.39,eRank=182.3,q75/q25=25.18 mlp_w1:H=0.8863,top10E=0.14,eRank=399.3,q75/q25=7.45 mlp_w2:H=0.8695,top10E=0.16,eRank=374.3,q75/q25=11.27 vo_prod:H=0.4873,top10E=0.67,eRank=63.7,q75/q25=289.79 train_time:313433ms step_avg:92.19ms +[2025-08-22 18:17:17] [Rank 0] step:3401/10000 train_time:313453ms step_avg:92.16ms +[2025-08-22 18:17:17] [Rank 0] step:3401/10000 train_time:313453ms step_avg:92.16ms +[2025-08-22 18:17:19] [Rank 0] step:3421/10000 train_time:315322ms step_avg:92.17ms +[2025-08-22 18:17:19] [Rank 0] step:3421/10000 train_time:315322ms step_avg:92.17ms +[2025-08-22 18:17:21] [Rank 0] step:3441/10000 train_time:317195ms step_avg:92.18ms 
+[2025-08-22 18:17:21] [Rank 0] step:3441/10000 train_time:317195ms step_avg:92.18ms +[2025-08-22 18:17:23] [Rank 0] step:3461/10000 train_time:319070ms step_avg:92.19ms +[2025-08-22 18:17:23] [Rank 0] step:3461/10000 train_time:319070ms step_avg:92.19ms +[2025-08-22 18:17:24] [Rank 0] step:3481/10000 train_time:320943ms step_avg:92.20ms +[2025-08-22 18:17:24] [Rank 0] step:3481/10000 train_time:320943ms step_avg:92.20ms +[2025-08-22 18:17:26] [Rank 0] step:3501/10000 train_time:322820ms step_avg:92.21ms +[2025-08-22 18:17:26] [Rank 0] step:3501/10000 train_time:322820ms step_avg:92.21ms +[2025-08-22 18:17:28] [Rank 0] step:3521/10000 train_time:324697ms step_avg:92.22ms +[2025-08-22 18:17:28] [Rank 0] step:3521/10000 train_time:324697ms step_avg:92.22ms +[2025-08-22 18:17:30] [Rank 0] step:3541/10000 train_time:326573ms step_avg:92.23ms +[2025-08-22 18:17:30] [Rank 0] step:3541/10000 train_time:326573ms step_avg:92.23ms +[2025-08-22 18:17:32] [Rank 0] step:3561/10000 train_time:328449ms step_avg:92.23ms +[2025-08-22 18:17:32] [Rank 0] step:3561/10000 train_time:328449ms step_avg:92.23ms +[2025-08-22 18:17:34] [Rank 0] step:3581/10000 train_time:330326ms step_avg:92.24ms +[2025-08-22 18:17:34] [Rank 0] step:3581/10000 train_time:330326ms step_avg:92.24ms +[2025-08-22 18:17:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:17:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:17:49] [Rank 0] PRINT: step:3600/10000 val_loss:4.0584 svd_entropy: attn_qk:H=0.5324,top10E=0.62,eRank=60.3,q75/q25=56.75 attn_vo:H=0.6957,top10E=0.39,eRank=182.5,q75/q25=24.97 mlp_w1:H=0.8846,top10E=0.14,eRank=396.3,q75/q25=7.54 mlp_w2:H=0.8692,top10E=0.16,eRank=373.2,q75/q25=11.45 vo_prod:H=0.4896,top10E=0.67,eRank=63.9,q75/q25=282.64 train_time:332211ms step_avg:92.28ms +[2025-08-22 18:17:49] [Rank 0] PRINT: step:3600/10000 val_loss:4.0584 svd_entropy: attn_qk:H=0.5324,top10E=0.62,eRank=60.3,q75/q25=56.75 attn_vo:H=0.6957,top10E=0.39,eRank=182.5,q75/q25=24.97 mlp_w1:H=0.8846,top10E=0.14,eRank=396.3,q75/q25=7.54 mlp_w2:H=0.8692,top10E=0.16,eRank=373.2,q75/q25=11.45 vo_prod:H=0.4896,top10E=0.67,eRank=63.9,q75/q25=282.64 train_time:332211ms step_avg:92.28ms +[2025-08-22 18:17:50] [Rank 0] step:3601/10000 train_time:332231ms step_avg:92.26ms +[2025-08-22 18:17:50] [Rank 0] step:3601/10000 train_time:332231ms step_avg:92.26ms +[2025-08-22 18:17:51] [Rank 0] step:3621/10000 train_time:334105ms step_avg:92.27ms +[2025-08-22 18:17:51] [Rank 0] step:3621/10000 train_time:334105ms step_avg:92.27ms +[2025-08-22 18:17:53] [Rank 0] step:3641/10000 train_time:335978ms step_avg:92.28ms +[2025-08-22 18:17:53] [Rank 0] step:3641/10000 train_time:335978ms step_avg:92.28ms +[2025-08-22 18:17:55] [Rank 0] step:3661/10000 train_time:337850ms step_avg:92.28ms +[2025-08-22 18:17:55] [Rank 0] step:3661/10000 train_time:337850ms step_avg:92.28ms +[2025-08-22 18:17:57] [Rank 0] step:3681/10000 train_time:339727ms step_avg:92.29ms +[2025-08-22 18:17:57] [Rank 0] step:3681/10000 train_time:339727ms step_avg:92.29ms +[2025-08-22 18:17:59] [Rank 0] step:3701/10000 train_time:341603ms step_avg:92.30ms +[2025-08-22 18:17:59] [Rank 0] step:3701/10000 train_time:341603ms step_avg:92.30ms +[2025-08-22 18:18:01] [Rank 0] step:3721/10000 train_time:343509ms step_avg:92.32ms +[2025-08-22 18:18:01] [Rank 0] step:3721/10000 train_time:343509ms step_avg:92.32ms +[2025-08-22 18:18:03] 
[Rank 0] step:3741/10000 train_time:345422ms step_avg:92.33ms +[2025-08-22 18:18:03] [Rank 0] step:3741/10000 train_time:345422ms step_avg:92.33ms +[2025-08-22 18:18:05] [Rank 0] step:3761/10000 train_time:347337ms step_avg:92.35ms +[2025-08-22 18:18:05] [Rank 0] step:3761/10000 train_time:347337ms step_avg:92.35ms +[2025-08-22 18:18:07] [Rank 0] step:3781/10000 train_time:349255ms step_avg:92.37ms +[2025-08-22 18:18:07] [Rank 0] step:3781/10000 train_time:349255ms step_avg:92.37ms +[2025-08-22 18:18:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:18:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:18:22] [Rank 0] PRINT: step:3800/10000 val_loss:4.0267 svd_entropy: attn_qk:H=0.5355,top10E=0.61,eRank=60.8,q75/q25=55.99 attn_vo:H=0.6938,top10E=0.39,eRank=181.0,q75/q25=25.01 mlp_w1:H=0.8833,top10E=0.14,eRank=394.1,q75/q25=7.58 mlp_w2:H=0.8681,top10E=0.16,eRank=371.1,q75/q25=11.56 vo_prod:H=0.4857,top10E=0.67,eRank=63.4,q75/q25=284.56 train_time:351179ms step_avg:92.42ms +[2025-08-22 18:18:22] [Rank 0] PRINT: step:3800/10000 val_loss:4.0267 svd_entropy: attn_qk:H=0.5355,top10E=0.61,eRank=60.8,q75/q25=55.99 attn_vo:H=0.6938,top10E=0.39,eRank=181.0,q75/q25=25.01 mlp_w1:H=0.8833,top10E=0.14,eRank=394.1,q75/q25=7.58 mlp_w2:H=0.8681,top10E=0.16,eRank=371.1,q75/q25=11.56 vo_prod:H=0.4857,top10E=0.67,eRank=63.4,q75/q25=284.56 train_time:351179ms step_avg:92.42ms +[2025-08-22 18:18:22] [Rank 0] step:3801/10000 train_time:351199ms step_avg:92.40ms +[2025-08-22 18:18:22] [Rank 0] step:3801/10000 train_time:351199ms step_avg:92.40ms +[2025-08-22 18:18:24] [Rank 0] step:3821/10000 train_time:353121ms step_avg:92.42ms +[2025-08-22 18:18:24] [Rank 0] step:3821/10000 train_time:353121ms step_avg:92.42ms +[2025-08-22 18:18:26] [Rank 0] step:3841/10000 train_time:355038ms step_avg:92.43ms 
+[2025-08-22 18:18:26] [Rank 0] step:3841/10000 train_time:355038ms step_avg:92.43ms +[2025-08-22 18:18:28] [Rank 0] step:3861/10000 train_time:356956ms step_avg:92.45ms +[2025-08-22 18:18:28] [Rank 0] step:3861/10000 train_time:356956ms step_avg:92.45ms +[2025-08-22 18:18:30] [Rank 0] step:3881/10000 train_time:358872ms step_avg:92.47ms +[2025-08-22 18:18:30] [Rank 0] step:3881/10000 train_time:358872ms step_avg:92.47ms +[2025-08-22 18:18:32] [Rank 0] step:3901/10000 train_time:360789ms step_avg:92.49ms +[2025-08-22 18:18:32] [Rank 0] step:3901/10000 train_time:360789ms step_avg:92.49ms +[2025-08-22 18:18:34] [Rank 0] step:3921/10000 train_time:362707ms step_avg:92.50ms +[2025-08-22 18:18:34] [Rank 0] step:3921/10000 train_time:362707ms step_avg:92.50ms +[2025-08-22 18:18:36] [Rank 0] step:3941/10000 train_time:364628ms step_avg:92.52ms +[2025-08-22 18:18:36] [Rank 0] step:3941/10000 train_time:364628ms step_avg:92.52ms +[2025-08-22 18:18:38] [Rank 0] step:3961/10000 train_time:366545ms step_avg:92.54ms +[2025-08-22 18:18:38] [Rank 0] step:3961/10000 train_time:366545ms step_avg:92.54ms +[2025-08-22 18:18:40] [Rank 0] step:3981/10000 train_time:368464ms step_avg:92.56ms +[2025-08-22 18:18:40] [Rank 0] step:3981/10000 train_time:368464ms step_avg:92.56ms +[2025-08-22 18:18:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:18:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:18:55] [Rank 0] PRINT: step:4000/10000 val_loss:4.0383 svd_entropy: attn_qk:H=0.5370,top10E=0.61,eRank=61.0,q75/q25=55.29 attn_vo:H=0.6929,top10E=0.39,eRank=179.8,q75/q25=24.84 mlp_w1:H=0.8804,top10E=0.14,eRank=389.1,q75/q25=7.64 mlp_w2:H=0.8660,top10E=0.16,eRank=367.8,q75/q25=11.61 vo_prod:H=0.4855,top10E=0.67,eRank=63.3,q75/q25=282.77 train_time:370390ms step_avg:92.60ms +[2025-08-22 18:18:55] [Rank 0] PRINT: step:4000/10000 val_loss:4.0383 svd_entropy: attn_qk:H=0.5370,top10E=0.61,eRank=61.0,q75/q25=55.29 attn_vo:H=0.6929,top10E=0.39,eRank=179.8,q75/q25=24.84 mlp_w1:H=0.8804,top10E=0.14,eRank=389.1,q75/q25=7.64 mlp_w2:H=0.8660,top10E=0.16,eRank=367.8,q75/q25=11.61 vo_prod:H=0.4855,top10E=0.67,eRank=63.3,q75/q25=282.77 train_time:370390ms step_avg:92.60ms +[2025-08-22 18:18:56] [Rank 0] step:4001/10000 train_time:370411ms step_avg:92.58ms +[2025-08-22 18:18:56] [Rank 0] step:4001/10000 train_time:370411ms step_avg:92.58ms +[2025-08-22 18:18:58] [Rank 0] step:4021/10000 train_time:372322ms step_avg:92.59ms +[2025-08-22 18:18:58] [Rank 0] step:4021/10000 train_time:372322ms step_avg:92.59ms +[2025-08-22 18:18:59] [Rank 0] step:4041/10000 train_time:374235ms step_avg:92.61ms +[2025-08-22 18:18:59] [Rank 0] step:4041/10000 train_time:374235ms step_avg:92.61ms +[2025-08-22 18:19:01] [Rank 0] step:4061/10000 train_time:376145ms step_avg:92.62ms +[2025-08-22 18:19:01] [Rank 0] step:4061/10000 train_time:376145ms step_avg:92.62ms +[2025-08-22 18:19:04] [Rank 0] step:4081/10000 train_time:378722ms step_avg:92.80ms +[2025-08-22 18:19:04] [Rank 0] step:4081/10000 train_time:378722ms step_avg:92.80ms +[2025-08-22 18:19:06] [Rank 0] step:4101/10000 train_time:380635ms step_avg:92.82ms +[2025-08-22 18:19:06] [Rank 0] step:4101/10000 train_time:380635ms step_avg:92.82ms +[2025-08-22 18:19:08] [Rank 0] step:4121/10000 train_time:382546ms step_avg:92.83ms +[2025-08-22 18:19:08] [Rank 0] step:4121/10000 train_time:382546ms step_avg:92.83ms +[2025-08-22 18:19:10] 
[Rank 0] step:4141/10000 train_time:384459ms step_avg:92.84ms +[2025-08-22 18:19:10] [Rank 0] step:4141/10000 train_time:384459ms step_avg:92.84ms +[2025-08-22 18:19:12] [Rank 0] step:4161/10000 train_time:386371ms step_avg:92.86ms +[2025-08-22 18:19:12] [Rank 0] step:4161/10000 train_time:386371ms step_avg:92.86ms +[2025-08-22 18:19:13] [Rank 0] step:4181/10000 train_time:388286ms step_avg:92.87ms +[2025-08-22 18:19:13] [Rank 0] step:4181/10000 train_time:388286ms step_avg:92.87ms +[2025-08-22 18:19:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:19:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:19:29] [Rank 0] PRINT: step:4200/10000 val_loss:4.0085 svd_entropy: attn_qk:H=0.5384,top10E=0.61,eRank=61.4,q75/q25=55.66 attn_vo:H=0.6923,top10E=0.39,eRank=179.3,q75/q25=24.88 mlp_w1:H=0.8784,top10E=0.15,eRank=386.1,q75/q25=7.71 mlp_w2:H=0.8654,top10E=0.16,eRank=366.9,q75/q25=11.69 vo_prod:H=0.4837,top10E=0.68,eRank=62.4,q75/q25=278.80 train_time:390205ms step_avg:92.91ms +[2025-08-22 18:19:29] [Rank 0] PRINT: step:4200/10000 val_loss:4.0085 svd_entropy: attn_qk:H=0.5384,top10E=0.61,eRank=61.4,q75/q25=55.66 attn_vo:H=0.6923,top10E=0.39,eRank=179.3,q75/q25=24.88 mlp_w1:H=0.8784,top10E=0.15,eRank=386.1,q75/q25=7.71 mlp_w2:H=0.8654,top10E=0.16,eRank=366.9,q75/q25=11.69 vo_prod:H=0.4837,top10E=0.68,eRank=62.4,q75/q25=278.80 train_time:390205ms step_avg:92.91ms +[2025-08-22 18:19:29] [Rank 0] step:4201/10000 train_time:390225ms step_avg:92.89ms +[2025-08-22 18:19:29] [Rank 0] step:4201/10000 train_time:390225ms step_avg:92.89ms +[2025-08-22 18:19:31] [Rank 0] step:4221/10000 train_time:392121ms step_avg:92.90ms +[2025-08-22 18:19:31] [Rank 0] step:4221/10000 train_time:392121ms step_avg:92.90ms +[2025-08-22 18:19:33] [Rank 0] step:4241/10000 train_time:394031ms step_avg:92.91ms 
+[2025-08-22 18:19:33] [Rank 0] step:4241/10000 train_time:394031ms step_avg:92.91ms +[2025-08-22 18:19:35] [Rank 0] step:4261/10000 train_time:395942ms step_avg:92.92ms +[2025-08-22 18:19:35] [Rank 0] step:4261/10000 train_time:395942ms step_avg:92.92ms +[2025-08-22 18:19:37] [Rank 0] step:4281/10000 train_time:397852ms step_avg:92.93ms +[2025-08-22 18:19:37] [Rank 0] step:4281/10000 train_time:397852ms step_avg:92.93ms +[2025-08-22 18:19:39] [Rank 0] step:4301/10000 train_time:399764ms step_avg:92.95ms +[2025-08-22 18:19:39] [Rank 0] step:4301/10000 train_time:399764ms step_avg:92.95ms +[2025-08-22 18:19:41] [Rank 0] step:4321/10000 train_time:401677ms step_avg:92.96ms +[2025-08-22 18:19:41] [Rank 0] step:4321/10000 train_time:401677ms step_avg:92.96ms +[2025-08-22 18:19:43] [Rank 0] step:4341/10000 train_time:403589ms step_avg:92.97ms +[2025-08-22 18:19:43] [Rank 0] step:4341/10000 train_time:403589ms step_avg:92.97ms +[2025-08-22 18:19:45] [Rank 0] step:4361/10000 train_time:405503ms step_avg:92.98ms +[2025-08-22 18:19:45] [Rank 0] step:4361/10000 train_time:405503ms step_avg:92.98ms +[2025-08-22 18:19:47] [Rank 0] step:4381/10000 train_time:407417ms step_avg:93.00ms +[2025-08-22 18:19:47] [Rank 0] step:4381/10000 train_time:407417ms step_avg:93.00ms +[2025-08-22 18:19:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:19:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:20:02] [Rank 0] PRINT: step:4400/10000 val_loss:3.9900 svd_entropy: attn_qk:H=0.5399,top10E=0.61,eRank=61.7,q75/q25=55.95 attn_vo:H=0.6940,top10E=0.39,eRank=179.7,q75/q25=25.00 mlp_w1:H=0.8766,top10E=0.15,eRank=383.4,q75/q25=7.76 mlp_w2:H=0.8650,top10E=0.17,eRank=366.2,q75/q25=11.69 vo_prod:H=0.4864,top10E=0.68,eRank=62.3,q75/q25=279.90 train_time:409338ms step_avg:93.03ms +[2025-08-22 18:20:02] [Rank 0] PRINT: step:4400/10000 val_loss:3.9900 svd_entropy: attn_qk:H=0.5399,top10E=0.61,eRank=61.7,q75/q25=55.95 attn_vo:H=0.6940,top10E=0.39,eRank=179.7,q75/q25=25.00 mlp_w1:H=0.8766,top10E=0.15,eRank=383.4,q75/q25=7.76 mlp_w2:H=0.8650,top10E=0.17,eRank=366.2,q75/q25=11.69 vo_prod:H=0.4864,top10E=0.68,eRank=62.3,q75/q25=279.90 train_time:409338ms step_avg:93.03ms +[2025-08-22 18:20:02] [Rank 0] step:4401/10000 train_time:409358ms step_avg:93.01ms +[2025-08-22 18:20:02] [Rank 0] step:4401/10000 train_time:409358ms step_avg:93.01ms +[2025-08-22 18:20:04] [Rank 0] step:4421/10000 train_time:411261ms step_avg:93.02ms +[2025-08-22 18:20:04] [Rank 0] step:4421/10000 train_time:411261ms step_avg:93.02ms +[2025-08-22 18:20:06] [Rank 0] step:4441/10000 train_time:413171ms step_avg:93.04ms +[2025-08-22 18:20:06] [Rank 0] step:4441/10000 train_time:413171ms step_avg:93.04ms +[2025-08-22 18:20:08] [Rank 0] step:4461/10000 train_time:415085ms step_avg:93.05ms +[2025-08-22 18:20:08] [Rank 0] step:4461/10000 train_time:415085ms step_avg:93.05ms +[2025-08-22 18:20:10] [Rank 0] step:4481/10000 train_time:417002ms step_avg:93.06ms +[2025-08-22 18:20:10] [Rank 0] step:4481/10000 train_time:417002ms step_avg:93.06ms +[2025-08-22 18:20:12] [Rank 0] step:4501/10000 train_time:418916ms step_avg:93.07ms +[2025-08-22 18:20:12] [Rank 0] step:4501/10000 train_time:418916ms step_avg:93.07ms +[2025-08-22 18:20:14] [Rank 0] step:4521/10000 train_time:420833ms step_avg:93.08ms +[2025-08-22 18:20:14] [Rank 0] step:4521/10000 train_time:420833ms step_avg:93.08ms +[2025-08-22 18:20:16] 
[Rank 0] step:4541/10000 train_time:422753ms step_avg:93.10ms +[2025-08-22 18:20:16] [Rank 0] step:4541/10000 train_time:422753ms step_avg:93.10ms +[2025-08-22 18:20:18] [Rank 0] step:4561/10000 train_time:424674ms step_avg:93.11ms +[2025-08-22 18:20:18] [Rank 0] step:4561/10000 train_time:424674ms step_avg:93.11ms +[2025-08-22 18:20:20] [Rank 0] step:4581/10000 train_time:426666ms step_avg:93.14ms +[2025-08-22 18:20:20] [Rank 0] step:4581/10000 train_time:426666ms step_avg:93.14ms +[2025-08-22 18:20:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:20:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:20:36] [Rank 0] PRINT: step:4600/10000 val_loss:3.9700 svd_entropy: attn_qk:H=0.5395,top10E=0.61,eRank=62.0,q75/q25=56.18 attn_vo:H=0.6952,top10E=0.39,eRank=180.2,q75/q25=25.06 mlp_w1:H=0.8745,top10E=0.15,eRank=380.5,q75/q25=7.83 mlp_w2:H=0.8652,top10E=0.16,eRank=366.2,q75/q25=11.93 vo_prod:H=0.4905,top10E=0.67,eRank=62.7,q75/q25=278.27 train_time:428669ms step_avg:93.19ms +[2025-08-22 18:20:36] [Rank 0] PRINT: step:4600/10000 val_loss:3.9700 svd_entropy: attn_qk:H=0.5395,top10E=0.61,eRank=62.0,q75/q25=56.18 attn_vo:H=0.6952,top10E=0.39,eRank=180.2,q75/q25=25.06 mlp_w1:H=0.8745,top10E=0.15,eRank=380.5,q75/q25=7.83 mlp_w2:H=0.8652,top10E=0.16,eRank=366.2,q75/q25=11.93 vo_prod:H=0.4905,top10E=0.67,eRank=62.7,q75/q25=278.27 train_time:428669ms step_avg:93.19ms +[2025-08-22 18:20:36] [Rank 0] step:4601/10000 train_time:428689ms step_avg:93.17ms +[2025-08-22 18:20:36] [Rank 0] step:4601/10000 train_time:428689ms step_avg:93.17ms +[2025-08-22 18:20:38] [Rank 0] step:4621/10000 train_time:430606ms step_avg:93.18ms +[2025-08-22 18:20:38] [Rank 0] step:4621/10000 train_time:430606ms step_avg:93.18ms +[2025-08-22 18:20:40] [Rank 0] step:4641/10000 train_time:432529ms step_avg:93.20ms 
+[2025-08-22 18:20:40] [Rank 0] step:4641/10000 train_time:432529ms step_avg:93.20ms +[2025-08-22 18:20:41] [Rank 0] step:4661/10000 train_time:434447ms step_avg:93.21ms +[2025-08-22 18:20:41] [Rank 0] step:4661/10000 train_time:434447ms step_avg:93.21ms +[2025-08-22 18:20:43] [Rank 0] step:4681/10000 train_time:436367ms step_avg:93.22ms +[2025-08-22 18:20:43] [Rank 0] step:4681/10000 train_time:436367ms step_avg:93.22ms +[2025-08-22 18:20:45] [Rank 0] step:4701/10000 train_time:438289ms step_avg:93.23ms +[2025-08-22 18:20:45] [Rank 0] step:4701/10000 train_time:438289ms step_avg:93.23ms +[2025-08-22 18:20:47] [Rank 0] step:4721/10000 train_time:440213ms step_avg:93.25ms +[2025-08-22 18:20:47] [Rank 0] step:4721/10000 train_time:440213ms step_avg:93.25ms +[2025-08-22 18:20:49] [Rank 0] step:4741/10000 train_time:442138ms step_avg:93.26ms +[2025-08-22 18:20:49] [Rank 0] step:4741/10000 train_time:442138ms step_avg:93.26ms +[2025-08-22 18:20:51] [Rank 0] step:4761/10000 train_time:444063ms step_avg:93.27ms +[2025-08-22 18:20:51] [Rank 0] step:4761/10000 train_time:444063ms step_avg:93.27ms +[2025-08-22 18:20:53] [Rank 0] step:4781/10000 train_time:445987ms step_avg:93.28ms +[2025-08-22 18:20:53] [Rank 0] step:4781/10000 train_time:445987ms step_avg:93.28ms +[2025-08-22 18:20:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:20:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:21:09] [Rank 0] PRINT: step:4800/10000 val_loss:3.9621 svd_entropy: attn_qk:H=0.5368,top10E=0.61,eRank=62.1,q75/q25=56.54 attn_vo:H=0.6966,top10E=0.39,eRank=180.4,q75/q25=25.13 mlp_w1:H=0.8722,top10E=0.15,eRank=377.3,q75/q25=7.89 mlp_w2:H=0.8655,top10E=0.16,eRank=366.1,q75/q25=11.97 vo_prod:H=0.4918,top10E=0.67,eRank=62.7,q75/q25=275.86 train_time:447919ms step_avg:93.32ms +[2025-08-22 18:21:09] [Rank 0] PRINT: step:4800/10000 val_loss:3.9621 svd_entropy: attn_qk:H=0.5368,top10E=0.61,eRank=62.1,q75/q25=56.54 attn_vo:H=0.6966,top10E=0.39,eRank=180.4,q75/q25=25.13 mlp_w1:H=0.8722,top10E=0.15,eRank=377.3,q75/q25=7.89 mlp_w2:H=0.8655,top10E=0.16,eRank=366.1,q75/q25=11.97 vo_prod:H=0.4918,top10E=0.67,eRank=62.7,q75/q25=275.86 train_time:447919ms step_avg:93.32ms +[2025-08-22 18:21:09] [Rank 0] step:4801/10000 train_time:447939ms step_avg:93.30ms +[2025-08-22 18:21:09] [Rank 0] step:4801/10000 train_time:447939ms step_avg:93.30ms +[2025-08-22 18:21:11] [Rank 0] step:4821/10000 train_time:449844ms step_avg:93.31ms +[2025-08-22 18:21:11] [Rank 0] step:4821/10000 train_time:449844ms step_avg:93.31ms +[2025-08-22 18:21:13] [Rank 0] step:4841/10000 train_time:451757ms step_avg:93.32ms +[2025-08-22 18:21:13] [Rank 0] step:4841/10000 train_time:451757ms step_avg:93.32ms +[2025-08-22 18:21:15] [Rank 0] step:4861/10000 train_time:453674ms step_avg:93.33ms +[2025-08-22 18:21:15] [Rank 0] step:4861/10000 train_time:453674ms step_avg:93.33ms +[2025-08-22 18:21:17] [Rank 0] step:4881/10000 train_time:455589ms step_avg:93.34ms +[2025-08-22 18:21:17] [Rank 0] step:4881/10000 train_time:455589ms step_avg:93.34ms +[2025-08-22 18:21:18] [Rank 0] step:4901/10000 train_time:457504ms step_avg:93.35ms +[2025-08-22 18:21:18] [Rank 0] step:4901/10000 train_time:457504ms step_avg:93.35ms +[2025-08-22 18:21:20] [Rank 0] step:4921/10000 train_time:459421ms step_avg:93.36ms +[2025-08-22 18:21:20] [Rank 0] step:4921/10000 train_time:459421ms step_avg:93.36ms +[2025-08-22 18:21:22] 
[Rank 0] step:4941/10000 train_time:461340ms step_avg:93.37ms +[2025-08-22 18:21:22] [Rank 0] step:4941/10000 train_time:461340ms step_avg:93.37ms +[2025-08-22 18:21:24] [Rank 0] step:4961/10000 train_time:463335ms step_avg:93.40ms +[2025-08-22 18:21:24] [Rank 0] step:4961/10000 train_time:463335ms step_avg:93.40ms +[2025-08-22 18:21:26] [Rank 0] step:4981/10000 train_time:465331ms step_avg:93.42ms +[2025-08-22 18:21:26] [Rank 0] step:4981/10000 train_time:465331ms step_avg:93.42ms +[2025-08-22 18:21:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:21:28] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:21:42] [Rank 0] PRINT: step:5000/10000 val_loss:3.9553 svd_entropy: attn_qk:H=0.5360,top10E=0.61,eRank=62.2,q75/q25=57.04 attn_vo:H=0.6982,top10E=0.39,eRank=180.9,q75/q25=25.09 mlp_w1:H=0.8700,top10E=0.16,eRank=374.3,q75/q25=7.95 mlp_w2:H=0.8658,top10E=0.16,eRank=366.0,q75/q25=12.09 vo_prod:H=0.4947,top10E=0.67,eRank=63.0,q75/q25=276.69 train_time:467259ms step_avg:93.45ms +[2025-08-22 18:21:42] [Rank 0] PRINT: step:5000/10000 val_loss:3.9553 svd_entropy: attn_qk:H=0.5360,top10E=0.61,eRank=62.2,q75/q25=57.04 attn_vo:H=0.6982,top10E=0.39,eRank=180.9,q75/q25=25.09 mlp_w1:H=0.8700,top10E=0.16,eRank=374.3,q75/q25=7.95 mlp_w2:H=0.8658,top10E=0.16,eRank=366.0,q75/q25=12.09 vo_prod:H=0.4947,top10E=0.67,eRank=63.0,q75/q25=276.69 train_time:467259ms step_avg:93.45ms +[2025-08-22 18:21:42] [Rank 0] step:5001/10000 train_time:467279ms step_avg:93.44ms +[2025-08-22 18:21:42] [Rank 0] step:5001/10000 train_time:467279ms step_avg:93.44ms +[2025-08-22 18:21:44] [Rank 0] step:5021/10000 train_time:469178ms step_avg:93.44ms +[2025-08-22 18:21:44] [Rank 0] step:5021/10000 train_time:469178ms step_avg:93.44ms +[2025-08-22 18:21:46] [Rank 0] step:5041/10000 train_time:471093ms step_avg:93.45ms 
+[2025-08-22 18:21:46] [Rank 0] step:5041/10000 train_time:471093ms step_avg:93.45ms +[2025-08-22 18:21:48] [Rank 0] step:5061/10000 train_time:473006ms step_avg:93.46ms +[2025-08-22 18:21:48] [Rank 0] step:5061/10000 train_time:473006ms step_avg:93.46ms +[2025-08-22 18:21:50] [Rank 0] step:5081/10000 train_time:474925ms step_avg:93.47ms +[2025-08-22 18:21:50] [Rank 0] step:5081/10000 train_time:474925ms step_avg:93.47ms +[2025-08-22 18:21:52] [Rank 0] step:5101/10000 train_time:476840ms step_avg:93.48ms +[2025-08-22 18:21:52] [Rank 0] step:5101/10000 train_time:476840ms step_avg:93.48ms +[2025-08-22 18:21:53] [Rank 0] step:5121/10000 train_time:478758ms step_avg:93.49ms +[2025-08-22 18:21:53] [Rank 0] step:5121/10000 train_time:478758ms step_avg:93.49ms +[2025-08-22 18:21:55] [Rank 0] step:5141/10000 train_time:480680ms step_avg:93.50ms +[2025-08-22 18:21:55] [Rank 0] step:5141/10000 train_time:480680ms step_avg:93.50ms +[2025-08-22 18:21:57] [Rank 0] step:5161/10000 train_time:482598ms step_avg:93.51ms +[2025-08-22 18:21:57] [Rank 0] step:5161/10000 train_time:482598ms step_avg:93.51ms +[2025-08-22 18:21:59] [Rank 0] step:5181/10000 train_time:484519ms step_avg:93.52ms +[2025-08-22 18:21:59] [Rank 0] step:5181/10000 train_time:484519ms step_avg:93.52ms +[2025-08-22 18:22:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:22:01] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:22:15] [Rank 0] PRINT: step:5200/10000 val_loss:3.9391 svd_entropy: attn_qk:H=0.5346,top10E=0.61,eRank=62.3,q75/q25=56.59 attn_vo:H=0.6987,top10E=0.39,eRank=180.8,q75/q25=25.06 mlp_w1:H=0.8687,top10E=0.16,eRank=372.4,q75/q25=7.98 mlp_w2:H=0.8665,top10E=0.16,eRank=366.4,q75/q25=12.00 vo_prod:H=0.4975,top10E=0.67,eRank=63.1,q75/q25=277.07 train_time:486468ms step_avg:93.55ms +[2025-08-22 18:22:15] [Rank 0] PRINT: step:5200/10000 val_loss:3.9391 svd_entropy: attn_qk:H=0.5346,top10E=0.61,eRank=62.3,q75/q25=56.59 attn_vo:H=0.6987,top10E=0.39,eRank=180.8,q75/q25=25.06 mlp_w1:H=0.8687,top10E=0.16,eRank=372.4,q75/q25=7.98 mlp_w2:H=0.8665,top10E=0.16,eRank=366.4,q75/q25=12.00 vo_prod:H=0.4975,top10E=0.67,eRank=63.1,q75/q25=277.07 train_time:486468ms step_avg:93.55ms +[2025-08-22 18:22:15] [Rank 0] step:5201/10000 train_time:486487ms step_avg:93.54ms +[2025-08-22 18:22:15] [Rank 0] step:5201/10000 train_time:486487ms step_avg:93.54ms +[2025-08-22 18:22:17] [Rank 0] step:5221/10000 train_time:488441ms step_avg:93.55ms +[2025-08-22 18:22:17] [Rank 0] step:5221/10000 train_time:488441ms step_avg:93.55ms +[2025-08-22 18:22:19] [Rank 0] step:5241/10000 train_time:490387ms step_avg:93.57ms +[2025-08-22 18:22:19] [Rank 0] step:5241/10000 train_time:490387ms step_avg:93.57ms +[2025-08-22 18:22:21] [Rank 0] step:5261/10000 train_time:492333ms step_avg:93.58ms +[2025-08-22 18:22:21] [Rank 0] step:5261/10000 train_time:492333ms step_avg:93.58ms +[2025-08-22 18:22:23] [Rank 0] step:5281/10000 train_time:494283ms step_avg:93.60ms +[2025-08-22 18:22:23] [Rank 0] step:5281/10000 train_time:494283ms step_avg:93.60ms +[2025-08-22 18:22:25] [Rank 0] step:5301/10000 train_time:496242ms step_avg:93.61ms +[2025-08-22 18:22:25] [Rank 0] step:5301/10000 train_time:496242ms step_avg:93.61ms +[2025-08-22 18:22:27] [Rank 0] step:5321/10000 train_time:498251ms step_avg:93.64ms +[2025-08-22 18:22:27] [Rank 0] step:5321/10000 train_time:498251ms step_avg:93.64ms +[2025-08-22 18:22:29] 
[Rank 0] step:5341/10000 train_time:500273ms step_avg:93.67ms +[2025-08-22 18:22:29] [Rank 0] step:5341/10000 train_time:500273ms step_avg:93.67ms +[2025-08-22 18:22:31] [Rank 0] step:5361/10000 train_time:502223ms step_avg:93.68ms +[2025-08-22 18:22:31] [Rank 0] step:5361/10000 train_time:502223ms step_avg:93.68ms +[2025-08-22 18:22:33] [Rank 0] step:5381/10000 train_time:504174ms step_avg:93.70ms +[2025-08-22 18:22:33] [Rank 0] step:5381/10000 train_time:504174ms step_avg:93.70ms +[2025-08-22 18:22:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:22:35] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:22:49] [Rank 0] PRINT: step:5400/10000 val_loss:3.9290 svd_entropy: attn_qk:H=0.5344,top10E=0.61,eRank=62.6,q75/q25=56.88 attn_vo:H=0.6995,top10E=0.39,eRank=180.8,q75/q25=25.08 mlp_w1:H=0.8671,top10E=0.16,eRank=370.0,q75/q25=8.03 mlp_w2:H=0.8660,top10E=0.16,eRank=365.3,q75/q25=12.06 vo_prod:H=0.5018,top10E=0.67,eRank=63.6,q75/q25=274.69 train_time:506131ms step_avg:93.73ms +[2025-08-22 18:22:49] [Rank 0] PRINT: step:5400/10000 val_loss:3.9290 svd_entropy: attn_qk:H=0.5344,top10E=0.61,eRank=62.6,q75/q25=56.88 attn_vo:H=0.6995,top10E=0.39,eRank=180.8,q75/q25=25.08 mlp_w1:H=0.8671,top10E=0.16,eRank=370.0,q75/q25=8.03 mlp_w2:H=0.8660,top10E=0.16,eRank=365.3,q75/q25=12.06 vo_prod:H=0.5018,top10E=0.67,eRank=63.6,q75/q25=274.69 train_time:506131ms step_avg:93.73ms +[2025-08-22 18:22:49] [Rank 0] step:5401/10000 train_time:506151ms step_avg:93.71ms +[2025-08-22 18:22:49] [Rank 0] step:5401/10000 train_time:506151ms step_avg:93.71ms +[2025-08-22 18:22:51] [Rank 0] step:5421/10000 train_time:508096ms step_avg:93.73ms +[2025-08-22 18:22:51] [Rank 0] step:5421/10000 train_time:508096ms step_avg:93.73ms +[2025-08-22 18:22:53] [Rank 0] step:5441/10000 train_time:510044ms step_avg:93.74ms 
+[2025-08-22 18:22:53] [Rank 0] step:5441/10000 train_time:510044ms step_avg:93.74ms +[2025-08-22 18:22:55] [Rank 0] step:5461/10000 train_time:511998ms step_avg:93.76ms +[2025-08-22 18:22:55] [Rank 0] step:5461/10000 train_time:511998ms step_avg:93.76ms +[2025-08-22 18:22:57] [Rank 0] step:5481/10000 train_time:513949ms step_avg:93.77ms +[2025-08-22 18:22:57] [Rank 0] step:5481/10000 train_time:513949ms step_avg:93.77ms +[2025-08-22 18:22:59] [Rank 0] step:5501/10000 train_time:515910ms step_avg:93.78ms +[2025-08-22 18:22:59] [Rank 0] step:5501/10000 train_time:515910ms step_avg:93.78ms +[2025-08-22 18:23:00] [Rank 0] step:5521/10000 train_time:517869ms step_avg:93.80ms +[2025-08-22 18:23:00] [Rank 0] step:5521/10000 train_time:517869ms step_avg:93.80ms +[2025-08-22 18:23:02] [Rank 0] step:5541/10000 train_time:519827ms step_avg:93.81ms +[2025-08-22 18:23:02] [Rank 0] step:5541/10000 train_time:519827ms step_avg:93.81ms +[2025-08-22 18:23:04] [Rank 0] step:5561/10000 train_time:521781ms step_avg:93.83ms +[2025-08-22 18:23:04] [Rank 0] step:5561/10000 train_time:521781ms step_avg:93.83ms +[2025-08-22 18:23:06] [Rank 0] step:5581/10000 train_time:523736ms step_avg:93.84ms +[2025-08-22 18:23:06] [Rank 0] step:5581/10000 train_time:523736ms step_avg:93.84ms +[2025-08-22 18:23:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:23:08] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:23:22] [Rank 0] PRINT: step:5600/10000 val_loss:3.9220 svd_entropy: attn_qk:H=0.5324,top10E=0.61,eRank=62.7,q75/q25=56.76 attn_vo:H=0.7000,top10E=0.38,eRank=180.9,q75/q25=25.17 mlp_w1:H=0.8656,top10E=0.16,eRank=367.6,q75/q25=8.10 mlp_w2:H=0.8654,top10E=0.16,eRank=364.2,q75/q25=12.22 vo_prod:H=0.5040,top10E=0.66,eRank=64.1,q75/q25=272.91 train_time:525700ms step_avg:93.88ms +[2025-08-22 18:23:22] [Rank 0] PRINT: step:5600/10000 val_loss:3.9220 svd_entropy: attn_qk:H=0.5324,top10E=0.61,eRank=62.7,q75/q25=56.76 attn_vo:H=0.7000,top10E=0.38,eRank=180.9,q75/q25=25.17 mlp_w1:H=0.8656,top10E=0.16,eRank=367.6,q75/q25=8.10 mlp_w2:H=0.8654,top10E=0.16,eRank=364.2,q75/q25=12.22 vo_prod:H=0.5040,top10E=0.66,eRank=64.1,q75/q25=272.91 train_time:525700ms step_avg:93.88ms +[2025-08-22 18:23:22] [Rank 0] step:5601/10000 train_time:525720ms step_avg:93.86ms +[2025-08-22 18:23:22] [Rank 0] step:5601/10000 train_time:525720ms step_avg:93.86ms +[2025-08-22 18:23:24] [Rank 0] step:5621/10000 train_time:527666ms step_avg:93.87ms +[2025-08-22 18:23:24] [Rank 0] step:5621/10000 train_time:527666ms step_avg:93.87ms +[2025-08-22 18:23:26] [Rank 0] step:5641/10000 train_time:529614ms step_avg:93.89ms +[2025-08-22 18:23:26] [Rank 0] step:5641/10000 train_time:529614ms step_avg:93.89ms +[2025-08-22 18:23:28] [Rank 0] step:5661/10000 train_time:531558ms step_avg:93.90ms +[2025-08-22 18:23:28] [Rank 0] step:5661/10000 train_time:531558ms step_avg:93.90ms +[2025-08-22 18:23:30] [Rank 0] step:5681/10000 train_time:533569ms step_avg:93.92ms +[2025-08-22 18:23:30] [Rank 0] step:5681/10000 train_time:533569ms step_avg:93.92ms +[2025-08-22 18:23:32] [Rank 0] step:5701/10000 train_time:535589ms step_avg:93.95ms +[2025-08-22 18:23:32] [Rank 0] step:5701/10000 train_time:535589ms step_avg:93.95ms +[2025-08-22 18:23:34] [Rank 0] step:5721/10000 train_time:537546ms step_avg:93.96ms +[2025-08-22 18:23:34] [Rank 0] step:5721/10000 train_time:537546ms step_avg:93.96ms +[2025-08-22 18:23:36] 
[Rank 0] step:5741/10000 train_time:539493ms step_avg:93.97ms +[2025-08-22 18:23:36] [Rank 0] step:5741/10000 train_time:539493ms step_avg:93.97ms +[2025-08-22 18:23:38] [Rank 0] step:5761/10000 train_time:541445ms step_avg:93.98ms +[2025-08-22 18:23:38] [Rank 0] step:5761/10000 train_time:541445ms step_avg:93.98ms +[2025-08-22 18:23:40] [Rank 0] step:5781/10000 train_time:543396ms step_avg:94.00ms +[2025-08-22 18:23:40] [Rank 0] step:5781/10000 train_time:543396ms step_avg:94.00ms +[2025-08-22 18:23:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:23:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:23:56] [Rank 0] PRINT: step:5800/10000 val_loss:3.9191 svd_entropy: attn_qk:H=0.5336,top10E=0.61,eRank=63.2,q75/q25=56.82 attn_vo:H=0.6998,top10E=0.39,eRank=180.5,q75/q25=25.08 mlp_w1:H=0.8644,top10E=0.17,eRank=366.0,q75/q25=8.15 mlp_w2:H=0.8646,top10E=0.16,eRank=362.9,q75/q25=12.22 vo_prod:H=0.5014,top10E=0.66,eRank=64.1,q75/q25=271.33 train_time:545356ms step_avg:94.03ms +[2025-08-22 18:23:56] [Rank 0] PRINT: step:5800/10000 val_loss:3.9191 svd_entropy: attn_qk:H=0.5336,top10E=0.61,eRank=63.2,q75/q25=56.82 attn_vo:H=0.6998,top10E=0.39,eRank=180.5,q75/q25=25.08 mlp_w1:H=0.8644,top10E=0.17,eRank=366.0,q75/q25=8.15 mlp_w2:H=0.8646,top10E=0.16,eRank=362.9,q75/q25=12.22 vo_prod:H=0.5014,top10E=0.66,eRank=64.1,q75/q25=271.33 train_time:545356ms step_avg:94.03ms +[2025-08-22 18:23:56] [Rank 0] step:5801/10000 train_time:545375ms step_avg:94.01ms +[2025-08-22 18:23:56] [Rank 0] step:5801/10000 train_time:545375ms step_avg:94.01ms +[2025-08-22 18:23:58] [Rank 0] step:5821/10000 train_time:547325ms step_avg:94.03ms +[2025-08-22 18:23:58] [Rank 0] step:5821/10000 train_time:547325ms step_avg:94.03ms +[2025-08-22 18:24:00] [Rank 0] step:5841/10000 train_time:549269ms step_avg:94.04ms 
+[2025-08-22 18:24:00] [Rank 0] step:5841/10000 train_time:549269ms step_avg:94.04ms +[2025-08-22 18:24:02] [Rank 0] step:5861/10000 train_time:551220ms step_avg:94.05ms +[2025-08-22 18:24:02] [Rank 0] step:5861/10000 train_time:551220ms step_avg:94.05ms +[2025-08-22 18:24:04] [Rank 0] step:5881/10000 train_time:553169ms step_avg:94.06ms +[2025-08-22 18:24:04] [Rank 0] step:5881/10000 train_time:553169ms step_avg:94.06ms +[2025-08-22 18:24:06] [Rank 0] step:5901/10000 train_time:555117ms step_avg:94.07ms +[2025-08-22 18:24:06] [Rank 0] step:5901/10000 train_time:555117ms step_avg:94.07ms +[2025-08-22 18:24:08] [Rank 0] step:5921/10000 train_time:557066ms step_avg:94.08ms +[2025-08-22 18:24:08] [Rank 0] step:5921/10000 train_time:557066ms step_avg:94.08ms +[2025-08-22 18:24:10] [Rank 0] step:5941/10000 train_time:559022ms step_avg:94.10ms +[2025-08-22 18:24:10] [Rank 0] step:5941/10000 train_time:559022ms step_avg:94.10ms +[2025-08-22 18:24:12] [Rank 0] step:5961/10000 train_time:560975ms step_avg:94.11ms +[2025-08-22 18:24:12] [Rank 0] step:5961/10000 train_time:560975ms step_avg:94.11ms +[2025-08-22 18:24:13] [Rank 0] step:5981/10000 train_time:562925ms step_avg:94.12ms +[2025-08-22 18:24:13] [Rank 0] step:5981/10000 train_time:562925ms step_avg:94.12ms +[2025-08-22 18:24:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:24:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:24:29] [Rank 0] PRINT: step:6000/10000 val_loss:3.9005 svd_entropy: attn_qk:H=0.5324,top10E=0.61,eRank=63.3,q75/q25=56.93 attn_vo:H=0.7007,top10E=0.38,eRank=180.5,q75/q25=25.16 mlp_w1:H=0.8631,top10E=0.17,eRank=364.2,q75/q25=8.22 mlp_w2:H=0.8640,top10E=0.16,eRank=361.8,q75/q25=12.40 vo_prod:H=0.5027,top10E=0.66,eRank=64.4,q75/q25=272.33 train_time:564882ms step_avg:94.15ms +[2025-08-22 18:24:29] [Rank 0] PRINT: step:6000/10000 val_loss:3.9005 svd_entropy: attn_qk:H=0.5324,top10E=0.61,eRank=63.3,q75/q25=56.93 attn_vo:H=0.7007,top10E=0.38,eRank=180.5,q75/q25=25.16 mlp_w1:H=0.8631,top10E=0.17,eRank=364.2,q75/q25=8.22 mlp_w2:H=0.8640,top10E=0.16,eRank=361.8,q75/q25=12.40 vo_prod:H=0.5027,top10E=0.66,eRank=64.4,q75/q25=272.33 train_time:564882ms step_avg:94.15ms +[2025-08-22 18:24:29] [Rank 0] step:6001/10000 train_time:564902ms step_avg:94.13ms +[2025-08-22 18:24:29] [Rank 0] step:6001/10000 train_time:564902ms step_avg:94.13ms +[2025-08-22 18:24:31] [Rank 0] step:6021/10000 train_time:566855ms step_avg:94.15ms +[2025-08-22 18:24:31] [Rank 0] step:6021/10000 train_time:566855ms step_avg:94.15ms +[2025-08-22 18:24:33] [Rank 0] step:6041/10000 train_time:568874ms step_avg:94.17ms +[2025-08-22 18:24:33] [Rank 0] step:6041/10000 train_time:568874ms step_avg:94.17ms +[2025-08-22 18:24:35] [Rank 0] step:6061/10000 train_time:570902ms step_avg:94.19ms +[2025-08-22 18:24:35] [Rank 0] step:6061/10000 train_time:570902ms step_avg:94.19ms +[2025-08-22 18:24:37] [Rank 0] step:6081/10000 train_time:572852ms step_avg:94.20ms +[2025-08-22 18:24:37] [Rank 0] step:6081/10000 train_time:572852ms step_avg:94.20ms +[2025-08-22 18:24:39] [Rank 0] step:6101/10000 train_time:574811ms step_avg:94.22ms +[2025-08-22 18:24:39] [Rank 0] step:6101/10000 train_time:574811ms step_avg:94.22ms +[2025-08-22 18:24:41] [Rank 0] step:6121/10000 train_time:577027ms step_avg:94.27ms +[2025-08-22 18:24:41] [Rank 0] step:6121/10000 train_time:577027ms step_avg:94.27ms +[2025-08-22 18:24:43] 
[Rank 0] step:6141/10000 train_time:578993ms step_avg:94.28ms +[2025-08-22 18:24:43] [Rank 0] step:6141/10000 train_time:578993ms step_avg:94.28ms +[2025-08-22 18:24:45] [Rank 0] step:6161/10000 train_time:580958ms step_avg:94.30ms +[2025-08-22 18:24:45] [Rank 0] step:6161/10000 train_time:580958ms step_avg:94.30ms +[2025-08-22 18:24:47] [Rank 0] step:6181/10000 train_time:582917ms step_avg:94.31ms +[2025-08-22 18:24:47] [Rank 0] step:6181/10000 train_time:582917ms step_avg:94.31ms +[2025-08-22 18:24:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:24:49] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:25:03] [Rank 0] PRINT: step:6200/10000 val_loss:3.8871 svd_entropy: attn_qk:H=0.5297,top10E=0.62,eRank=63.3,q75/q25=57.07 attn_vo:H=0.7015,top10E=0.38,eRank=180.6,q75/q25=25.05 mlp_w1:H=0.8621,top10E=0.17,eRank=362.7,q75/q25=8.28 mlp_w2:H=0.8635,top10E=0.16,eRank=361.0,q75/q25=12.51 vo_prod:H=0.5059,top10E=0.66,eRank=64.6,q75/q25=270.84 train_time:584882ms step_avg:94.34ms +[2025-08-22 18:25:03] [Rank 0] PRINT: step:6200/10000 val_loss:3.8871 svd_entropy: attn_qk:H=0.5297,top10E=0.62,eRank=63.3,q75/q25=57.07 attn_vo:H=0.7015,top10E=0.38,eRank=180.6,q75/q25=25.05 mlp_w1:H=0.8621,top10E=0.17,eRank=362.7,q75/q25=8.28 mlp_w2:H=0.8635,top10E=0.16,eRank=361.0,q75/q25=12.51 vo_prod:H=0.5059,top10E=0.66,eRank=64.6,q75/q25=270.84 train_time:584882ms step_avg:94.34ms +[2025-08-22 18:25:03] [Rank 0] step:6201/10000 train_time:584902ms step_avg:94.32ms +[2025-08-22 18:25:03] [Rank 0] step:6201/10000 train_time:584902ms step_avg:94.32ms +[2025-08-22 18:25:05] [Rank 0] step:6221/10000 train_time:586859ms step_avg:94.34ms +[2025-08-22 18:25:05] [Rank 0] step:6221/10000 train_time:586859ms step_avg:94.34ms +[2025-08-22 18:25:07] [Rank 0] step:6241/10000 train_time:588810ms step_avg:94.35ms 
+[2025-08-22 18:25:07] [Rank 0] step:6241/10000 train_time:588810ms step_avg:94.35ms +[2025-08-22 18:25:09] [Rank 0] step:6261/10000 train_time:590767ms step_avg:94.36ms +[2025-08-22 18:25:09] [Rank 0] step:6261/10000 train_time:590767ms step_avg:94.36ms +[2025-08-22 18:25:11] [Rank 0] step:6281/10000 train_time:592728ms step_avg:94.37ms +[2025-08-22 18:25:11] [Rank 0] step:6281/10000 train_time:592728ms step_avg:94.37ms +[2025-08-22 18:25:13] [Rank 0] step:6301/10000 train_time:594686ms step_avg:94.38ms +[2025-08-22 18:25:13] [Rank 0] step:6301/10000 train_time:594686ms step_avg:94.38ms +[2025-08-22 18:25:15] [Rank 0] step:6321/10000 train_time:596647ms step_avg:94.39ms +[2025-08-22 18:25:15] [Rank 0] step:6321/10000 train_time:596647ms step_avg:94.39ms +[2025-08-22 18:25:16] [Rank 0] step:6341/10000 train_time:598604ms step_avg:94.40ms +[2025-08-22 18:25:16] [Rank 0] step:6341/10000 train_time:598604ms step_avg:94.40ms +[2025-08-22 18:25:18] [Rank 0] step:6361/10000 train_time:600570ms step_avg:94.41ms +[2025-08-22 18:25:18] [Rank 0] step:6361/10000 train_time:600570ms step_avg:94.41ms +[2025-08-22 18:25:20] [Rank 0] step:6381/10000 train_time:602531ms step_avg:94.43ms +[2025-08-22 18:25:20] [Rank 0] step:6381/10000 train_time:602531ms step_avg:94.43ms +[2025-08-22 18:25:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:25:22] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:25:36] [Rank 0] PRINT: step:6400/10000 val_loss:3.8763 svd_entropy: attn_qk:H=0.5284,top10E=0.62,eRank=63.4,q75/q25=57.62 attn_vo:H=0.7027,top10E=0.38,eRank=181.0,q75/q25=25.06 mlp_w1:H=0.8608,top10E=0.17,eRank=361.0,q75/q25=8.34 mlp_w2:H=0.8630,top10E=0.16,eRank=360.2,q75/q25=12.66 vo_prod:H=0.5085,top10E=0.65,eRank=64.9,q75/q25=271.84 train_time:604495ms step_avg:94.45ms +[2025-08-22 18:25:36] [Rank 0] PRINT: step:6400/10000 val_loss:3.8763 svd_entropy: attn_qk:H=0.5284,top10E=0.62,eRank=63.4,q75/q25=57.62 attn_vo:H=0.7027,top10E=0.38,eRank=181.0,q75/q25=25.06 mlp_w1:H=0.8608,top10E=0.17,eRank=361.0,q75/q25=8.34 mlp_w2:H=0.8630,top10E=0.16,eRank=360.2,q75/q25=12.66 vo_prod:H=0.5085,top10E=0.65,eRank=64.9,q75/q25=271.84 train_time:604495ms step_avg:94.45ms +[2025-08-22 18:25:36] [Rank 0] step:6401/10000 train_time:604515ms step_avg:94.44ms +[2025-08-22 18:25:36] [Rank 0] step:6401/10000 train_time:604515ms step_avg:94.44ms +[2025-08-22 18:25:38] [Rank 0] step:6421/10000 train_time:606538ms step_avg:94.46ms +[2025-08-22 18:25:38] [Rank 0] step:6421/10000 train_time:606538ms step_avg:94.46ms +[2025-08-22 18:25:40] [Rank 0] step:6441/10000 train_time:608488ms step_avg:94.47ms +[2025-08-22 18:25:40] [Rank 0] step:6441/10000 train_time:608488ms step_avg:94.47ms +[2025-08-22 18:25:42] [Rank 0] step:6461/10000 train_time:610442ms step_avg:94.48ms +[2025-08-22 18:25:42] [Rank 0] step:6461/10000 train_time:610442ms step_avg:94.48ms +[2025-08-22 18:25:44] [Rank 0] step:6481/10000 train_time:612401ms step_avg:94.49ms +[2025-08-22 18:25:44] [Rank 0] step:6481/10000 train_time:612401ms step_avg:94.49ms +[2025-08-22 18:25:46] [Rank 0] step:6501/10000 train_time:614351ms step_avg:94.50ms +[2025-08-22 18:25:46] [Rank 0] step:6501/10000 train_time:614351ms step_avg:94.50ms +[2025-08-22 18:25:48] [Rank 0] step:6521/10000 train_time:616300ms step_avg:94.51ms +[2025-08-22 18:25:48] [Rank 0] step:6521/10000 train_time:616300ms step_avg:94.51ms +[2025-08-22 18:25:50] 
[Rank 0] step:6541/10000 train_time:618258ms step_avg:94.52ms +[2025-08-22 18:25:50] [Rank 0] step:6541/10000 train_time:618258ms step_avg:94.52ms +[2025-08-22 18:25:52] [Rank 0] step:6561/10000 train_time:620214ms step_avg:94.53ms +[2025-08-22 18:25:52] [Rank 0] step:6561/10000 train_time:620214ms step_avg:94.53ms +[2025-08-22 18:25:54] [Rank 0] step:6581/10000 train_time:622166ms step_avg:94.54ms +[2025-08-22 18:25:54] [Rank 0] step:6581/10000 train_time:622166ms step_avg:94.54ms +[2025-08-22 18:25:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:25:55] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:26:09] [Rank 0] PRINT: step:6600/10000 val_loss:3.8631 svd_entropy: attn_qk:H=0.5260,top10E=0.62,eRank=63.4,q75/q25=57.54 attn_vo:H=0.7042,top10E=0.38,eRank=181.4,q75/q25=25.10 mlp_w1:H=0.8599,top10E=0.17,eRank=359.6,q75/q25=8.39 mlp_w2:H=0.8627,top10E=0.16,eRank=359.6,q75/q25=12.73 vo_prod:H=0.5114,top10E=0.65,eRank=65.5,q75/q25=272.51 train_time:624130ms step_avg:94.57ms +[2025-08-22 18:26:09] [Rank 0] PRINT: step:6600/10000 val_loss:3.8631 svd_entropy: attn_qk:H=0.5260,top10E=0.62,eRank=63.4,q75/q25=57.54 attn_vo:H=0.7042,top10E=0.38,eRank=181.4,q75/q25=25.10 mlp_w1:H=0.8599,top10E=0.17,eRank=359.6,q75/q25=8.39 mlp_w2:H=0.8627,top10E=0.16,eRank=359.6,q75/q25=12.73 vo_prod:H=0.5114,top10E=0.65,eRank=65.5,q75/q25=272.51 train_time:624130ms step_avg:94.57ms +[2025-08-22 18:26:09] [Rank 0] step:6601/10000 train_time:624149ms step_avg:94.55ms +[2025-08-22 18:26:09] [Rank 0] step:6601/10000 train_time:624149ms step_avg:94.55ms +[2025-08-22 18:26:11] [Rank 0] step:6621/10000 train_time:626094ms step_avg:94.56ms +[2025-08-22 18:26:11] [Rank 0] step:6621/10000 train_time:626094ms step_avg:94.56ms +[2025-08-22 18:26:13] [Rank 0] step:6641/10000 train_time:628051ms step_avg:94.57ms 
+[2025-08-22 18:26:13] [Rank 0] step:6641/10000 train_time:628051ms step_avg:94.57ms +[2025-08-22 18:26:15] [Rank 0] step:6661/10000 train_time:630002ms step_avg:94.58ms +[2025-08-22 18:26:15] [Rank 0] step:6661/10000 train_time:630002ms step_avg:94.58ms +[2025-08-22 18:26:17] [Rank 0] step:6681/10000 train_time:631972ms step_avg:94.59ms +[2025-08-22 18:26:17] [Rank 0] step:6681/10000 train_time:631972ms step_avg:94.59ms +[2025-08-22 18:26:19] [Rank 0] step:6701/10000 train_time:633962ms step_avg:94.61ms +[2025-08-22 18:26:19] [Rank 0] step:6701/10000 train_time:633962ms step_avg:94.61ms +[2025-08-22 18:26:21] [Rank 0] step:6721/10000 train_time:635945ms step_avg:94.62ms +[2025-08-22 18:26:21] [Rank 0] step:6721/10000 train_time:635945ms step_avg:94.62ms +[2025-08-22 18:26:23] [Rank 0] step:6741/10000 train_time:637926ms step_avg:94.63ms +[2025-08-22 18:26:23] [Rank 0] step:6741/10000 train_time:637926ms step_avg:94.63ms +[2025-08-22 18:26:25] [Rank 0] step:6761/10000 train_time:639906ms step_avg:94.65ms +[2025-08-22 18:26:25] [Rank 0] step:6761/10000 train_time:639906ms step_avg:94.65ms +[2025-08-22 18:26:27] [Rank 0] step:6781/10000 train_time:641891ms step_avg:94.66ms +[2025-08-22 18:26:27] [Rank 0] step:6781/10000 train_time:641891ms step_avg:94.66ms +[2025-08-22 18:26:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:26:29] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:26:43] [Rank 0] PRINT: step:6800/10000 val_loss:3.8493 svd_entropy: attn_qk:H=0.5251,top10E=0.62,eRank=63.5,q75/q25=58.06 attn_vo:H=0.7054,top10E=0.38,eRank=181.6,q75/q25=25.15 mlp_w1:H=0.8590,top10E=0.17,eRank=358.3,q75/q25=8.47 mlp_w2:H=0.8624,top10E=0.16,eRank=359.0,q75/q25=12.81 vo_prod:H=0.5135,top10E=0.65,eRank=65.8,q75/q25=273.37 train_time:643885ms step_avg:94.69ms +[2025-08-22 18:26:43] [Rank 0] PRINT: step:6800/10000 val_loss:3.8493 svd_entropy: attn_qk:H=0.5251,top10E=0.62,eRank=63.5,q75/q25=58.06 attn_vo:H=0.7054,top10E=0.38,eRank=181.6,q75/q25=25.15 mlp_w1:H=0.8590,top10E=0.17,eRank=358.3,q75/q25=8.47 mlp_w2:H=0.8624,top10E=0.16,eRank=359.0,q75/q25=12.81 vo_prod:H=0.5135,top10E=0.65,eRank=65.8,q75/q25=273.37 train_time:643885ms step_avg:94.69ms +[2025-08-22 18:26:43] [Rank 0] step:6801/10000 train_time:643905ms step_avg:94.68ms +[2025-08-22 18:26:43] [Rank 0] step:6801/10000 train_time:643905ms step_avg:94.68ms +[2025-08-22 18:26:45] [Rank 0] step:6821/10000 train_time:645884ms step_avg:94.69ms +[2025-08-22 18:26:45] [Rank 0] step:6821/10000 train_time:645884ms step_avg:94.69ms +[2025-08-22 18:26:47] [Rank 0] step:6841/10000 train_time:647862ms step_avg:94.70ms +[2025-08-22 18:26:47] [Rank 0] step:6841/10000 train_time:647862ms step_avg:94.70ms +[2025-08-22 18:26:49] [Rank 0] step:6861/10000 train_time:649838ms step_avg:94.71ms +[2025-08-22 18:26:49] [Rank 0] step:6861/10000 train_time:649838ms step_avg:94.71ms +[2025-08-22 18:26:51] [Rank 0] step:6881/10000 train_time:651823ms step_avg:94.73ms +[2025-08-22 18:26:51] [Rank 0] step:6881/10000 train_time:651823ms step_avg:94.73ms +[2025-08-22 18:26:53] [Rank 0] step:6901/10000 train_time:653801ms step_avg:94.74ms +[2025-08-22 18:26:53] [Rank 0] step:6901/10000 train_time:653801ms step_avg:94.74ms +[2025-08-22 18:26:55] [Rank 0] step:6921/10000 train_time:655779ms step_avg:94.75ms +[2025-08-22 18:26:55] [Rank 0] step:6921/10000 train_time:655779ms step_avg:94.75ms +[2025-08-22 18:26:57] 
[Rank 0] step:6941/10000 train_time:657767ms step_avg:94.77ms +[2025-08-22 18:26:57] [Rank 0] step:6941/10000 train_time:657767ms step_avg:94.77ms +[2025-08-22 18:26:59] [Rank 0] step:6961/10000 train_time:659764ms step_avg:94.78ms +[2025-08-22 18:26:59] [Rank 0] step:6961/10000 train_time:659764ms step_avg:94.78ms +[2025-08-22 18:27:01] [Rank 0] step:6981/10000 train_time:661755ms step_avg:94.79ms +[2025-08-22 18:27:01] [Rank 0] step:6981/10000 train_time:661755ms step_avg:94.79ms +[2025-08-22 18:27:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:27:02] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:27:16] [Rank 0] PRINT: step:7000/10000 val_loss:3.8357 svd_entropy: attn_qk:H=0.5228,top10E=0.62,eRank=63.5,q75/q25=58.14 attn_vo:H=0.7065,top10E=0.38,eRank=181.9,q75/q25=25.06 mlp_w1:H=0.8582,top10E=0.17,eRank=357.2,q75/q25=8.54 mlp_w2:H=0.8621,top10E=0.16,eRank=358.5,q75/q25=12.85 vo_prod:H=0.5150,top10E=0.65,eRank=66.1,q75/q25=269.34 train_time:663750ms step_avg:94.82ms +[2025-08-22 18:27:16] [Rank 0] PRINT: step:7000/10000 val_loss:3.8357 svd_entropy: attn_qk:H=0.5228,top10E=0.62,eRank=63.5,q75/q25=58.14 attn_vo:H=0.7065,top10E=0.38,eRank=181.9,q75/q25=25.06 mlp_w1:H=0.8582,top10E=0.17,eRank=357.2,q75/q25=8.54 mlp_w2:H=0.8621,top10E=0.16,eRank=358.5,q75/q25=12.85 vo_prod:H=0.5150,top10E=0.65,eRank=66.1,q75/q25=269.34 train_time:663750ms step_avg:94.82ms +[2025-08-22 18:27:16] [Rank 0] step:7001/10000 train_time:663770ms step_avg:94.81ms +[2025-08-22 18:27:16] [Rank 0] step:7001/10000 train_time:663770ms step_avg:94.81ms +[2025-08-22 18:27:18] [Rank 0] step:7021/10000 train_time:665744ms step_avg:94.82ms +[2025-08-22 18:27:18] [Rank 0] step:7021/10000 train_time:665744ms step_avg:94.82ms +[2025-08-22 18:27:20] [Rank 0] step:7041/10000 train_time:667726ms step_avg:94.83ms 
+[2025-08-22 18:27:20] [Rank 0] step:7041/10000 train_time:667726ms step_avg:94.83ms +[2025-08-22 18:27:22] [Rank 0] step:7061/10000 train_time:669710ms step_avg:94.85ms +[2025-08-22 18:27:22] [Rank 0] step:7061/10000 train_time:669710ms step_avg:94.85ms +[2025-08-22 18:27:24] [Rank 0] step:7081/10000 train_time:671692ms step_avg:94.86ms +[2025-08-22 18:27:24] [Rank 0] step:7081/10000 train_time:671692ms step_avg:94.86ms +[2025-08-22 18:27:26] [Rank 0] step:7101/10000 train_time:673687ms step_avg:94.87ms +[2025-08-22 18:27:26] [Rank 0] step:7101/10000 train_time:673687ms step_avg:94.87ms +[2025-08-22 18:27:28] [Rank 0] step:7121/10000 train_time:675671ms step_avg:94.88ms +[2025-08-22 18:27:28] [Rank 0] step:7121/10000 train_time:675671ms step_avg:94.88ms +[2025-08-22 18:27:30] [Rank 0] step:7141/10000 train_time:677658ms step_avg:94.90ms +[2025-08-22 18:27:30] [Rank 0] step:7141/10000 train_time:677658ms step_avg:94.90ms +[2025-08-22 18:27:32] [Rank 0] step:7161/10000 train_time:679650ms step_avg:94.91ms +[2025-08-22 18:27:32] [Rank 0] step:7161/10000 train_time:679650ms step_avg:94.91ms +[2025-08-22 18:27:34] [Rank 0] step:7181/10000 train_time:681638ms step_avg:94.92ms +[2025-08-22 18:27:34] [Rank 0] step:7181/10000 train_time:681638ms step_avg:94.92ms +[2025-08-22 18:27:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:27:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:27:50] [Rank 0] PRINT: step:7200/10000 val_loss:3.8254 svd_entropy: attn_qk:H=0.5225,top10E=0.62,eRank=63.7,q75/q25=58.18 attn_vo:H=0.7077,top10E=0.38,eRank=182.3,q75/q25=25.04 mlp_w1:H=0.8573,top10E=0.17,eRank=355.9,q75/q25=8.62 mlp_w2:H=0.8619,top10E=0.16,eRank=358.1,q75/q25=13.00 vo_prod:H=0.5167,top10E=0.64,eRank=66.4,q75/q25=270.62 train_time:683636ms step_avg:94.95ms +[2025-08-22 18:27:50] [Rank 0] PRINT: step:7200/10000 val_loss:3.8254 svd_entropy: attn_qk:H=0.5225,top10E=0.62,eRank=63.7,q75/q25=58.18 attn_vo:H=0.7077,top10E=0.38,eRank=182.3,q75/q25=25.04 mlp_w1:H=0.8573,top10E=0.17,eRank=355.9,q75/q25=8.62 mlp_w2:H=0.8619,top10E=0.16,eRank=358.1,q75/q25=13.00 vo_prod:H=0.5167,top10E=0.64,eRank=66.4,q75/q25=270.62 train_time:683636ms step_avg:94.95ms +[2025-08-22 18:27:50] [Rank 0] step:7201/10000 train_time:683657ms step_avg:94.94ms +[2025-08-22 18:27:50] [Rank 0] step:7201/10000 train_time:683657ms step_avg:94.94ms +[2025-08-22 18:27:52] [Rank 0] step:7221/10000 train_time:685643ms step_avg:94.95ms +[2025-08-22 18:27:52] [Rank 0] step:7221/10000 train_time:685643ms step_avg:94.95ms +[2025-08-22 18:27:54] [Rank 0] step:7241/10000 train_time:687625ms step_avg:94.96ms +[2025-08-22 18:27:54] [Rank 0] step:7241/10000 train_time:687625ms step_avg:94.96ms +[2025-08-22 18:27:56] [Rank 0] step:7261/10000 train_time:689601ms step_avg:94.97ms +[2025-08-22 18:27:56] [Rank 0] step:7261/10000 train_time:689601ms step_avg:94.97ms +[2025-08-22 18:27:58] [Rank 0] step:7281/10000 train_time:691591ms step_avg:94.99ms +[2025-08-22 18:27:58] [Rank 0] step:7281/10000 train_time:691591ms step_avg:94.99ms +[2025-08-22 18:28:00] [Rank 0] step:7301/10000 train_time:693573ms step_avg:95.00ms +[2025-08-22 18:28:00] [Rank 0] step:7301/10000 train_time:693573ms step_avg:95.00ms +[2025-08-22 18:28:02] [Rank 0] step:7321/10000 train_time:695572ms step_avg:95.01ms +[2025-08-22 18:28:02] [Rank 0] step:7321/10000 train_time:695572ms step_avg:95.01ms +[2025-08-22 18:28:04] 
[Rank 0] step:7341/10000 train_time:697559ms step_avg:95.02ms +[2025-08-22 18:28:04] [Rank 0] step:7341/10000 train_time:697559ms step_avg:95.02ms +[2025-08-22 18:28:06] [Rank 0] step:7361/10000 train_time:699554ms step_avg:95.04ms +[2025-08-22 18:28:06] [Rank 0] step:7361/10000 train_time:699554ms step_avg:95.04ms +[2025-08-22 18:28:08] [Rank 0] step:7381/10000 train_time:701546ms step_avg:95.05ms +[2025-08-22 18:28:08] [Rank 0] step:7381/10000 train_time:701546ms step_avg:95.05ms +[2025-08-22 18:28:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:28:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:28:23] [Rank 0] PRINT: step:7400/10000 val_loss:3.8085 svd_entropy: attn_qk:H=0.5210,top10E=0.62,eRank=63.7,q75/q25=57.86 attn_vo:H=0.7090,top10E=0.37,eRank=182.6,q75/q25=25.12 mlp_w1:H=0.8570,top10E=0.17,eRank=355.3,q75/q25=8.68 mlp_w2:H=0.8617,top10E=0.16,eRank=357.8,q75/q25=13.07 vo_prod:H=0.5184,top10E=0.64,eRank=66.7,q75/q25=272.53 train_time:703524ms step_avg:95.07ms +[2025-08-22 18:28:23] [Rank 0] PRINT: step:7400/10000 val_loss:3.8085 svd_entropy: attn_qk:H=0.5210,top10E=0.62,eRank=63.7,q75/q25=57.86 attn_vo:H=0.7090,top10E=0.37,eRank=182.6,q75/q25=25.12 mlp_w1:H=0.8570,top10E=0.17,eRank=355.3,q75/q25=8.68 mlp_w2:H=0.8617,top10E=0.16,eRank=357.8,q75/q25=13.07 vo_prod:H=0.5184,top10E=0.64,eRank=66.7,q75/q25=272.53 train_time:703524ms step_avg:95.07ms +[2025-08-22 18:28:24] [Rank 0] step:7401/10000 train_time:703543ms step_avg:95.06ms +[2025-08-22 18:28:24] [Rank 0] step:7401/10000 train_time:703543ms step_avg:95.06ms +[2025-08-22 18:28:26] [Rank 0] step:7421/10000 train_time:705536ms step_avg:95.07ms +[2025-08-22 18:28:26] [Rank 0] step:7421/10000 train_time:705536ms step_avg:95.07ms +[2025-08-22 18:28:28] [Rank 0] step:7441/10000 train_time:707515ms step_avg:95.08ms 
+[2025-08-22 18:28:28] [Rank 0] step:7441/10000 train_time:707515ms step_avg:95.08ms +[2025-08-22 18:28:29] [Rank 0] step:7461/10000 train_time:709497ms step_avg:95.09ms +[2025-08-22 18:28:29] [Rank 0] step:7461/10000 train_time:709497ms step_avg:95.09ms +[2025-08-22 18:28:31] [Rank 0] step:7481/10000 train_time:711486ms step_avg:95.11ms +[2025-08-22 18:28:31] [Rank 0] step:7481/10000 train_time:711486ms step_avg:95.11ms +[2025-08-22 18:28:33] [Rank 0] step:7501/10000 train_time:713474ms step_avg:95.12ms +[2025-08-22 18:28:33] [Rank 0] step:7501/10000 train_time:713474ms step_avg:95.12ms +[2025-08-22 18:28:35] [Rank 0] step:7521/10000 train_time:715465ms step_avg:95.13ms +[2025-08-22 18:28:35] [Rank 0] step:7521/10000 train_time:715465ms step_avg:95.13ms +[2025-08-22 18:28:37] [Rank 0] step:7541/10000 train_time:717460ms step_avg:95.14ms +[2025-08-22 18:28:37] [Rank 0] step:7541/10000 train_time:717460ms step_avg:95.14ms +[2025-08-22 18:28:39] [Rank 0] step:7561/10000 train_time:719437ms step_avg:95.15ms +[2025-08-22 18:28:39] [Rank 0] step:7561/10000 train_time:719437ms step_avg:95.15ms +[2025-08-22 18:28:41] [Rank 0] step:7581/10000 train_time:721434ms step_avg:95.16ms +[2025-08-22 18:28:41] [Rank 0] step:7581/10000 train_time:721434ms step_avg:95.16ms +[2025-08-22 18:28:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:28:43] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:28:57] [Rank 0] PRINT: step:7600/10000 val_loss:3.7985 svd_entropy: attn_qk:H=0.5209,top10E=0.62,eRank=63.9,q75/q25=57.85 attn_vo:H=0.7097,top10E=0.37,eRank=182.9,q75/q25=25.11 mlp_w1:H=0.8566,top10E=0.17,eRank=354.8,q75/q25=8.72 mlp_w2:H=0.8614,top10E=0.16,eRank=357.4,q75/q25=13.11 vo_prod:H=0.5204,top10E=0.64,eRank=67.0,q75/q25=271.50 train_time:723437ms step_avg:95.19ms +[2025-08-22 18:28:57] [Rank 0] PRINT: step:7600/10000 val_loss:3.7985 svd_entropy: attn_qk:H=0.5209,top10E=0.62,eRank=63.9,q75/q25=57.85 attn_vo:H=0.7097,top10E=0.37,eRank=182.9,q75/q25=25.11 mlp_w1:H=0.8566,top10E=0.17,eRank=354.8,q75/q25=8.72 mlp_w2:H=0.8614,top10E=0.16,eRank=357.4,q75/q25=13.11 vo_prod:H=0.5204,top10E=0.64,eRank=67.0,q75/q25=271.50 train_time:723437ms step_avg:95.19ms +[2025-08-22 18:28:57] [Rank 0] step:7601/10000 train_time:723457ms step_avg:95.18ms +[2025-08-22 18:28:57] [Rank 0] step:7601/10000 train_time:723457ms step_avg:95.18ms +[2025-08-22 18:28:59] [Rank 0] step:7621/10000 train_time:725440ms step_avg:95.19ms +[2025-08-22 18:28:59] [Rank 0] step:7621/10000 train_time:725440ms step_avg:95.19ms +[2025-08-22 18:29:01] [Rank 0] step:7641/10000 train_time:727422ms step_avg:95.20ms +[2025-08-22 18:29:01] [Rank 0] step:7641/10000 train_time:727422ms step_avg:95.20ms +[2025-08-22 18:29:03] [Rank 0] step:7661/10000 train_time:729407ms step_avg:95.21ms +[2025-08-22 18:29:03] [Rank 0] step:7661/10000 train_time:729407ms step_avg:95.21ms +[2025-08-22 18:29:05] [Rank 0] step:7681/10000 train_time:731387ms step_avg:95.22ms +[2025-08-22 18:29:05] [Rank 0] step:7681/10000 train_time:731387ms step_avg:95.22ms +[2025-08-22 18:29:07] [Rank 0] step:7701/10000 train_time:733369ms step_avg:95.23ms +[2025-08-22 18:29:07] [Rank 0] step:7701/10000 train_time:733369ms step_avg:95.23ms +[2025-08-22 18:29:09] [Rank 0] step:7721/10000 train_time:735364ms step_avg:95.24ms +[2025-08-22 18:29:09] [Rank 0] step:7721/10000 train_time:735364ms step_avg:95.24ms +[2025-08-22 18:29:11] 
[Rank 0] step:7741/10000 train_time:737351ms step_avg:95.25ms +[2025-08-22 18:29:11] [Rank 0] step:7741/10000 train_time:737351ms step_avg:95.25ms +[2025-08-22 18:29:13] [Rank 0] step:7761/10000 train_time:739345ms step_avg:95.26ms +[2025-08-22 18:29:13] [Rank 0] step:7761/10000 train_time:739345ms step_avg:95.26ms +[2025-08-22 18:29:15] [Rank 0] step:7781/10000 train_time:741333ms step_avg:95.27ms +[2025-08-22 18:29:15] [Rank 0] step:7781/10000 train_time:741333ms step_avg:95.27ms +[2025-08-22 18:29:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:29:17] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:29:31] [Rank 0] PRINT: step:7800/10000 val_loss:3.7874 svd_entropy: attn_qk:H=0.5195,top10E=0.62,eRank=64.0,q75/q25=57.88 attn_vo:H=0.7105,top10E=0.37,eRank=183.0,q75/q25=25.14 mlp_w1:H=0.8562,top10E=0.17,eRank=354.2,q75/q25=8.77 mlp_w2:H=0.8612,top10E=0.16,eRank=357.2,q75/q25=13.21 vo_prod:H=0.5218,top10E=0.64,eRank=67.4,q75/q25=272.74 train_time:743336ms step_avg:95.30ms +[2025-08-22 18:29:31] [Rank 0] PRINT: step:7800/10000 val_loss:3.7874 svd_entropy: attn_qk:H=0.5195,top10E=0.62,eRank=64.0,q75/q25=57.88 attn_vo:H=0.7105,top10E=0.37,eRank=183.0,q75/q25=25.14 mlp_w1:H=0.8562,top10E=0.17,eRank=354.2,q75/q25=8.77 mlp_w2:H=0.8612,top10E=0.16,eRank=357.2,q75/q25=13.21 vo_prod:H=0.5218,top10E=0.64,eRank=67.4,q75/q25=272.74 train_time:743336ms step_avg:95.30ms +[2025-08-22 18:29:31] [Rank 0] step:7801/10000 train_time:743357ms step_avg:95.29ms +[2025-08-22 18:29:31] [Rank 0] step:7801/10000 train_time:743357ms step_avg:95.29ms +[2025-08-22 18:29:33] [Rank 0] step:7821/10000 train_time:745325ms step_avg:95.30ms +[2025-08-22 18:29:33] [Rank 0] step:7821/10000 train_time:745325ms step_avg:95.30ms +[2025-08-22 18:29:35] [Rank 0] step:7841/10000 train_time:747301ms step_avg:95.31ms 
+[2025-08-22 18:29:35] [Rank 0] step:7841/10000 train_time:747301ms step_avg:95.31ms +[2025-08-22 18:29:37] [Rank 0] step:7861/10000 train_time:749290ms step_avg:95.32ms +[2025-08-22 18:29:37] [Rank 0] step:7861/10000 train_time:749290ms step_avg:95.32ms +[2025-08-22 18:29:39] [Rank 0] step:7881/10000 train_time:751283ms step_avg:95.33ms +[2025-08-22 18:29:39] [Rank 0] step:7881/10000 train_time:751283ms step_avg:95.33ms +[2025-08-22 18:29:41] [Rank 0] step:7901/10000 train_time:753266ms step_avg:95.34ms +[2025-08-22 18:29:41] [Rank 0] step:7901/10000 train_time:753266ms step_avg:95.34ms +[2025-08-22 18:29:43] [Rank 0] step:7921/10000 train_time:755260ms step_avg:95.35ms +[2025-08-22 18:29:43] [Rank 0] step:7921/10000 train_time:755260ms step_avg:95.35ms +[2025-08-22 18:29:45] [Rank 0] step:7941/10000 train_time:757257ms step_avg:95.36ms +[2025-08-22 18:29:45] [Rank 0] step:7941/10000 train_time:757257ms step_avg:95.36ms +[2025-08-22 18:29:47] [Rank 0] step:7961/10000 train_time:759248ms step_avg:95.37ms +[2025-08-22 18:29:47] [Rank 0] step:7961/10000 train_time:759248ms step_avg:95.37ms +[2025-08-22 18:29:49] [Rank 0] step:7981/10000 train_time:761230ms step_avg:95.38ms +[2025-08-22 18:29:49] [Rank 0] step:7981/10000 train_time:761230ms step_avg:95.38ms +[2025-08-22 18:29:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:29:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:30:04] [Rank 0] PRINT: step:8000/10000 val_loss:3.7712 svd_entropy: attn_qk:H=0.5189,top10E=0.62,eRank=64.1,q75/q25=58.31 attn_vo:H=0.7112,top10E=0.37,eRank=183.3,q75/q25=25.11 mlp_w1:H=0.8560,top10E=0.17,eRank=353.7,q75/q25=8.81 mlp_w2:H=0.8611,top10E=0.16,eRank=357.0,q75/q25=13.33 vo_prod:H=0.5231,top10E=0.64,eRank=67.4,q75/q25=273.74 train_time:763296ms step_avg:95.41ms +[2025-08-22 18:30:04] [Rank 0] PRINT: step:8000/10000 val_loss:3.7712 svd_entropy: attn_qk:H=0.5189,top10E=0.62,eRank=64.1,q75/q25=58.31 attn_vo:H=0.7112,top10E=0.37,eRank=183.3,q75/q25=25.11 mlp_w1:H=0.8560,top10E=0.17,eRank=353.7,q75/q25=8.81 mlp_w2:H=0.8611,top10E=0.16,eRank=357.0,q75/q25=13.33 vo_prod:H=0.5231,top10E=0.64,eRank=67.4,q75/q25=273.74 train_time:763296ms step_avg:95.41ms +[2025-08-22 18:30:04] [Rank 0] step:8001/10000 train_time:763316ms step_avg:95.40ms +[2025-08-22 18:30:04] [Rank 0] step:8001/10000 train_time:763316ms step_avg:95.40ms +[2025-08-22 18:30:06] [Rank 0] step:8021/10000 train_time:765306ms step_avg:95.41ms +[2025-08-22 18:30:06] [Rank 0] step:8021/10000 train_time:765306ms step_avg:95.41ms +[2025-08-22 18:30:08] [Rank 0] step:8041/10000 train_time:767302ms step_avg:95.42ms +[2025-08-22 18:30:08] [Rank 0] step:8041/10000 train_time:767302ms step_avg:95.42ms +[2025-08-22 18:30:10] [Rank 0] step:8061/10000 train_time:769294ms step_avg:95.43ms +[2025-08-22 18:30:10] [Rank 0] step:8061/10000 train_time:769294ms step_avg:95.43ms +[2025-08-22 18:30:12] [Rank 0] step:8081/10000 train_time:771276ms step_avg:95.44ms +[2025-08-22 18:30:12] [Rank 0] step:8081/10000 train_time:771276ms step_avg:95.44ms +[2025-08-22 18:30:14] [Rank 0] step:8101/10000 train_time:773275ms step_avg:95.45ms +[2025-08-22 18:30:14] [Rank 0] step:8101/10000 train_time:773275ms step_avg:95.45ms +[2025-08-22 18:30:16] [Rank 0] step:8121/10000 train_time:775266ms step_avg:95.46ms +[2025-08-22 18:30:16] [Rank 0] step:8121/10000 train_time:775266ms step_avg:95.46ms +[2025-08-22 18:30:19] 
[Rank 0] step:8141/10000 train_time:777929ms step_avg:95.56ms +[2025-08-22 18:30:19] [Rank 0] step:8141/10000 train_time:777929ms step_avg:95.56ms +[2025-08-22 18:30:21] [Rank 0] step:8161/10000 train_time:779940ms step_avg:95.57ms +[2025-08-22 18:30:21] [Rank 0] step:8161/10000 train_time:779940ms step_avg:95.57ms +[2025-08-22 18:30:23] [Rank 0] step:8181/10000 train_time:781963ms step_avg:95.58ms +[2025-08-22 18:30:23] [Rank 0] step:8181/10000 train_time:781963ms step_avg:95.58ms +[2025-08-22 18:30:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:30:25] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:30:39] [Rank 0] PRINT: step:8200/10000 val_loss:3.7611 svd_entropy: attn_qk:H=0.5188,top10E=0.62,eRank=64.3,q75/q25=58.41 attn_vo:H=0.7120,top10E=0.37,eRank=183.6,q75/q25=25.10 mlp_w1:H=0.8557,top10E=0.17,eRank=353.4,q75/q25=8.85 mlp_w2:H=0.8610,top10E=0.16,eRank=356.8,q75/q25=13.47 vo_prod:H=0.5244,top10E=0.64,eRank=67.6,q75/q25=273.68 train_time:784012ms step_avg:95.61ms +[2025-08-22 18:30:39] [Rank 0] PRINT: step:8200/10000 val_loss:3.7611 svd_entropy: attn_qk:H=0.5188,top10E=0.62,eRank=64.3,q75/q25=58.41 attn_vo:H=0.7120,top10E=0.37,eRank=183.6,q75/q25=25.10 mlp_w1:H=0.8557,top10E=0.17,eRank=353.4,q75/q25=8.85 mlp_w2:H=0.8610,top10E=0.16,eRank=356.8,q75/q25=13.47 vo_prod:H=0.5244,top10E=0.64,eRank=67.6,q75/q25=273.68 train_time:784012ms step_avg:95.61ms +[2025-08-22 18:30:39] [Rank 0] step:8201/10000 train_time:784032ms step_avg:95.60ms +[2025-08-22 18:30:39] [Rank 0] step:8201/10000 train_time:784032ms step_avg:95.60ms +[2025-08-22 18:30:41] [Rank 0] step:8221/10000 train_time:786042ms step_avg:95.61ms +[2025-08-22 18:30:41] [Rank 0] step:8221/10000 train_time:786042ms step_avg:95.61ms +[2025-08-22 18:30:43] [Rank 0] step:8241/10000 train_time:788061ms step_avg:95.63ms 
+[2025-08-22 18:30:43] [Rank 0] step:8241/10000 train_time:788061ms step_avg:95.63ms +[2025-08-22 18:30:45] [Rank 0] step:8261/10000 train_time:790083ms step_avg:95.64ms +[2025-08-22 18:30:45] [Rank 0] step:8261/10000 train_time:790083ms step_avg:95.64ms +[2025-08-22 18:30:47] [Rank 0] step:8281/10000 train_time:792093ms step_avg:95.65ms +[2025-08-22 18:30:47] [Rank 0] step:8281/10000 train_time:792093ms step_avg:95.65ms +[2025-08-22 18:30:49] [Rank 0] step:8301/10000 train_time:794106ms step_avg:95.66ms +[2025-08-22 18:30:49] [Rank 0] step:8301/10000 train_time:794106ms step_avg:95.66ms +[2025-08-22 18:30:51] [Rank 0] step:8321/10000 train_time:796117ms step_avg:95.68ms +[2025-08-22 18:30:51] [Rank 0] step:8321/10000 train_time:796117ms step_avg:95.68ms +[2025-08-22 18:30:53] [Rank 0] step:8341/10000 train_time:798215ms step_avg:95.70ms +[2025-08-22 18:30:53] [Rank 0] step:8341/10000 train_time:798215ms step_avg:95.70ms +[2025-08-22 18:30:55] [Rank 0] step:8361/10000 train_time:800306ms step_avg:95.72ms +[2025-08-22 18:30:55] [Rank 0] step:8361/10000 train_time:800306ms step_avg:95.72ms +[2025-08-22 18:30:57] [Rank 0] step:8381/10000 train_time:802320ms step_avg:95.73ms +[2025-08-22 18:30:57] [Rank 0] step:8381/10000 train_time:802320ms step_avg:95.73ms +[2025-08-22 18:30:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:30:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:31:13] [Rank 0] PRINT: step:8400/10000 val_loss:3.7497 svd_entropy: attn_qk:H=0.5180,top10E=0.62,eRank=64.4,q75/q25=58.61 attn_vo:H=0.7127,top10E=0.37,eRank=183.9,q75/q25=25.12 mlp_w1:H=0.8555,top10E=0.17,eRank=353.0,q75/q25=8.88 mlp_w2:H=0.8608,top10E=0.16,eRank=356.7,q75/q25=13.51 vo_prod:H=0.5254,top10E=0.63,eRank=67.8,q75/q25=273.18 train_time:804340ms step_avg:95.75ms +[2025-08-22 18:31:13] [Rank 0] PRINT: step:8400/10000 val_loss:3.7497 svd_entropy: attn_qk:H=0.5180,top10E=0.62,eRank=64.4,q75/q25=58.61 attn_vo:H=0.7127,top10E=0.37,eRank=183.9,q75/q25=25.12 mlp_w1:H=0.8555,top10E=0.17,eRank=353.0,q75/q25=8.88 mlp_w2:H=0.8608,top10E=0.16,eRank=356.7,q75/q25=13.51 vo_prod:H=0.5254,top10E=0.63,eRank=67.8,q75/q25=273.18 train_time:804340ms step_avg:95.75ms +[2025-08-22 18:31:13] [Rank 0] step:8401/10000 train_time:804360ms step_avg:95.75ms +[2025-08-22 18:31:13] [Rank 0] step:8401/10000 train_time:804360ms step_avg:95.75ms +[2025-08-22 18:31:15] [Rank 0] step:8421/10000 train_time:806362ms step_avg:95.76ms +[2025-08-22 18:31:15] [Rank 0] step:8421/10000 train_time:806362ms step_avg:95.76ms +[2025-08-22 18:31:17] [Rank 0] step:8441/10000 train_time:808372ms step_avg:95.77ms +[2025-08-22 18:31:17] [Rank 0] step:8441/10000 train_time:808372ms step_avg:95.77ms +[2025-08-22 18:31:19] [Rank 0] step:8461/10000 train_time:810381ms step_avg:95.78ms +[2025-08-22 18:31:19] [Rank 0] step:8461/10000 train_time:810381ms step_avg:95.78ms +[2025-08-22 18:31:21] [Rank 0] step:8481/10000 train_time:812395ms step_avg:95.79ms +[2025-08-22 18:31:21] [Rank 0] step:8481/10000 train_time:812395ms step_avg:95.79ms +[2025-08-22 18:31:23] [Rank 0] step:8501/10000 train_time:814431ms step_avg:95.80ms +[2025-08-22 18:31:23] [Rank 0] step:8501/10000 train_time:814431ms step_avg:95.80ms +[2025-08-22 18:31:25] [Rank 0] step:8521/10000 train_time:816451ms step_avg:95.82ms +[2025-08-22 18:31:25] [Rank 0] step:8521/10000 train_time:816451ms step_avg:95.82ms +[2025-08-22 18:31:27] 
[Rank 0] step:8541/10000 train_time:818482ms step_avg:95.83ms +[2025-08-22 18:31:27] [Rank 0] step:8541/10000 train_time:818482ms step_avg:95.83ms +[2025-08-22 18:31:29] [Rank 0] step:8561/10000 train_time:820503ms step_avg:95.84ms +[2025-08-22 18:31:29] [Rank 0] step:8561/10000 train_time:820503ms step_avg:95.84ms +[2025-08-22 18:31:31] [Rank 0] step:8581/10000 train_time:822520ms step_avg:95.85ms +[2025-08-22 18:31:31] [Rank 0] step:8581/10000 train_time:822520ms step_avg:95.85ms +[2025-08-22 18:31:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:31:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:31:47] [Rank 0] PRINT: step:8600/10000 val_loss:3.7411 svd_entropy: attn_qk:H=0.5185,top10E=0.62,eRank=64.6,q75/q25=58.67 attn_vo:H=0.7134,top10E=0.37,eRank=184.2,q75/q25=25.18 mlp_w1:H=0.8552,top10E=0.17,eRank=352.6,q75/q25=8.91 mlp_w2:H=0.8607,top10E=0.16,eRank=356.5,q75/q25=13.53 vo_prod:H=0.5266,top10E=0.63,eRank=68.1,q75/q25=273.18 train_time:824539ms step_avg:95.88ms +[2025-08-22 18:31:47] [Rank 0] PRINT: step:8600/10000 val_loss:3.7411 svd_entropy: attn_qk:H=0.5185,top10E=0.62,eRank=64.6,q75/q25=58.67 attn_vo:H=0.7134,top10E=0.37,eRank=184.2,q75/q25=25.18 mlp_w1:H=0.8552,top10E=0.17,eRank=352.6,q75/q25=8.91 mlp_w2:H=0.8607,top10E=0.16,eRank=356.5,q75/q25=13.53 vo_prod:H=0.5266,top10E=0.63,eRank=68.1,q75/q25=273.18 train_time:824539ms step_avg:95.88ms +[2025-08-22 18:31:47] [Rank 0] step:8601/10000 train_time:824560ms step_avg:95.87ms +[2025-08-22 18:31:47] [Rank 0] step:8601/10000 train_time:824560ms step_avg:95.87ms +[2025-08-22 18:31:49] [Rank 0] step:8621/10000 train_time:826564ms step_avg:95.88ms +[2025-08-22 18:31:49] [Rank 0] step:8621/10000 train_time:826564ms step_avg:95.88ms +[2025-08-22 18:31:51] [Rank 0] step:8641/10000 train_time:828575ms step_avg:95.89ms 
+[2025-08-22 18:31:51] [Rank 0] step:8641/10000 train_time:828575ms step_avg:95.89ms +[2025-08-22 18:31:53] [Rank 0] step:8661/10000 train_time:830585ms step_avg:95.90ms +[2025-08-22 18:31:53] [Rank 0] step:8661/10000 train_time:830585ms step_avg:95.90ms +[2025-08-22 18:31:55] [Rank 0] step:8681/10000 train_time:832683ms step_avg:95.92ms +[2025-08-22 18:31:55] [Rank 0] step:8681/10000 train_time:832683ms step_avg:95.92ms +[2025-08-22 18:31:57] [Rank 0] step:8701/10000 train_time:834796ms step_avg:95.94ms +[2025-08-22 18:31:57] [Rank 0] step:8701/10000 train_time:834796ms step_avg:95.94ms +[2025-08-22 18:31:59] [Rank 0] step:8721/10000 train_time:836823ms step_avg:95.95ms +[2025-08-22 18:31:59] [Rank 0] step:8721/10000 train_time:836823ms step_avg:95.95ms +[2025-08-22 18:32:01] [Rank 0] step:8741/10000 train_time:838835ms step_avg:95.97ms +[2025-08-22 18:32:01] [Rank 0] step:8741/10000 train_time:838835ms step_avg:95.97ms +[2025-08-22 18:32:03] [Rank 0] step:8761/10000 train_time:840854ms step_avg:95.98ms +[2025-08-22 18:32:03] [Rank 0] step:8761/10000 train_time:840854ms step_avg:95.98ms +[2025-08-22 18:32:05] [Rank 0] step:8781/10000 train_time:842874ms step_avg:95.99ms +[2025-08-22 18:32:05] [Rank 0] step:8781/10000 train_time:842874ms step_avg:95.99ms +[2025-08-22 18:32:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:32:07] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:32:21] [Rank 0] PRINT: step:8800/10000 val_loss:3.7294 svd_entropy: attn_qk:H=0.5183,top10E=0.62,eRank=64.7,q75/q25=58.75 attn_vo:H=0.7140,top10E=0.36,eRank=184.5,q75/q25=25.13 mlp_w1:H=0.8551,top10E=0.17,eRank=352.4,q75/q25=8.92 mlp_w2:H=0.8607,top10E=0.16,eRank=356.5,q75/q25=13.53 vo_prod:H=0.5278,top10E=0.63,eRank=68.4,q75/q25=274.23 train_time:844901ms step_avg:96.01ms +[2025-08-22 18:32:21] [Rank 0] PRINT: step:8800/10000 val_loss:3.7294 svd_entropy: attn_qk:H=0.5183,top10E=0.62,eRank=64.7,q75/q25=58.75 attn_vo:H=0.7140,top10E=0.36,eRank=184.5,q75/q25=25.13 mlp_w1:H=0.8551,top10E=0.17,eRank=352.4,q75/q25=8.92 mlp_w2:H=0.8607,top10E=0.16,eRank=356.5,q75/q25=13.53 vo_prod:H=0.5278,top10E=0.63,eRank=68.4,q75/q25=274.23 train_time:844901ms step_avg:96.01ms +[2025-08-22 18:32:21] [Rank 0] step:8801/10000 train_time:844922ms step_avg:96.00ms +[2025-08-22 18:32:21] [Rank 0] step:8801/10000 train_time:844922ms step_avg:96.00ms +[2025-08-22 18:32:23] [Rank 0] step:8821/10000 train_time:846923ms step_avg:96.01ms +[2025-08-22 18:32:23] [Rank 0] step:8821/10000 train_time:846923ms step_avg:96.01ms +[2025-08-22 18:32:25] [Rank 0] step:8841/10000 train_time:848959ms step_avg:96.03ms +[2025-08-22 18:32:25] [Rank 0] step:8841/10000 train_time:848959ms step_avg:96.03ms +[2025-08-22 18:32:27] [Rank 0] step:8861/10000 train_time:850973ms step_avg:96.04ms +[2025-08-22 18:32:27] [Rank 0] step:8861/10000 train_time:850973ms step_avg:96.04ms +[2025-08-22 18:32:29] [Rank 0] step:8881/10000 train_time:852992ms step_avg:96.05ms +[2025-08-22 18:32:29] [Rank 0] step:8881/10000 train_time:852992ms step_avg:96.05ms +[2025-08-22 18:32:31] [Rank 0] step:8901/10000 train_time:855015ms step_avg:96.06ms +[2025-08-22 18:32:31] [Rank 0] step:8901/10000 train_time:855015ms step_avg:96.06ms +[2025-08-22 18:32:33] [Rank 0] step:8921/10000 train_time:857050ms step_avg:96.07ms +[2025-08-22 18:32:33] [Rank 0] step:8921/10000 train_time:857050ms step_avg:96.07ms +[2025-08-22 18:32:35] 
[Rank 0] step:8941/10000 train_time:859077ms step_avg:96.08ms +[2025-08-22 18:32:35] [Rank 0] step:8941/10000 train_time:859077ms step_avg:96.08ms +[2025-08-22 18:32:37] [Rank 0] step:8961/10000 train_time:861094ms step_avg:96.09ms +[2025-08-22 18:32:37] [Rank 0] step:8961/10000 train_time:861094ms step_avg:96.09ms +[2025-08-22 18:32:39] [Rank 0] step:8981/10000 train_time:863117ms step_avg:96.10ms +[2025-08-22 18:32:39] [Rank 0] step:8981/10000 train_time:863117ms step_avg:96.10ms +[2025-08-22 18:32:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:32:41] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:32:55] [Rank 0] PRINT: step:9000/10000 val_loss:3.7197 svd_entropy: attn_qk:H=0.5186,top10E=0.62,eRank=64.8,q75/q25=58.70 attn_vo:H=0.7145,top10E=0.36,eRank=184.7,q75/q25=25.13 mlp_w1:H=0.8550,top10E=0.17,eRank=352.2,q75/q25=8.95 mlp_w2:H=0.8607,top10E=0.16,eRank=356.5,q75/q25=13.54 vo_prod:H=0.5289,top10E=0.63,eRank=68.6,q75/q25=275.06 train_time:865142ms step_avg:96.13ms +[2025-08-22 18:32:55] [Rank 0] PRINT: step:9000/10000 val_loss:3.7197 svd_entropy: attn_qk:H=0.5186,top10E=0.62,eRank=64.8,q75/q25=58.70 attn_vo:H=0.7145,top10E=0.36,eRank=184.7,q75/q25=25.13 mlp_w1:H=0.8550,top10E=0.17,eRank=352.2,q75/q25=8.95 mlp_w2:H=0.8607,top10E=0.16,eRank=356.5,q75/q25=13.54 vo_prod:H=0.5289,top10E=0.63,eRank=68.6,q75/q25=275.06 train_time:865142ms step_avg:96.13ms +[2025-08-22 18:32:55] [Rank 0] step:9001/10000 train_time:865162ms step_avg:96.12ms +[2025-08-22 18:32:55] [Rank 0] step:9001/10000 train_time:865162ms step_avg:96.12ms +[2025-08-22 18:32:57] [Rank 0] step:9021/10000 train_time:867186ms step_avg:96.13ms +[2025-08-22 18:32:57] [Rank 0] step:9021/10000 train_time:867186ms step_avg:96.13ms +[2025-08-22 18:33:00] [Rank 0] step:9041/10000 train_time:869267ms step_avg:96.15ms 
+[2025-08-22 18:33:00] [Rank 0] step:9041/10000 train_time:869267ms step_avg:96.15ms +[2025-08-22 18:33:02] [Rank 0] step:9061/10000 train_time:871345ms step_avg:96.16ms +[2025-08-22 18:33:02] [Rank 0] step:9061/10000 train_time:871345ms step_avg:96.16ms +[2025-08-22 18:33:04] [Rank 0] step:9081/10000 train_time:873370ms step_avg:96.18ms +[2025-08-22 18:33:04] [Rank 0] step:9081/10000 train_time:873370ms step_avg:96.18ms +[2025-08-22 18:33:06] [Rank 0] step:9101/10000 train_time:875399ms step_avg:96.19ms +[2025-08-22 18:33:06] [Rank 0] step:9101/10000 train_time:875399ms step_avg:96.19ms +[2025-08-22 18:33:08] [Rank 0] step:9121/10000 train_time:877418ms step_avg:96.20ms +[2025-08-22 18:33:08] [Rank 0] step:9121/10000 train_time:877418ms step_avg:96.20ms +[2025-08-22 18:33:10] [Rank 0] step:9141/10000 train_time:879424ms step_avg:96.21ms +[2025-08-22 18:33:10] [Rank 0] step:9141/10000 train_time:879424ms step_avg:96.21ms +[2025-08-22 18:33:12] [Rank 0] step:9161/10000 train_time:881437ms step_avg:96.22ms +[2025-08-22 18:33:12] [Rank 0] step:9161/10000 train_time:881437ms step_avg:96.22ms +[2025-08-22 18:33:14] [Rank 0] step:9181/10000 train_time:883490ms step_avg:96.23ms +[2025-08-22 18:33:14] [Rank 0] step:9181/10000 train_time:883490ms step_avg:96.23ms +[2025-08-22 18:33:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:33:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:33:29] [Rank 0] PRINT: step:9200/10000 val_loss:3.7119 svd_entropy: attn_qk:H=0.5187,top10E=0.62,eRank=64.9,q75/q25=58.88 attn_vo:H=0.7149,top10E=0.36,eRank=184.8,q75/q25=25.13 mlp_w1:H=0.8548,top10E=0.17,eRank=351.9,q75/q25=8.98 mlp_w2:H=0.8607,top10E=0.16,eRank=356.5,q75/q25=13.60 vo_prod:H=0.5297,top10E=0.63,eRank=68.7,q75/q25=274.10 train_time:885509ms step_avg:96.25ms +[2025-08-22 18:33:29] [Rank 0] PRINT: step:9200/10000 val_loss:3.7119 svd_entropy: attn_qk:H=0.5187,top10E=0.62,eRank=64.9,q75/q25=58.88 attn_vo:H=0.7149,top10E=0.36,eRank=184.8,q75/q25=25.13 mlp_w1:H=0.8548,top10E=0.17,eRank=351.9,q75/q25=8.98 mlp_w2:H=0.8607,top10E=0.16,eRank=356.5,q75/q25=13.60 vo_prod:H=0.5297,top10E=0.63,eRank=68.7,q75/q25=274.10 train_time:885509ms step_avg:96.25ms +[2025-08-22 18:33:29] [Rank 0] step:9201/10000 train_time:885532ms step_avg:96.24ms +[2025-08-22 18:33:29] [Rank 0] step:9201/10000 train_time:885532ms step_avg:96.24ms +[2025-08-22 18:33:31] [Rank 0] step:9221/10000 train_time:887548ms step_avg:96.25ms +[2025-08-22 18:33:31] [Rank 0] step:9221/10000 train_time:887548ms step_avg:96.25ms +[2025-08-22 18:33:34] [Rank 0] step:9241/10000 train_time:889577ms step_avg:96.26ms +[2025-08-22 18:33:34] [Rank 0] step:9241/10000 train_time:889577ms step_avg:96.26ms +[2025-08-22 18:33:36] [Rank 0] step:9261/10000 train_time:891600ms step_avg:96.27ms +[2025-08-22 18:33:36] [Rank 0] step:9261/10000 train_time:891600ms step_avg:96.27ms +[2025-08-22 18:33:38] [Rank 0] step:9281/10000 train_time:893608ms step_avg:96.28ms +[2025-08-22 18:33:38] [Rank 0] step:9281/10000 train_time:893608ms step_avg:96.28ms +[2025-08-22 18:33:40] [Rank 0] step:9301/10000 train_time:895624ms step_avg:96.29ms +[2025-08-22 18:33:40] [Rank 0] step:9301/10000 train_time:895624ms step_avg:96.29ms +[2025-08-22 18:33:42] [Rank 0] step:9321/10000 train_time:897644ms step_avg:96.30ms +[2025-08-22 18:33:42] [Rank 0] step:9321/10000 train_time:897644ms step_avg:96.30ms +[2025-08-22 18:33:44] 
[Rank 0] step:9341/10000 train_time:899665ms step_avg:96.31ms +[2025-08-22 18:33:44] [Rank 0] step:9341/10000 train_time:899665ms step_avg:96.31ms +[2025-08-22 18:33:46] [Rank 0] step:9361/10000 train_time:901690ms step_avg:96.32ms +[2025-08-22 18:33:46] [Rank 0] step:9361/10000 train_time:901690ms step_avg:96.32ms +[2025-08-22 18:33:48] [Rank 0] step:9381/10000 train_time:903725ms step_avg:96.34ms +[2025-08-22 18:33:48] [Rank 0] step:9381/10000 train_time:903725ms step_avg:96.34ms +[2025-08-22 18:33:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:33:50] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:34:03] [Rank 0] PRINT: step:9400/10000 val_loss:3.7036 svd_entropy: attn_qk:H=0.5192,top10E=0.62,eRank=65.1,q75/q25=58.86 attn_vo:H=0.7154,top10E=0.36,eRank=185.0,q75/q25=25.17 mlp_w1:H=0.8547,top10E=0.17,eRank=351.8,q75/q25=8.98 mlp_w2:H=0.8607,top10E=0.16,eRank=356.5,q75/q25=13.63 vo_prod:H=0.5305,top10E=0.63,eRank=68.8,q75/q25=274.13 train_time:905757ms step_avg:96.36ms +[2025-08-22 18:34:03] [Rank 0] PRINT: step:9400/10000 val_loss:3.7036 svd_entropy: attn_qk:H=0.5192,top10E=0.62,eRank=65.1,q75/q25=58.86 attn_vo:H=0.7154,top10E=0.36,eRank=185.0,q75/q25=25.17 mlp_w1:H=0.8547,top10E=0.17,eRank=351.8,q75/q25=8.98 mlp_w2:H=0.8607,top10E=0.16,eRank=356.5,q75/q25=13.63 vo_prod:H=0.5305,top10E=0.63,eRank=68.8,q75/q25=274.13 train_time:905757ms step_avg:96.36ms +[2025-08-22 18:34:03] [Rank 0] step:9401/10000 train_time:905777ms step_avg:96.35ms +[2025-08-22 18:34:03] [Rank 0] step:9401/10000 train_time:905777ms step_avg:96.35ms +[2025-08-22 18:34:05] [Rank 0] step:9421/10000 train_time:907834ms step_avg:96.36ms +[2025-08-22 18:34:05] [Rank 0] step:9421/10000 train_time:907834ms step_avg:96.36ms +[2025-08-22 18:34:07] [Rank 0] step:9441/10000 train_time:909853ms step_avg:96.37ms 
+[2025-08-22 18:34:07] [Rank 0] step:9441/10000 train_time:909853ms step_avg:96.37ms +[2025-08-22 18:34:10] [Rank 0] step:9461/10000 train_time:911875ms step_avg:96.38ms +[2025-08-22 18:34:10] [Rank 0] step:9461/10000 train_time:911875ms step_avg:96.38ms +[2025-08-22 18:34:12] [Rank 0] step:9481/10000 train_time:913903ms step_avg:96.39ms +[2025-08-22 18:34:12] [Rank 0] step:9481/10000 train_time:913903ms step_avg:96.39ms +[2025-08-22 18:34:14] [Rank 0] step:9501/10000 train_time:915931ms step_avg:96.40ms +[2025-08-22 18:34:14] [Rank 0] step:9501/10000 train_time:915931ms step_avg:96.40ms +[2025-08-22 18:34:16] [Rank 0] step:9521/10000 train_time:917943ms step_avg:96.41ms +[2025-08-22 18:34:16] [Rank 0] step:9521/10000 train_time:917943ms step_avg:96.41ms +[2025-08-22 18:34:18] [Rank 0] step:9541/10000 train_time:919961ms step_avg:96.42ms +[2025-08-22 18:34:18] [Rank 0] step:9541/10000 train_time:919961ms step_avg:96.42ms +[2025-08-22 18:34:20] [Rank 0] step:9561/10000 train_time:921974ms step_avg:96.43ms +[2025-08-22 18:34:20] [Rank 0] step:9561/10000 train_time:921974ms step_avg:96.43ms +[2025-08-22 18:34:22] [Rank 0] step:9581/10000 train_time:923993ms step_avg:96.44ms +[2025-08-22 18:34:22] [Rank 0] step:9581/10000 train_time:923993ms step_avg:96.44ms +[2025-08-22 18:34:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:34:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:34:37] [Rank 0] PRINT: step:9600/10000 val_loss:3.6951 svd_entropy: attn_qk:H=0.5189,top10E=0.62,eRank=65.2,q75/q25=58.75 attn_vo:H=0.7157,top10E=0.36,eRank=185.1,q75/q25=25.17 mlp_w1:H=0.8547,top10E=0.17,eRank=351.7,q75/q25=9.00 mlp_w2:H=0.8607,top10E=0.16,eRank=356.6,q75/q25=13.64 vo_prod:H=0.5310,top10E=0.63,eRank=68.9,q75/q25=274.54 train_time:926027ms step_avg:96.46ms +[2025-08-22 18:34:37] [Rank 0] PRINT: step:9600/10000 val_loss:3.6951 svd_entropy: attn_qk:H=0.5189,top10E=0.62,eRank=65.2,q75/q25=58.75 attn_vo:H=0.7157,top10E=0.36,eRank=185.1,q75/q25=25.17 mlp_w1:H=0.8547,top10E=0.17,eRank=351.7,q75/q25=9.00 mlp_w2:H=0.8607,top10E=0.16,eRank=356.6,q75/q25=13.64 vo_prod:H=0.5310,top10E=0.63,eRank=68.9,q75/q25=274.54 train_time:926027ms step_avg:96.46ms +[2025-08-22 18:34:38] [Rank 0] step:9601/10000 train_time:926048ms step_avg:96.45ms +[2025-08-22 18:34:38] [Rank 0] step:9601/10000 train_time:926048ms step_avg:96.45ms +[2025-08-22 18:34:40] [Rank 0] step:9621/10000 train_time:928075ms step_avg:96.46ms +[2025-08-22 18:34:40] [Rank 0] step:9621/10000 train_time:928075ms step_avg:96.46ms +[2025-08-22 18:34:42] [Rank 0] step:9641/10000 train_time:930094ms step_avg:96.47ms +[2025-08-22 18:34:42] [Rank 0] step:9641/10000 train_time:930094ms step_avg:96.47ms +[2025-08-22 18:34:44] [Rank 0] step:9661/10000 train_time:932143ms step_avg:96.49ms +[2025-08-22 18:34:44] [Rank 0] step:9661/10000 train_time:932143ms step_avg:96.49ms +[2025-08-22 18:34:46] [Rank 0] step:9681/10000 train_time:934184ms step_avg:96.50ms +[2025-08-22 18:34:46] [Rank 0] step:9681/10000 train_time:934184ms step_avg:96.50ms +[2025-08-22 18:34:48] [Rank 0] step:9701/10000 train_time:936237ms step_avg:96.51ms +[2025-08-22 18:34:48] [Rank 0] step:9701/10000 train_time:936237ms step_avg:96.51ms +[2025-08-22 18:34:50] [Rank 0] step:9721/10000 train_time:938272ms step_avg:96.52ms +[2025-08-22 18:34:50] [Rank 0] step:9721/10000 train_time:938272ms step_avg:96.52ms +[2025-08-22 18:34:52] 
[Rank 0] step:9741/10000 train_time:940332ms step_avg:96.53ms +[2025-08-22 18:34:52] [Rank 0] step:9741/10000 train_time:940332ms step_avg:96.53ms +[2025-08-22 18:34:54] [Rank 0] step:9761/10000 train_time:942379ms step_avg:96.55ms +[2025-08-22 18:34:54] [Rank 0] step:9761/10000 train_time:942379ms step_avg:96.55ms +[2025-08-22 18:34:56] [Rank 0] step:9781/10000 train_time:944433ms step_avg:96.56ms +[2025-08-22 18:34:56] [Rank 0] step:9781/10000 train_time:944433ms step_avg:96.56ms +[2025-08-22 18:34:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:34:58] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:35:12] [Rank 0] PRINT: step:9800/10000 val_loss:3.6881 svd_entropy: attn_qk:H=0.5191,top10E=0.62,eRank=65.3,q75/q25=58.66 attn_vo:H=0.7159,top10E=0.36,eRank=185.2,q75/q25=25.20 mlp_w1:H=0.8546,top10E=0.17,eRank=351.6,q75/q25=9.01 mlp_w2:H=0.8607,top10E=0.16,eRank=356.6,q75/q25=13.67 vo_prod:H=0.5315,top10E=0.62,eRank=69.0,q75/q25=274.23 train_time:946502ms step_avg:96.58ms +[2025-08-22 18:35:12] [Rank 0] PRINT: step:9800/10000 val_loss:3.6881 svd_entropy: attn_qk:H=0.5191,top10E=0.62,eRank=65.3,q75/q25=58.66 attn_vo:H=0.7159,top10E=0.36,eRank=185.2,q75/q25=25.20 mlp_w1:H=0.8546,top10E=0.17,eRank=351.6,q75/q25=9.01 mlp_w2:H=0.8607,top10E=0.16,eRank=356.6,q75/q25=13.67 vo_prod:H=0.5315,top10E=0.62,eRank=69.0,q75/q25=274.23 train_time:946502ms step_avg:96.58ms +[2025-08-22 18:35:12] [Rank 0] step:9801/10000 train_time:946522ms step_avg:96.57ms +[2025-08-22 18:35:12] [Rank 0] step:9801/10000 train_time:946522ms step_avg:96.57ms +[2025-08-22 18:35:14] [Rank 0] step:9821/10000 train_time:948558ms step_avg:96.58ms +[2025-08-22 18:35:14] [Rank 0] step:9821/10000 train_time:948558ms step_avg:96.58ms +[2025-08-22 18:35:16] [Rank 0] step:9841/10000 train_time:950614ms step_avg:96.60ms 
+[2025-08-22 18:35:16] [Rank 0] step:9841/10000 train_time:950614ms step_avg:96.60ms +[2025-08-22 18:35:18] [Rank 0] step:9861/10000 train_time:952649ms step_avg:96.61ms +[2025-08-22 18:35:18] [Rank 0] step:9861/10000 train_time:952649ms step_avg:96.61ms +[2025-08-22 18:35:20] [Rank 0] step:9881/10000 train_time:954690ms step_avg:96.62ms +[2025-08-22 18:35:20] [Rank 0] step:9881/10000 train_time:954690ms step_avg:96.62ms +[2025-08-22 18:35:22] [Rank 0] step:9901/10000 train_time:956746ms step_avg:96.63ms +[2025-08-22 18:35:22] [Rank 0] step:9901/10000 train_time:956746ms step_avg:96.63ms +[2025-08-22 18:35:24] [Rank 0] step:9921/10000 train_time:958791ms step_avg:96.64ms +[2025-08-22 18:35:24] [Rank 0] step:9921/10000 train_time:958791ms step_avg:96.64ms +[2025-08-22 18:35:26] [Rank 0] step:9941/10000 train_time:960848ms step_avg:96.66ms +[2025-08-22 18:35:26] [Rank 0] step:9941/10000 train_time:960848ms step_avg:96.66ms +[2025-08-22 18:35:28] [Rank 0] step:9961/10000 train_time:962888ms step_avg:96.67ms +[2025-08-22 18:35:28] [Rank 0] step:9961/10000 train_time:962888ms step_avg:96.67ms +[2025-08-22 18:35:30] [Rank 0] step:9981/10000 train_time:964944ms step_avg:96.68ms +[2025-08-22 18:35:30] [Rank 0] step:9981/10000 train_time:964944ms step_avg:96.68ms +[2025-08-22 18:35:32] [Rank 0] step:10000/10000 train_time:966899ms step_avg:96.69ms +[2025-08-22 18:35:32] [Rank 0] step:10000/10000 train_time:966899ms step_avg:96.69ms +[2025-08-22 18:35:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 18:35:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 18:35:46] [Rank 0] PRINT: step:10000/10000 val_loss:3.6819 svd_entropy: attn_qk:H=0.5193,top10E=0.62,eRank=65.3,q75/q25=58.67 attn_vo:H=0.7161,top10E=0.36,eRank=185.3,q75/q25=25.23 mlp_w1:H=0.8545,top10E=0.17,eRank=351.5,q75/q25=9.02 mlp_w2:H=0.8607,top10E=0.16,eRank=356.6,q75/q25=13.67 vo_prod:H=0.5319,top10E=0.62,eRank=69.0,q75/q25=274.54 train_time:967010ms step_avg:96.70ms +[2025-08-22 18:35:46] [Rank 0] PRINT: step:10000/10000 val_loss:3.6819 svd_entropy: attn_qk:H=0.5193,top10E=0.62,eRank=65.3,q75/q25=58.67 attn_vo:H=0.7161,top10E=0.36,eRank=185.3,q75/q25=25.23 mlp_w1:H=0.8545,top10E=0.17,eRank=351.5,q75/q25=9.02 mlp_w2:H=0.8607,top10E=0.16,eRank=356.6,q75/q25=13.67 vo_prod:H=0.5319,top10E=0.62,eRank=69.0,q75/q25=274.54 train_time:967010ms step_avg:96.70ms +[2025-08-22 18:35:46] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 18:35:46 2025 --- +[2025-08-22 18:35:46] [Rank 0] PRINT: --- Training Finished: Fri Aug 22 18:35:46 2025 --- +[2025-08-22 18:35:46] [Rank 0] PRINT: Peak memory allocated: 11166 MiB reserved: 16896 MiB +[2025-08-22 18:35:46] [Rank 0] PRINT: Peak memory allocated: 11166 MiB reserved: 16896 MiB diff --git a/logs_svd_gated/mode_9_param_gated_seed_43/config.json b/logs_svd_gated/mode_9_param_gated_seed_43/config.json new file mode 100644 index 0000000000000000000000000000000000000000..51271bb108d201ab34a40d9cf94d23750289750f --- /dev/null +++ b/logs_svd_gated/mode_9_param_gated_seed_43/config.json @@ -0,0 +1,25 @@ +{ + "cli_args": { + "unet": false, + "seed": 43, + "optimizer_mode": 9, + "model_parameterization": "gated", + "adam_lr": 0.05, + "muon_lr": 0.05, + "base_dir": "logs_svd_gated" + }, + "hyperparameters": { + "train_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "val_files": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "val_tokens": 1966080, + "train_seq_len": 12288, + "val_seq_len": 65536, + "num_iterations": 10000, + 
"cooldown_frac": 0.4, + "vocab_size": 50257, + "val_loss_every": 200, + "save_checkpoint": false + }, + "run_uuid_for_log": "b894cd59-23f1-49a1-bc90-450f285f32af", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/logs_svd_gated/mode_9_param_gated_seed_43/training_log_b894cd59-23f1-49a1-bc90-450f285f32af.txt b/logs_svd_gated/mode_9_param_gated_seed_43/training_log_b894cd59-23f1-49a1-bc90-450f285f32af.txt new file mode 100644 index 0000000000000000000000000000000000000000..c5bcb93551d021f88a310f1e952603140090c08e --- /dev/null +++ b/logs_svd_gated/mode_9_param_gated_seed_43/training_log_b894cd59-23f1-49a1-bc90-450f285f32af.txt @@ -0,0 +1,2692 @@ +[2025-08-22 23:20:51] [Rank 0] PRINT: --- Script Start: Fri Aug 22 23:20:51 2025 --- +[2025-08-22 23:20:51] [Rank 0] PRINT: --- Script Start: Fri Aug 22 23:20:51 2025 --- +[2025-08-22 23:20:51] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=9, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 23:20:51] [Rank 0] PRINT: Parsed CLI args: Namespace(unet=False, seed=43, optimizer_mode=9, model_parameterization='gated', adam_lr=0.05, muon_lr=0.05, base_dir='logs_svd_gated') +[2025-08-22 23:20:51] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 23:20:51] [Rank 0] PRINT: Hyperparameters: Hyperparameters() +[2025-08-22 23:20:51] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 23:20:51] [Rank 0] PRINT: Using fixed seed: 43 +[2025-08-22 23:20:51] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_9_param_gated_seed_43 +[2025-08-22 23:20:51] [Rank 0] PRINT: Run directory: logs_svd_gated/mode_9_param_gated_seed_43 +[2025-08-22 23:20:51] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import 
Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 
2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. + pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + 
+ k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. " + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." 
+ ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + 
#train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = 
f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... 
# (other initial logs)

########################################
# Construct model and optimizer #
########################################
print0("PRINT: Constructing model...", console=True)
model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768,
                       max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda()
for m in model.modules():
    if isinstance(m, nn.Embedding):
        m.bfloat16()  # keep embeddings in bf16; other modules untouched here
print0("PRINT: Broadcasting model parameters...", console=True)
for param in model.parameters():
    dist.broadcast(param.detach(), 0)  # rank 0's initialization is canonical
print0("PRINT: Model constructed and broadcasted.", console=True)

# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP ---
# NOTE(fix): the "qkvo"/"norope" and "gated" branches used to be two
# near-identical ~100-line copies whose comments had already drifted. They are
# merged here; the only behavioral difference — gated MLPs carry an extra
# c_up up-projection — is handled explicitly below.
if exp_args.model_parameterization in ("qkvo", "norope", "gated"):
    print0("PRINT: Collecting parameters for optimizers...", console=True)
    head_params = [model.lm_head.weight]
    embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds]

    # Granular collection for attention and MLP parts
    attn_q_params = []
    attn_k_params = []
    attn_v_params = []
    attn_o_params = []    # W_O from c_proj
    mlp_fc_params = []
    mlp_proj_params = []
    mlp_up_params = []    # only populated for the gated parameterization

    for block_module in model.blocks:
        if block_module.attn is not None:
            # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class
            if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w)
            else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True)
            if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w)
            else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True)
            if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w)
            else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True)
            attn_o_params.append(block_module.attn.c_proj.weight)
        if block_module.mlp is not None:
            mlp_fc_params.append(block_module.mlp.c_fc.weight)
            mlp_proj_params.append(block_module.mlp.c_proj.weight)
            if exp_args.model_parameterization == "gated":
                mlp_up_params.append(block_module.mlp.c_up.weight)

    # Combine into logical groups for experiments (mlp_up_params is empty unless gated)
    attn_qk_group = attn_q_params + attn_k_params
    attn_vo_group = attn_v_params + attn_o_params
    all_attn_matrices = attn_qk_group + attn_vo_group
    mlp_w1_group = mlp_fc_params + mlp_up_params
    mlp_w2_group = mlp_proj_params
    all_mlp_matrices = mlp_fc_params + mlp_proj_params + mlp_up_params

    # Scalar parameters (all others not explicitly grouped as matrices)
    matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices)
    scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check]
    for p_scalar in scalar_params:  # Sanity check
        if p_scalar.ndim >= 2:
            print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True)

    # Determine parameter distribution based on optimizer_mode
    muon_params_target_list = []
    adam_matrix_target_list = []        # Matrices that Adam will handle specifically
    adam_matrix_lr = exp_args.adam_lr   # LR for matrices if Adam handles them (can be tuned)

    current_optimizer_mode = exp_args.optimizer_mode
    print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True)

    if current_optimizer_mode == 0:  # Original behavior: Muon on all "hidden_matrix_params"
        print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True)
        muon_params_target_list = all_attn_matrices + all_mlp_matrices
        # Adam handles embeds, head, scalars by default. No extra matrices for Adam here.
    elif current_optimizer_mode == 1:  # Muon on QK, Adam on VO and MLP
        print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_qk_group
        adam_matrix_target_list = attn_vo_group + all_mlp_matrices
    elif current_optimizer_mode == 2:  # Muon on VO, Adam on QK and MLP
        print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_vo_group
        adam_matrix_target_list = attn_qk_group + all_mlp_matrices
    elif current_optimizer_mode == 3:  # Muon on All Attn (QKVO), Adam on MLP
        print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = all_attn_matrices
        adam_matrix_target_list = all_mlp_matrices
    elif current_optimizer_mode == 4:  # Muon on MLP, Adam on All Attn (QKVO)
        print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = all_mlp_matrices
        adam_matrix_target_list = all_attn_matrices
    elif current_optimizer_mode == 5:  # NEW MODE 5 - All Adam
        print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = []
        adam_matrix_target_list = all_attn_matrices + all_mlp_matrices  # All matrices to Adam
    elif current_optimizer_mode == 6:  # Muon on W_2 MLP, Adam on attn, W_1 MLP
        print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = mlp_w2_group
        adam_matrix_target_list = all_attn_matrices + mlp_w1_group
    elif current_optimizer_mode == 7:  # Muon on VO Attn, MLP, Adam on QK Attn
        print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_vo_group + all_mlp_matrices
        adam_matrix_target_list = attn_qk_group
    elif current_optimizer_mode == 8:  # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP
        print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_vo_group + mlp_w2_group
        adam_matrix_target_list = attn_qk_group + mlp_w1_group
    elif current_optimizer_mode == 9:  # Muon on V Attn, MLP
        print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_v_params + all_mlp_matrices
        adam_matrix_target_list = attn_o_params + attn_qk_group
    elif current_optimizer_mode == 10:  # Muon on O Attn, MLP
        print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True)
        muon_params_target_list = attn_o_params + all_mlp_matrices
        adam_matrix_target_list = attn_v_params + attn_qk_group
    else:
        raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}")

    # Adam optimizer setup
    adam_param_groups_config = [
        dict(params=head_params, lr=adam_matrix_lr),
        dict(params=embed_params, lr=adam_matrix_lr),
        dict(params=scalar_params, lr=adam_matrix_lr)  # Scalar params always go to Adam
    ]
    # Add matrices specifically assigned to Adam for this experiment mode
    if adam_matrix_target_list:
        # Ensure adam_matrix_target_list is flat and contains Parameters
        flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None]
        if flat_adam_matrices:  # Only add group if there are params
            adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr))

    # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty)
    adam_param_groups_config = [g for g in adam_param_groups_config if g['params']]
    optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True)
    optimizers = [optimizer1]  # Start with Adam

    # Muon optimizer setup
    optimizer2 = None  # Explicitly None unless Muon is actually created below
    if muon_params_target_list:
        # Ensure muon_params_target_list is flat, unique, and contains Parameters
        flat_unique_muon_params = []
        seen_muon_ids = set()
        for sublist_or_p in muon_params_target_list:
            for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]):
                if p is not None and id(p) not in seen_muon_ids:
                    flat_unique_muon_params.append(p)
                    seen_muon_ids.add(id(p))

        if flat_unique_muon_params:  # Only create Muon if it has parameters
            optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0)
            optimizers.append(optimizer2)
        else:
            print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True)
    else:
        print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True)

    print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True)
    if optimizer2:
        print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True)
    # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP ---
elif exp_args.model_parameterization == "whole":
    hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n]
    embed_params = [p for n, p in model.named_parameters() if "embed" in n]
    scalar_params = [p for p in model.parameters() if p.ndim < 2]
    head_params = [model.lm_head.weight]

    # init the optimizer(s)
    adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)]
    # small adam epsilon by @YouJiacheng.
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
# 'model' is compiled here; all later train/val calls go through model_compiled.
model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune")
print0("PRINT: Model compilation complete.", console=True)

########################################
# Warmup kernels #
########################################
# Run a few throwaway optimizer steps on random tokens so compiled kernels and
# collective-communication buffers get initialized, then restore the saved
# model/optimizer state so real training starts from the broadcasted init.
print0("PRINT: Starting warmup...", console=True)
warmup_steps = 10
initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()),  # Use model_compiled
                     optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers])
for i in range(warmup_steps):
    # Random tokens are fine here: only kernel warmup matters; the weight
    # updates made below are discarded by the state_dict restore.
    inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda")
    loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0))  # Use model_compiled
    loss.backward()
    for param in model_compiled.parameters():  # Use model_compiled
        if param.grad is not None:
            dist.all_reduce(param.grad, op=dist.ReduceOp.AVG)
    for opt in optimizers:
        opt.step()
    model_compiled.zero_grad(set_to_none=True)  # Use model_compiled
model_compiled.load_state_dict(initial_state["model"])  # Use model_compiled
for opt, opt_state in zip(optimizers, initial_state["optimizers"]):
    opt.load_state_dict(opt_state)
del initial_state
print0("PRINT: Warmup complete.", console=True)
torch.cuda.synchronize()


# Select which weight matrices get SVD-spectrum logging during validation.
# NOTE(review): params_to_analyze is not read again in the visible code — only
# matrix_groups_for_svd feeds the SVD logging below; confirm before removing.
params_to_analyze = []

if exp_args.model_parameterization == "whole":
    params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad]
elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated":
    params_to_analyze = all_attn_matrices + all_mlp_matrices
    matrix_groups_for_svd = {}
    if master_process:  # only rank 0 computes/logs SVD metrics
        matrix_groups_for_svd = {
            "attn_qk": attn_qk_group,
            "attn_vo": attn_vo_group,
            "mlp_w1": mlp_w1_group,
            "mlp_w2": mlp_proj_params
        }



########################################
# Training and validation #
########################################
print0("PRINT: Starting training...", console=True)
train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size)
training_time_ms = 0
torch.cuda.synchronize()
t0 = time.perf_counter()
train_steps = args.num_iterations

# Main loop: each iteration optionally runs validation (with SVD spectrum
# logging on rank 0), then one training step. Timing excludes validation:
# the clock (t0) is stopped on entry to validation and restarted after it.
for step in range(train_steps + 1):  # Loop up to num_iterations (inclusive for final validation)
    last_step = (step == train_steps)

    # --------------- VALIDATION SECTION -----------------
    # Validate at step 0 (after warmup), at specified intervals, and at the very last step
    if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0):
        torch.cuda.synchronize()
        # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0)
        if step > 0 :  # For step 0, t0 hasn't started a training segment yet
            current_run_time = 1000 * (time.perf_counter() - t0)
            training_time_ms += current_run_time

        model_compiled.eval()  # Use model_compiled
        val_batch_size = world_size * args.val_seq_len
        # Ensure val_tokens is divisible by val_batch_size, or handle remainder
        if args.val_tokens % val_batch_size != 0:
            print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). Some tokens might be missed.", console=True)
        val_num_steps = args.val_tokens // val_batch_size

        val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size)
        val_loss_sum = torch.zeros(1, device=device)  # Accumulate loss on device
        actual_val_steps = 0
        with torch.no_grad():
            for val_i in range(val_num_steps):
                try:
                    inputs, targets = next(val_loader)
                    loss_val = model_compiled(inputs, targets, get_window_size_blocks(step))  # Use model_compiled
                    val_loss_sum += loss_val
                    actual_val_steps += 1
                except StopIteration:
                    print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True)
                    break  # Stop if data runs out

        if actual_val_steps > 0:
            val_loss_avg = val_loss_sum / actual_val_steps
        else:  # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue)
            val_loss_avg = torch.tensor(float('nan'), device=device)
            print0(f"PRINT: Warning: No validation steps were completed. val_loss is NaN.", console=True)

        del val_loader  # Clean up
        dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG)  # Reduce average loss

        # SVD spectrum logging: rank 0 only, and only for parameterizations that
        # populated matrix_groups_for_svd (qkvo/gated; empty for "whole").
        svd_log_str = ""
        if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd:
            TOPK = 10
            svd_results_by_category = {}

            with torch.no_grad():
                # per-category metrics (average over matrices in the group)
                for name, group_params in matrix_groups_for_svd.items():
                    if not group_params:
                        continue
                    mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params]
                    if mets:
                        avg_entropy = float(np.mean([m['entropy_norm'] for m in mets]))
                        avg_erank = float(np.mean([m['erank'] for m in mets]))
                        avg_topkE = float(np.mean([m['topk_energy'] for m in mets]))
                        avg_qratio = float(np.mean([m['q75_q25'] for m in mets]))
                        svd_results_by_category[name] = dict(
                            entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio
                        )

                # VO product as another category: spectrum of W_O @ W_V per layer
                vo_mets = []
                num_layers = len(attn_v_params)
                for i in range(num_layers):
                    w_v = attn_v_params[i]
                    w_o = attn_o_params[i]
                    w_ov_product = torch.matmul(w_o, w_v)
                    vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK))
                if vo_mets:
                    svd_results_by_category['vo_prod'] = dict(
                        entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])),
                        erank=float(np.mean([m['erank'] for m in vo_mets])),
                        topkE=float(np.mean([m['topk_energy'] for m in vo_mets])),
                        q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])),
                    )

            # format logging string (append metrics after entropy)
            svd_log_parts = []
            for name, vals in svd_results_by_category.items():
                svd_log_parts.append(
                    f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}"
                )
            svd_log_str = " ".join(svd_log_parts)


        # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative.
        avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0
        print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True)

        model_compiled.train()  # Switch back to train mode
        torch.cuda.synchronize()
        t0 = time.perf_counter()  # Reset timer for the next training segment

    if last_step:
        if master_process and args.save_checkpoint:
            if run_dir_path_str:  # Ensure run_dir_path_str is set by master process
                checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints"
                checkpoint_parent_dir.mkdir(parents=True, exist_ok=True)  # Create checkpoints subdir
                checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt"
                log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(),  # Use model_compiled
                                      optimizers=[opt.state_dict() for opt in optimizers])
                torch.save(log_checkpoint, str(checkpoint_path))  # Convert Path to str for torch.save
                print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True)
            else:
                print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True)
        break

    # --------------- TRAINING SECTION -----------------
    try:
        inputs, targets = next(train_loader)
    except StopIteration:
        print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. Ending training early at step {step}.", console=True)
        break  # End if data runs out

    loss_train = model_compiled(inputs, targets, get_window_size_blocks(step))  # Use model_compiled
    loss_train.backward()

    for param in model_compiled.parameters():  # Use model_compiled
        if param.grad is not None:  # Check if grad exists
            dist.all_reduce(param.grad, op=dist.ReduceOp.AVG)

    # Schedule: scale every group's LR off its saved initial_lr.
    current_lr_val = get_lr(step)
    for opt in optimizers:
        for group in opt.param_groups:
            group["lr"] = group["initial_lr"] * current_lr_val

    # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists ---
    if optimizer2 is not None:  # Check if Muon optimizer was created
        for group in optimizer2.param_groups:
            frac = min(step / 300, 1)  # momentum warmup for muon: 0.85 -> 0.95 over 300 steps
            group["momentum"] = (1 - frac) * 0.85 + frac * 0.95

    for opt in optimizers:
        opt.step()

    model_compiled.zero_grad(set_to_none=True)  # Use model_compiled

    # Logging (less frequent for training steps)
    if step > 0 and (step % 20 == 0 or step == train_steps -1) :  # Avoid logging at step 0 before first val
        # This time is for the current segment since last validation / t0 reset
        current_segment_time_ms = 1000 * (time.perf_counter() - t0)
        # approx_training_time_ms is the total cumulative time
        approx_total_training_time_ms = training_time_ms + current_segment_time_ms

        total_tokens_in_batch = args.train_seq_len * world_size
        # NOTE(review): train_loss_per_token is computed but never used in the
        # visible code (not logged below) — confirm before removing.
        train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item()

        print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True)  # Log to console too

print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True)
print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB "
       f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True)

if dist.is_initialized():
    dist.destroy_process_group()
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 23:20:51] [Rank 0] import os +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging +import uuid +import time +import copy +import glob +from dataclasses import dataclass, asdict +from functools import lru_cache +from pathlib import Path +import argparse # Keep argparse for --unet and potentially --optimizer_mode +import json +import random +import numpy as np + +os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:True" +import torch +torch.empty(1, device="cuda", requires_grad=True).backward() # prevents a bug on some systems +from torch import Tensor, nn +import torch.nn.functional as F +import torch.distributed as dist +# use of FlexAttention contributed by @KoszarskyB +from torch.nn.attention.flex_attention import BlockMask, flex_attention +sys.path.append("/home/aiops/zhangfz/MUON_theory/modded-nanogpt") # Already present +from optimizers.MUON_new import Muon +from utils.float_compute import mm_op, backward as mm_backward_custom, setup_context as mm_setup_context_custom # Renamed + +#from kn_util.utils import setup_debugpy +#torch._inductor.config.coordinate_descent_tuning = True + +# ----------------------------------------------------------------------------- + +mm_op.register_autograd(mm_backward_custom, setup_context=mm_setup_context_custom) # Use renamed imports + +# ----------------------------------------------------------------------------- +# Seeding Function +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + print(f"PRINT: Set seed to {seed}", flush=True) # Print immediately for all ranks + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader (KEEP AS IS) +def _load_data_shard(file: Path): + header = torch.from_file(str(file), False, 256, 
dtype=torch.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + num_tokens = int(header[2]) + with file.open("rb", buffering=0) as f: + tokens = torch.empty(num_tokens, dtype=torch.uint16, pin_memory=True) + f.seek(256 * 4) + nbytes = f.readinto(tokens.numpy()) + assert nbytes == 2 * num_tokens, "number of tokens read does not match header" + return tokens + +def distributed_data_generator(filename_pattern: str, batch_size: int, rank : int, world_size : int): + files = [Path(file) for file in sorted(glob.glob(filename_pattern))] + assert batch_size % world_size == 0 + local_batch_size = batch_size // world_size + file_iter = iter(files) # use itertools.cycle(files) instead if you want to do multi-epoch training + tokens, pos = _load_data_shard(next(file_iter)), 0 + while True: + if pos + batch_size + 1 >= len(tokens): + tokens, pos = _load_data_shard(next(file_iter)), 0 + buf = tokens[pos + rank * local_batch_size:][:local_batch_size + 1] + inputs = buf[:-1].to(device="cuda", dtype=torch.int32, non_blocking=True) # no sync on host side; + targets = buf[1:].to(device="cuda", dtype=torch.int64, non_blocking=True) # H2D in another stream isn't helpful. 
+ pos += batch_size + yield inputs, targets + +# ---- ADD: spectral metrics helper right after calculate_svd_entropy ---- +def calculate_svd_metrics(matrix: torch.Tensor, *, topk: int = 10): + """ + Returns dict with: + - entropy_norm: normalized SVD entropy (same normalization as your function) + - erank: effective rank = exp(Shannon entropy of p) + - topk_energy: sum of top-k p_i (energy fraction in the top-k singular values) + - q75_q25: ratio of 75th to 25th percentile of eigenvalues (sigma^2) + """ + with torch.no_grad(): + s = torch.linalg.svdvals(matrix.detach().to('cpu', torch.float32)) + s = s[s > 1e-9] + n = s.numel() + if n == 0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + s2 = s * s + S2_sum = float(torch.sum(s2)) + if S2_sum == 0.0: + return dict(entropy_norm=0.0, erank=0.0, topk_energy=0.0, q75_q25=float('inf')) + + p = s2 / S2_sum # energy distribution + # Shannon entropy H (natural log) + H = float(torch.sum(torch.special.entr(p))) + entropy_norm = H / np.log(max(n, 2)) # same normalization as your SVD entropy + erank = float(np.exp(H)) + + k = min(topk, n) + topk_energy = float(torch.topk(p, k).values.sum()) + + # eigenvalues = s^2, use quantiles on s^2 + q25 = float(torch.quantile(s2, 0.25)) + q75 = float(torch.quantile(s2, 0.75)) + q75_q25 = (q75 / q25) if q25 > 0 else float('inf') + + return dict( + entropy_norm=entropy_norm, + erank=erank, + topk_energy=topk_energy, + q75_q25=q75_q25, + ) + + +# ----------------------------------------------------------------------------- +# int main +parser = argparse.ArgumentParser(description="NanoGPT Training Script with Muon") +parser.add_argument("--unet", action="store_true", help="Use U-net architecture") +parser.add_argument("--seed", type=int, default=42, help="Random seed for reproducibility") +# --- MODIFICATION: Add optimizer_mode as a CLI argument --- +parser.add_argument("--optimizer_mode", type=int, default=0, + help="Defines how Muon is applied. 
" + "0: Muon(All Hidden Attn+MLP - original); " + "1: Muon(QK Attn)/Adam(VO Attn,MLP); " + "2: Muon(VO Attn)/Adam(QK Attn,MLP); " + "3: Muon(All Attn)/Adam(MLP); " + "4: Muon(MLP)/Adam(All Attn)" + "5: All Adam (No Muon, all applicable matrices to Adam)." + "6: Muon(W_2 MLP)/Adam(attn, W_1 MLP)." + "7: Muon(VO Attn, MLP)/Adam(QK Attn)." + "8: Muon(VO Attn, W_2 MLP)/Adam(QK Attn, W_1 MLP)." + ) +parser.add_argument("--model_parameterization", type=str, default="whole",choices=["whole","qkvo", "norope", "gated"]) +parser.add_argument("--adam_lr", type=float, default=0.008, help="Learning rate for Adam matrices") +parser.add_argument("--muon_lr", type=float, default=0.05, help="Learning rate for Muon matrices") +parser.add_argument("--base_dir", type=str, default="logs_all_0821/gated", help="Base directory for logs") +exp_args = parser.parse_args() +set_seed(exp_args.seed) + +# --- MODIFICATION: Import correct GPT model based on --unet flag --- +if exp_args.unet: + print("Using U-net architecture") + from models.nano_GPT_unet import GPT +elif exp_args.model_parameterization == "qkvo": + print("Using architecture (models.nano_gpt_qkvo) with CausalSelfAttention having q_w, k_w, v_w") + # This MUST be the nano_GPT.py file where CausalSelfAttention has q_w, k_w, v_w + + from models.nano_GPT_qkvo import GPT + +elif exp_args.model_parameterization == "norope": + print("Using architecture (models.nano_GPT_norope) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_norope import GPT + +elif exp_args.model_parameterization == "gated": + print("Using architecture (models.nano_GPT_gated) with CausalSelfAttention having q_w, k_w, v_w") + from models.nano_GPT_gated import GPT + +elif exp_args.model_parameterization == "whole": + print("Using original architecture") + from models.nano_GPT import GPT + +@dataclass +class Hyperparameters: + # data + + #train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + #val_files = 
"/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + train_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin" + val_files = "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin" + val_tokens = 1966080 + #val_tokens = 10485760 + train_seq_len = 12*1024 + val_seq_len = 4*16*1024 + #train_seq_len = 48*1024 # FlexAttention sequence length + #train_seq_len = 12*1024 # FlexAttention sequence length + #val_seq_len = 4*64*1024 # FlexAttention sequence length for validation + + # optimization + num_iterations = 10000 #1770 # Original: 1770 + cooldown_frac = 0.4 + # architecture + + vocab_size = 50257 + + # evaluation and logging + val_loss_every = 200 # Original: 125 + save_checkpoint = False +args = Hyperparameters() + +# DDP setup (KEEP AS IS, but ensure rank and world_size are correctly used) +rank = int(os.environ.get("RANK", 0)) +local_rank = int(os.environ.get("LOCAL_RANK", 0)) # Used for device setting +world_size = int(os.environ.get("WORLD_SIZE", 1)) + +# print(f"[Rank {rank}] Global Rank: {rank}, Local Rank: {local_rank}, World Size: {world_size}", flush=True) # Debug + +assert torch.cuda.is_available() +device = torch.device("cuda", local_rank) # Use local_rank for device +torch.cuda.set_device(device) + +if not dist.is_initialized(): # Ensure DDP is initialized only once + dist.init_process_group(backend="nccl", rank=rank, world_size=world_size) # Pass rank and world_size +dist.barrier() +master_process = (rank == 0) + +# Logging setup (KEEP AS IS, but maybe add optimizer_mode to filename) +logfile = None +# --- MODIFICATION: Add optimizer_mode to log file name and specify new dir --- +#log_dir = "modded-nanogpt/logs_detailed_attn_minimal_changes" +#if master_process: +# run_id = uuid.uuid4() +# os.makedirs(log_dir, exist_ok=True) # Create new log directory +# logfile = 
f"{log_dir}/exp_mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_{run_id}.txt" +# print(f"Logging to: {logfile}") + +logfile = None +run_dir_path_str = None + +base_log_dir = Path(exp_args.base_dir) + +if master_process: + # Set seed again specifically for master process for operations like dir creation, config saving + set_seed(exp_args.seed) + + # Construct folder name based on config and seed + run_folder_name = f"mode_{exp_args.optimizer_mode}_param_{exp_args.model_parameterization}_seed_{exp_args.seed}" + run_dir_path = base_log_dir / run_folder_name + run_dir_path.mkdir(parents=True, exist_ok=True) + run_dir_path_str = str(run_dir_path) + + run_uuid = uuid.uuid4() + logfile = run_dir_path / f"training_log_{run_uuid}.txt" + print(f"Logging to: {logfile}") + + # Save configuration + config_to_save = { + "cli_args": vars(exp_args), + "hyperparameters": {k: v for k, v in args.__class__.__dict__.items() if not k.startswith('__') and not callable(v)}, + "run_uuid_for_log": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print(f"Saved configuration to: {config_file_path}") + +def print0(s, console=False): + if master_process: + # Add timestamp and rank for better log readability + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + log_message = f"[{timestamp}] [Rank {rank}] {s}" + + # Print to console if requested or if it's a specific "PRINT:" message + if console or s.startswith("PRINT:"): + actual_s = s[6:] if s.startswith("PRINT:") else s + print(actual_s) # Print to stdout for master process + + if logfile: + with open(logfile, "a") as f: + f.write(log_message + "\n") + + with open(logfile, "a") as f: + f.write(log_message + "\n") + + +print0(f"PRINT: --- Script Start: {time.ctime()} ---", console=True) +print0(f"PRINT: Parsed CLI args: {exp_args}", console=True) +print0(f"PRINT: 
Hyperparameters: {args}", console=True) +print0(f"PRINT: Using fixed seed: {exp_args.seed}", console=True) +if master_process: + print0(f"PRINT: Run directory: {run_dir_path_str}", console=True) +print0(code) # Log the code +# ... (other initial logs) + +######################################## +# Construct model and optimizer # +######################################## +print0("PRINT: Constructing model...", console=True) +model: nn.Module = GPT(vocab_size=args.vocab_size, num_layers=12, num_heads=6, model_dim=768, + max_seq_len=max(args.train_seq_len, args.val_seq_len)).cuda() +for m in model.modules(): + if isinstance(m, nn.Embedding): + m.bfloat16() +print0("PRINT: Broadcasting model parameters...", console=True) +for param in model.parameters(): + dist.broadcast(param.detach(), 0) +print0("PRINT: Model constructed and broadcasted.", console=True) + +# --- START MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +if exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "norope": + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): 
attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params + + # Scalar parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. 
Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. 
Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if 
g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) # Pass nesterov, ns_steps + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. 
Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "gated" : + print0("PRINT: Collecting parameters for optimizers...", console=True) + head_params = [model.lm_head.weight] + embed_params = [model.embed.weight] + [ve.weight for ve in model.value_embeds] + + # Granular collection for attention and MLP parts + attn_q_params = [] + attn_k_params = [] + attn_v_params = [] + attn_o_params = [] # W_O from c_proj + mlp_fc_params = [] + mlp_proj_params = [] + mlp_up_params = [] + + for block_module in model.blocks: + if block_module.attn is not None: + # These attributes (q_w, k_w, v_w) MUST exist in your CausalSelfAttention class + if hasattr(block_module.attn, 'q_w'): attn_q_params.append(block_module.attn.q_w) + else: print0(f"PRINT: Warning: q_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'k_w'): attn_k_params.append(block_module.attn.k_w) + else: print0(f"PRINT: Warning: k_w not found in attn module of a block.", console=True) + if hasattr(block_module.attn, 'v_w'): attn_v_params.append(block_module.attn.v_w) + else: print0(f"PRINT: Warning: v_w not found in attn module of a block.", console=True) + attn_o_params.append(block_module.attn.c_proj.weight) + if block_module.mlp is not None: + mlp_fc_params.append(block_module.mlp.c_fc.weight) + mlp_proj_params.append(block_module.mlp.c_proj.weight) + mlp_up_params.append(block_module.mlp.c_up.weight) + + # Combine into logical groups for experiments + attn_qk_group = attn_q_params + attn_k_params + attn_vo_group = attn_v_params + attn_o_params + all_attn_matrices = attn_qk_group + attn_vo_group + mlp_w1_group = mlp_fc_params + mlp_up_params + mlp_w2_group = mlp_proj_params + all_mlp_matrices = mlp_fc_params + mlp_proj_params+ mlp_up_params + + # Scalar 
parameters (all others not explicitly grouped as matrices) + matrix_params_for_scalar_check = set(head_params + embed_params + all_attn_matrices + all_mlp_matrices) + scalar_params = [p for n, p in model.named_parameters() if p not in matrix_params_for_scalar_check] + for p_scalar in scalar_params: # Sanity check + if p_scalar.ndim >=2: + print0(f"PRINT: Warning - Parameter {p_scalar.shape} ended up in scalar_params but has ndim >= 2. Check grouping.", console=True) + + + # Determine parameter distribution based on optimizer_mode + muon_params_target_list = [] + adam_matrix_target_list = [] # Matrices that Adam will handle specifically + adam_matrix_lr = exp_args.adam_lr # LR for matrices if Adam handles them (can be tuned) + + current_optimizer_mode = exp_args.optimizer_mode + print0(f"PRINT: Configuring optimizers for EXPERIMENT_MODE = {current_optimizer_mode}", console=True) + + if current_optimizer_mode == 0: # Original behavior: Muon on all "hidden_matrix_params" + print0(f"PRINT: Mode 0: Muon on ALL Attention (QKVO) and ALL MLP matrices.", console=True) + muon_params_target_list = all_attn_matrices + all_mlp_matrices + # Adam handles embeds, head, scalars by default. No extra matrices for Adam here. + elif current_optimizer_mode == 1: # Muon on QK, Adam on VO and MLP + print0(f"PRINT: Mode 1: Muon on QK Attn. Adam on VO Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_qk_group + adam_matrix_target_list = attn_vo_group + all_mlp_matrices + elif current_optimizer_mode == 2: # Muon on VO, Adam on QK and MLP + print0(f"PRINT: Mode 2: Muon on VO Attn. Adam on QK Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + adam_matrix_target_list = attn_qk_group + all_mlp_matrices + elif current_optimizer_mode == 3: # Muon on All Attn (QKVO), Adam on MLP + print0(f"PRINT: Mode 3: Muon on ALL Attn (QKVO). 
Adam on MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_attn_matrices + adam_matrix_target_list = all_mlp_matrices + elif current_optimizer_mode == 4: # Muon on MLP, Adam on All Attn (QKVO) + print0(f"PRINT: Mode 4: Muon on MLP. Adam on ALL Attn (QKVO) (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = all_mlp_matrices + adam_matrix_target_list = all_attn_matrices + elif current_optimizer_mode == 5: # NEW MODE 5 - All Adam + print0(f"PRINT: Mode 5: All Adam. All Attn and MLP matrices to Adam (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = [] + adam_matrix_target_list = all_attn_matrices + all_mlp_matrices # All matrices to Adam + elif current_optimizer_mode == 6: # Muon on W_2 MLP, Adam on attn, W_1 MLP + print0(f"PRINT: Mode 6: Muon on W_2 MLP. Adam on attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = mlp_w2_group + adam_matrix_target_list = all_attn_matrices + mlp_w1_group + elif current_optimizer_mode == 7: # Muon on VO Attn, MLP, Adam on QK Attn + print0(f"PRINT: Mode 7: Muon on VO Attn, MLP. Adam on QK Attn (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + all_mlp_matrices + adam_matrix_target_list = attn_qk_group + elif current_optimizer_mode == 8: # Muon on VO Attn, W_2 MLP, Adam on QK Attn, W_1 MLP + print0(f"PRINT: Mode 8: Muon on VO Attn, W_2 MLP. 
Adam on QK Attn, W_1 MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_vo_group + mlp_w2_group + adam_matrix_target_list = attn_qk_group + mlp_w1_group + elif current_optimizer_mode == 9: # Muon on V Attn, MLP + print0(f"PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_v_params + all_mlp_matrices + adam_matrix_target_list = attn_o_params + attn_qk_group + elif current_optimizer_mode == 10: # Muon on O Attn, MLP + print0(f"PRINT: Mode 10: Muon on O Attn, MLP (Adam LR: {adam_matrix_lr}).", console=True) + muon_params_target_list = attn_o_params + all_mlp_matrices + adam_matrix_target_list = attn_v_params + attn_qk_group + else: + raise ValueError(f"Unsupported EXPERIMENT_MODE: {current_optimizer_mode}") + + # Adam optimizer setup + adam_param_groups_config = [ + dict(params=head_params, lr=adam_matrix_lr), + dict(params=embed_params, lr=adam_matrix_lr), + dict(params=scalar_params, lr=adam_matrix_lr) # Scalar params always go to Adam + ] + # Add matrices specifically assigned to Adam for this experiment mode + if adam_matrix_target_list: + # Ensure adam_matrix_target_list is flat and contains Parameters + flat_adam_matrices = [p for sublist_or_p in adam_matrix_target_list for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]) if p is not None] + if flat_adam_matrices: # Only add group if there are params + adam_param_groups_config.append(dict(params=flat_adam_matrices, lr=adam_matrix_lr)) + + # Filter out any Adam groups that might be empty (e.g., if scalar_params was empty) + adam_param_groups_config = [g for g in adam_param_groups_config if g['params']] + optimizer1 = torch.optim.Adam(adam_param_groups_config, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizers = [optimizer1] # Start with Adam + + # Muon optimizer setup + if muon_params_target_list: + # Ensure muon_params_target_list is flat, unique, and contains Parameters + 
flat_unique_muon_params = [] + seen_muon_ids = set() + for sublist_or_p in muon_params_target_list: + for p in (sublist_or_p if isinstance(sublist_or_p, list) else [sublist_or_p]): + if p is not None and id(p) not in seen_muon_ids: + flat_unique_muon_params.append(p) + seen_muon_ids.add(id(p)) + + if flat_unique_muon_params: # Only create Muon if it has parameters + optimizer2 = Muon(flat_unique_muon_params, lr=exp_args.muon_lr, momentum=0.95, weight_decay=0.0) + optimizers.append(optimizer2) + else: + print0("PRINT: Muon optimizer not created as its target parameter list was empty.", console=True) + optimizer2 = None # Explicitly set to None if not created + else: + print0("PRINT: Muon optimizer not created as muon_params_target_list was empty (e.g. mode where Adam handles all matrices).", console=True) + optimizer2 = None # Explicitly set to None + + print0(f"PRINT: Optimizers configured. Total optimizers: {len(optimizers)}", console=True) + if optimizer2: + print0(f"PRINT: Muon optimizer is active with {len(flat_unique_muon_params)} parameters.", console=True) + # --- END MODIFIED PARAMETER COLLECTION AND OPTIMIZER SETUP --- +elif exp_args.model_parameterization == "whole": + hidden_matrix_params = [p for n, p in model.blocks.named_parameters() if p.ndim >= 2 and "embed" not in n] + embed_params = [p for n, p in model.named_parameters() if "embed" in n] + scalar_params = [p for p in model.parameters() if p.ndim < 2] + head_params = [model.lm_head.weight] + + # init the optimizer(s) + adam_params = [dict(params=head_params, lr=0.22), dict(params=embed_params, lr=0.6), dict(params=scalar_params, lr=0.04)] + # small adam epsilon by @YouJiacheng. 
this is an alternate method of fixing the world_size dependence + # discovered by @fernbear.bsky.social https://x.com/hi_tysam/status/1879692937589875094 + optimizer1 = torch.optim.Adam(adam_params, betas=(0.8, 0.95), eps=1e-10, fused=True) + optimizer2 = Muon(hidden_matrix_params, lr=0.05, momentum=0.95, rank=rank, world_size=world_size) + optimizers = [optimizer1, optimizer2] + +for opt in optimizers: + for group in opt.param_groups: + group["initial_lr"] = group["lr"] + +# learning rate schedule: stable then decay (KEEP AS IS, but check assert) +def get_lr(step: int): + x = step / args.num_iterations # progress in training + # assert 0 <= x < 1 # Original assert, might fail on last step if step == num_iterations + # --- MODIFICATION: Adjust assert for LR schedule --- + if not (0 <= x <= 1): # Allow x=1 for the last step + x = min(max(x, 0.0), 1.0) # Clamp x if step goes beyond num_iterations + # print0(f"LR schedule x = {x:.4f} (step={step}) was clamped.", console=False) # Optional log + + if x < 1 - args.cooldown_frac: + return 1.0 + else: + # Ensure cooldown_frac is not zero to avoid division by zero + w = (1 - x) / max(args.cooldown_frac, 1e-9) + return w * 1.0 + (1 - w) * 0.1 + +# attention window size schedule (KEEP AS IS) +def next_multiple_of_n(v: float | int, *, n: int): + return next(x for x in range(n, int(v) + 1 + n, n) if x >= v) +@lru_cache(1) +def get_window_size_blocks_helper(window_size: int): + return torch.tensor(window_size // 128, dtype=torch.int32, pin_memory=True).cuda(non_blocking=True) +def get_window_size_blocks(step: int): + x = step / args.num_iterations # progress in training + # --- MODIFICATION: Adjust assert for window size schedule --- + if not (0 <= x <= 1): + x = min(max(x, 0.0), 1.0) # Clamp x + + # Ensure window_size is at least 128 + window_size = max(128, next_multiple_of_n(1728 * x, n=128)) + return get_window_size_blocks_helper(window_size) + +print0("PRINT: Compiling model with TorchInductor...", console=True) +# Use 
'model' for compilation, not 'model_compiled' before it's defined +model_compiled: nn.Module = torch.compile(model, dynamic=False, mode="max-autotune") +print0("PRINT: Model compilation complete.", console=True) + +######################################## +# Warmup kernels # +######################################## +print0("PRINT: Starting warmup...", console=True) +warmup_steps = 10 +initial_state = dict(model=copy.deepcopy(model_compiled.state_dict()), # Use model_compiled + optimizers=[copy.deepcopy(opt.state_dict()) for opt in optimizers]) +for i in range(warmup_steps): + # print0(f"Warmup step {i+1}/{warmup_steps}", console=False) # Less verbose + inputs = targets = torch.randint(0, args.vocab_size, size=(args.train_seq_len,), device="cuda") + loss = model_compiled(inputs.to(torch.int32), targets, get_window_size_blocks(0)) # Use model_compiled + loss.backward() + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + for opt in optimizers: + opt.step() + model_compiled.zero_grad(set_to_none=True) # Use model_compiled +model_compiled.load_state_dict(initial_state["model"]) # Use model_compiled +for opt, opt_state in zip(optimizers, initial_state["optimizers"]): + opt.load_state_dict(opt_state) +del initial_state +print0("PRINT: Warmup complete.", console=True) +torch.cuda.synchronize() + + +params_to_analyze = [] + +if exp_args.model_parameterization == "whole": + params_to_analyze = [p for p in hidden_matrix_params if p.requires_grad] +elif exp_args.model_parameterization == "qkvo" or exp_args.model_parameterization == "gated": + params_to_analyze = all_attn_matrices + all_mlp_matrices + matrix_groups_for_svd = {} + if master_process: + matrix_groups_for_svd = { + "attn_qk": attn_qk_group, + "attn_vo": attn_vo_group, + "mlp_w1": mlp_w1_group, + "mlp_w2": mlp_proj_params + } + + + +######################################## +# Training and validation # 
+######################################## +print0("PRINT: Starting training...", console=True) +train_loader = distributed_data_generator(args.train_files, world_size * args.train_seq_len, rank, world_size) +training_time_ms = 0 +torch.cuda.synchronize() +t0 = time.perf_counter() +train_steps = args.num_iterations + +for step in range(train_steps + 1): # Loop up to num_iterations (inclusive for final validation) + last_step = (step == train_steps) + + # --------------- VALIDATION SECTION ----------------- + # Validate at step 0 (after warmup), at specified intervals, and at the very last step + if step == 0 or last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + # Add time from previous segment only if t0 was set (i.e., not the first validation at step 0) + if step > 0 : # For step 0, t0 hasn't started a training segment yet + current_run_time = 1000 * (time.perf_counter() - t0) + training_time_ms += current_run_time + + model_compiled.eval() # Use model_compiled + val_batch_size = world_size * args.val_seq_len + # Ensure val_tokens is divisible by val_batch_size, or handle remainder + if args.val_tokens % val_batch_size != 0: + print0(f"PRINT: Warning: val_tokens ({args.val_tokens}) not perfectly divisible by val_batch_size ({val_batch_size}). 
Some tokens might be missed.", console=True) + val_num_steps = args.val_tokens // val_batch_size + + val_loader = distributed_data_generator(args.val_files, val_batch_size, rank, world_size) + val_loss_sum = torch.zeros(1, device=device) # Accumulate loss on device + actual_val_steps = 0 + with torch.no_grad(): + for val_i in range(val_num_steps): + try: + inputs, targets = next(val_loader) + loss_val = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + val_loss_sum += loss_val + actual_val_steps += 1 + except StopIteration: + print0(f"PRINT: Validation data loader for '{args.val_files}' exhausted early at val_step {val_i+1}/{val_num_steps}.", console=True) + break # Stop if data runs out + + if actual_val_steps > 0: + val_loss_avg = val_loss_sum / actual_val_steps + else: # Handle case where no validation steps were run (e.g., val_tokens too small or data loader issue) + val_loss_avg = torch.tensor(float('nan'), device=device) + print0(f"PRINT: Warning: No validation steps were completed. 
val_loss is NaN.", console=True) + + del val_loader # Clean up + dist.all_reduce(val_loss_avg, op=dist.ReduceOp.AVG) # Reduce average loss + + svd_log_str = "" + if master_process and 'matrix_groups_for_svd' in locals() and matrix_groups_for_svd: + TOPK = 10 + svd_results_by_category = {} + + with torch.no_grad(): + # per-category metrics (average over matrices in the group) + for name, group_params in matrix_groups_for_svd.items(): + if not group_params: + continue + mets = [calculate_svd_metrics(p, topk=TOPK) for p in group_params] + if mets: + avg_entropy = float(np.mean([m['entropy_norm'] for m in mets])) + avg_erank = float(np.mean([m['erank'] for m in mets])) + avg_topkE = float(np.mean([m['topk_energy'] for m in mets])) + avg_qratio = float(np.mean([m['q75_q25'] for m in mets])) + svd_results_by_category[name] = dict( + entropy=avg_entropy, erank=avg_erank, topkE=avg_topkE, q75_q25=avg_qratio + ) + + # VO product as another category + vo_mets = [] + num_layers = len(attn_v_params) + for i in range(num_layers): + w_v = attn_v_params[i] + w_o = attn_o_params[i] + w_ov_product = torch.matmul(w_o, w_v) + vo_mets.append(calculate_svd_metrics(w_ov_product, topk=TOPK)) + if vo_mets: + svd_results_by_category['vo_prod'] = dict( + entropy=float(np.mean([m['entropy_norm'] for m in vo_mets])), + erank=float(np.mean([m['erank'] for m in vo_mets])), + topkE=float(np.mean([m['topk_energy'] for m in vo_mets])), + q75_q25=float(np.mean([m['q75_q25'] for m in vo_mets])), + ) + + # format logging string (append metrics after entropy) + svd_log_parts = [] + for name, vals in svd_results_by_category.items(): + svd_log_parts.append( + f"{name}:H={vals['entropy']:.4f},top{TOPK}E={vals['topkE']:.2f},eRank={vals['erank']:.1f},q75/q25={vals['q75_q25']:.2f}" + ) + svd_log_str = " ".join(svd_log_parts) + + + # For step 0, training_time_ms is 0. For subsequent steps, it's cumulative. 
+ avg_step_time = training_time_ms / max(step, 1) if step > 0 else 0 + print0(f"PRINT: step:{step}/{train_steps} val_loss:{val_loss_avg.item():.4f} svd_entropy: {svd_log_str} train_time:{training_time_ms:.0f}ms step_avg:{avg_step_time:.2f}ms", console=True) + + model_compiled.train() # Switch back to train mode + torch.cuda.synchronize() + t0 = time.perf_counter() # Reset timer for the next training segment + + if last_step: + if master_process and args.save_checkpoint: + if run_dir_path_str: # Ensure run_dir_path_str is set by master process + checkpoint_parent_dir = Path(run_dir_path_str) / "checkpoints" + checkpoint_parent_dir.mkdir(parents=True, exist_ok=True) # Create checkpoints subdir + checkpoint_path = checkpoint_parent_dir / f"state_step{step:06d}.pt" + log_checkpoint = dict(step=step, code=code, model=model_compiled.state_dict(), # Use model_compiled + optimizers=[opt.state_dict() for opt in optimizers]) + torch.save(log_checkpoint, str(checkpoint_path)) # Convert Path to str for torch.save + print0(f"PRINT: Saved checkpoint to {checkpoint_path}", console=True) + else: + print0("PRINT: Warning - run_dir_path_str not set, cannot save checkpoint.", console=True) + break + + # --------------- TRAINING SECTION ----------------- + try: + inputs, targets = next(train_loader) + except StopIteration: + print0(f"PRINT: Training data loader for '{args.train_files}' exhausted. 
Ending training early at step {step}.", console=True) + break # End if data runs out + + loss_train = model_compiled(inputs, targets, get_window_size_blocks(step)) # Use model_compiled + loss_train.backward() + + for param in model_compiled.parameters(): # Use model_compiled + if param.grad is not None: # Check if grad exists + dist.all_reduce(param.grad, op=dist.ReduceOp.AVG) + + current_lr_val = get_lr(step) + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["initial_lr"] * current_lr_val + + # --- MODIFICATION: Muon momentum warmup only if optimizer2 (Muon) exists --- + if optimizer2 is not None: # Check if Muon optimizer was created + for group in optimizer2.param_groups: + frac = min(step / 300, 1) # momentum warmup for muon + group["momentum"] = (1 - frac) * 0.85 + frac * 0.95 + + for opt in optimizers: + opt.step() + + model_compiled.zero_grad(set_to_none=True) # Use model_compiled + + # Logging (less frequent for training steps) + if step > 0 and (step % 20 == 0 or step == train_steps -1) : # Avoid logging at step 0 before first val + # This time is for the current segment since last validation / t0 reset + current_segment_time_ms = 1000 * (time.perf_counter() - t0) + # approx_training_time_ms is the total cumulative time + approx_total_training_time_ms = training_time_ms + current_segment_time_ms + + total_tokens_in_batch = args.train_seq_len * world_size + train_loss_per_token = loss_train.item() / total_tokens_in_batch if total_tokens_in_batch > 0 else loss_train.item() + + print0(f"step:{step+1}/{train_steps} train_time:{approx_total_training_time_ms:.0f}ms step_avg:{approx_total_training_time_ms/max(1, step + 1):.2f}ms", console=True) # Log to console too + +print0(f"PRINT: --- Training Finished: {time.ctime()} ---", console=True) +print0(f"PRINT: Peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB", console=True) + +if 
dist.is_initialized(): + dist.destroy_process_group() +[2025-08-22 23:20:51] [Rank 0] PRINT: Constructing model... +[2025-08-22 23:20:51] [Rank 0] PRINT: Constructing model... +[2025-08-22 23:20:53] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 23:20:53] [Rank 0] PRINT: Broadcasting model parameters... +[2025-08-22 23:20:53] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 23:20:53] [Rank 0] PRINT: Model constructed and broadcasted. +[2025-08-22 23:20:53] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 23:20:53] [Rank 0] PRINT: Collecting parameters for optimizers... +[2025-08-22 23:20:53] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 9 +[2025-08-22 23:20:53] [Rank 0] PRINT: Configuring optimizers for EXPERIMENT_MODE = 9 +[2025-08-22 23:20:53] [Rank 0] PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: 0.05). +[2025-08-22 23:20:53] [Rank 0] PRINT: Mode 9: Muon on V Attn, MLP (Adam LR: 0.05). +[2025-08-22 23:20:53] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 23:20:53] [Rank 0] PRINT: Optimizers configured. Total optimizers: 2 +[2025-08-22 23:20:53] [Rank 0] PRINT: Muon optimizer is active with 47 parameters. +[2025-08-22 23:20:53] [Rank 0] PRINT: Muon optimizer is active with 47 parameters. +[2025-08-22 23:20:53] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 23:20:53] [Rank 0] PRINT: Compiling model with TorchInductor... +[2025-08-22 23:20:53] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 23:20:53] [Rank 0] PRINT: Model compilation complete. +[2025-08-22 23:20:53] [Rank 0] PRINT: Starting warmup... +[2025-08-22 23:20:53] [Rank 0] PRINT: Starting warmup... +[2025-08-22 23:21:36] [Rank 0] PRINT: Warmup complete. +[2025-08-22 23:21:36] [Rank 0] PRINT: Warmup complete. +[2025-08-22 23:21:36] [Rank 0] PRINT: Starting training... +[2025-08-22 23:21:36] [Rank 0] PRINT: Starting training... 
+[2025-08-22 23:21:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:21:36] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:21:54] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 23:21:54] [Rank 0] PRINT: step:0/10000 val_loss:10.8258 svd_entropy: attn_qk:H=0.9247,top10E=0.05,eRank=465.8,q75/q25=10.27 attn_vo:H=0.4624,top10E=0.02,eRank=232.9,q75/q25=inf mlp_w1:H=0.9812,top10E=0.03,eRank=677.7,q75/q25=2.37 mlp_w2:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf vo_prod:H=0.0000,top10E=0.00,eRank=0.0,q75/q25=inf train_time:0ms step_avg:0.00ms +[2025-08-22 23:21:56] [Rank 0] step:21/10000 train_time:1851ms step_avg:88.14ms +[2025-08-22 23:21:56] [Rank 0] step:21/10000 train_time:1851ms step_avg:88.14ms +[2025-08-22 23:21:58] [Rank 0] step:41/10000 train_time:3643ms step_avg:88.84ms +[2025-08-22 23:21:58] [Rank 0] step:41/10000 train_time:3643ms step_avg:88.84ms +[2025-08-22 23:22:00] [Rank 0] step:61/10000 train_time:5437ms step_avg:89.14ms +[2025-08-22 23:22:00] [Rank 0] step:61/10000 train_time:5437ms step_avg:89.14ms +[2025-08-22 23:22:01] [Rank 0] step:81/10000 train_time:7233ms step_avg:89.29ms +[2025-08-22 23:22:01] [Rank 0] step:81/10000 train_time:7233ms step_avg:89.29ms +[2025-08-22 23:22:03] [Rank 0] step:101/10000 train_time:9030ms step_avg:89.41ms +[2025-08-22 23:22:03] [Rank 0] step:101/10000 train_time:9030ms step_avg:89.41ms +[2025-08-22 23:22:05] [Rank 0] step:121/10000 train_time:10827ms step_avg:89.48ms +[2025-08-22 23:22:05] [Rank 0] step:121/10000 
train_time:10827ms step_avg:89.48ms +[2025-08-22 23:22:07] [Rank 0] step:141/10000 train_time:12624ms step_avg:89.53ms +[2025-08-22 23:22:07] [Rank 0] step:141/10000 train_time:12624ms step_avg:89.53ms +[2025-08-22 23:22:09] [Rank 0] step:161/10000 train_time:14422ms step_avg:89.58ms +[2025-08-22 23:22:09] [Rank 0] step:161/10000 train_time:14422ms step_avg:89.58ms +[2025-08-22 23:22:10] [Rank 0] step:181/10000 train_time:16218ms step_avg:89.60ms +[2025-08-22 23:22:10] [Rank 0] step:181/10000 train_time:16218ms step_avg:89.60ms +[2025-08-22 23:22:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:22:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:22:26] [Rank 0] PRINT: step:200/10000 val_loss:5.6961 svd_entropy: attn_qk:H=0.4784,top10E=0.73,eRank=32.1,q75/q25=27.98 attn_vo:H=0.6432,top10E=0.43,eRank=149.0,q75/q25=20.47 mlp_w1:H=0.9468,top10E=0.05,eRank=542.7,q75/q25=4.83 mlp_w2:H=0.9259,top10E=0.07,eRank=477.2,q75/q25=6.94 vo_prod:H=0.3690,top10E=0.80,eRank=21.7,q75/q25=194.10 train_time:18021ms step_avg:90.10ms +[2025-08-22 23:22:26] [Rank 0] PRINT: step:200/10000 val_loss:5.6961 svd_entropy: attn_qk:H=0.4784,top10E=0.73,eRank=32.1,q75/q25=27.98 attn_vo:H=0.6432,top10E=0.43,eRank=149.0,q75/q25=20.47 mlp_w1:H=0.9468,top10E=0.05,eRank=542.7,q75/q25=4.83 mlp_w2:H=0.9259,top10E=0.07,eRank=477.2,q75/q25=6.94 vo_prod:H=0.3690,top10E=0.80,eRank=21.7,q75/q25=194.10 train_time:18021ms step_avg:90.10ms +[2025-08-22 23:22:26] [Rank 0] step:201/10000 train_time:18042ms step_avg:89.76ms +[2025-08-22 23:22:26] [Rank 0] step:201/10000 train_time:18042ms step_avg:89.76ms +[2025-08-22 23:22:28] [Rank 0] step:221/10000 train_time:19842ms step_avg:89.78ms +[2025-08-22 23:22:28] [Rank 0] step:221/10000 train_time:19842ms step_avg:89.78ms +[2025-08-22 23:22:30] [Rank 0] step:241/10000 
train_time:21635ms step_avg:89.77ms +[2025-08-22 23:22:30] [Rank 0] step:241/10000 train_time:21635ms step_avg:89.77ms +[2025-08-22 23:22:32] [Rank 0] step:261/10000 train_time:23429ms step_avg:89.77ms +[2025-08-22 23:22:32] [Rank 0] step:261/10000 train_time:23429ms step_avg:89.77ms +[2025-08-22 23:22:33] [Rank 0] step:281/10000 train_time:25223ms step_avg:89.76ms +[2025-08-22 23:22:33] [Rank 0] step:281/10000 train_time:25223ms step_avg:89.76ms +[2025-08-22 23:22:35] [Rank 0] step:301/10000 train_time:27018ms step_avg:89.76ms +[2025-08-22 23:22:35] [Rank 0] step:301/10000 train_time:27018ms step_avg:89.76ms +[2025-08-22 23:22:37] [Rank 0] step:321/10000 train_time:28815ms step_avg:89.76ms +[2025-08-22 23:22:37] [Rank 0] step:321/10000 train_time:28815ms step_avg:89.76ms +[2025-08-22 23:22:39] [Rank 0] step:341/10000 train_time:30611ms step_avg:89.77ms +[2025-08-22 23:22:39] [Rank 0] step:341/10000 train_time:30611ms step_avg:89.77ms +[2025-08-22 23:22:40] [Rank 0] step:361/10000 train_time:32409ms step_avg:89.78ms +[2025-08-22 23:22:40] [Rank 0] step:361/10000 train_time:32409ms step_avg:89.78ms +[2025-08-22 23:22:42] [Rank 0] step:381/10000 train_time:34206ms step_avg:89.78ms +[2025-08-22 23:22:42] [Rank 0] step:381/10000 train_time:34206ms step_avg:89.78ms +[2025-08-22 23:22:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:22:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:22:58] [Rank 0] PRINT: step:400/10000 val_loss:5.2555 svd_entropy: attn_qk:H=0.4892,top10E=0.71,eRank=36.0,q75/q25=31.37 attn_vo:H=0.6689,top10E=0.41,eRank=170.8,q75/q25=20.90 mlp_w1:H=0.9404,top10E=0.06,eRank=523.5,q75/q25=4.77 mlp_w2:H=0.9323,top10E=0.07,eRank=499.6,q75/q25=6.26 vo_prod:H=0.4067,top10E=0.74,eRank=38.6,q75/q25=214.02 train_time:36013ms step_avg:90.03ms +[2025-08-22 23:22:58] [Rank 0] PRINT: step:400/10000 val_loss:5.2555 svd_entropy: attn_qk:H=0.4892,top10E=0.71,eRank=36.0,q75/q25=31.37 attn_vo:H=0.6689,top10E=0.41,eRank=170.8,q75/q25=20.90 mlp_w1:H=0.9404,top10E=0.06,eRank=523.5,q75/q25=4.77 mlp_w2:H=0.9323,top10E=0.07,eRank=499.6,q75/q25=6.26 vo_prod:H=0.4067,top10E=0.74,eRank=38.6,q75/q25=214.02 train_time:36013ms step_avg:90.03ms +[2025-08-22 23:22:58] [Rank 0] step:401/10000 train_time:36035ms step_avg:89.86ms +[2025-08-22 23:22:58] [Rank 0] step:401/10000 train_time:36035ms step_avg:89.86ms +[2025-08-22 23:23:00] [Rank 0] step:421/10000 train_time:37816ms step_avg:89.82ms +[2025-08-22 23:23:00] [Rank 0] step:421/10000 train_time:37816ms step_avg:89.82ms +[2025-08-22 23:23:02] [Rank 0] step:441/10000 train_time:39609ms step_avg:89.82ms +[2025-08-22 23:23:02] [Rank 0] step:441/10000 train_time:39609ms step_avg:89.82ms +[2025-08-22 23:23:03] [Rank 0] step:461/10000 train_time:41403ms step_avg:89.81ms +[2025-08-22 23:23:03] [Rank 0] step:461/10000 train_time:41403ms step_avg:89.81ms +[2025-08-22 23:23:05] [Rank 0] step:481/10000 train_time:43202ms step_avg:89.82ms +[2025-08-22 23:23:05] [Rank 0] step:481/10000 train_time:43202ms step_avg:89.82ms +[2025-08-22 23:23:07] [Rank 0] step:501/10000 train_time:44997ms step_avg:89.82ms +[2025-08-22 23:23:07] [Rank 0] step:501/10000 train_time:44997ms step_avg:89.82ms +[2025-08-22 23:23:09] [Rank 0] step:521/10000 train_time:46795ms step_avg:89.82ms +[2025-08-22 23:23:09] [Rank 0] step:521/10000 train_time:46795ms step_avg:89.82ms +[2025-08-22 23:23:11] [Rank 0] step:541/10000 
train_time:48591ms step_avg:89.82ms +[2025-08-22 23:23:11] [Rank 0] step:541/10000 train_time:48591ms step_avg:89.82ms +[2025-08-22 23:23:12] [Rank 0] step:561/10000 train_time:50390ms step_avg:89.82ms +[2025-08-22 23:23:12] [Rank 0] step:561/10000 train_time:50390ms step_avg:89.82ms +[2025-08-22 23:23:14] [Rank 0] step:581/10000 train_time:52188ms step_avg:89.82ms +[2025-08-22 23:23:14] [Rank 0] step:581/10000 train_time:52188ms step_avg:89.82ms +[2025-08-22 23:23:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:23:16] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:23:30] [Rank 0] PRINT: step:600/10000 val_loss:5.0348 svd_entropy: attn_qk:H=0.4808,top10E=0.70,eRank=40.1,q75/q25=45.54 attn_vo:H=0.7045,top10E=0.36,eRank=182.4,q75/q25=22.07 mlp_w1:H=0.9356,top10E=0.07,eRank=509.5,q75/q25=4.85 mlp_w2:H=0.9293,top10E=0.07,eRank=492.1,q75/q25=6.37 vo_prod:H=0.4690,top10E=0.68,eRank=48.6,q75/q25=238.16 train_time:53992ms step_avg:89.99ms +[2025-08-22 23:23:30] [Rank 0] PRINT: step:600/10000 val_loss:5.0348 svd_entropy: attn_qk:H=0.4808,top10E=0.70,eRank=40.1,q75/q25=45.54 attn_vo:H=0.7045,top10E=0.36,eRank=182.4,q75/q25=22.07 mlp_w1:H=0.9356,top10E=0.07,eRank=509.5,q75/q25=4.85 mlp_w2:H=0.9293,top10E=0.07,eRank=492.1,q75/q25=6.37 vo_prod:H=0.4690,top10E=0.68,eRank=48.6,q75/q25=238.16 train_time:53992ms step_avg:89.99ms +[2025-08-22 23:23:30] [Rank 0] step:601/10000 train_time:54012ms step_avg:89.87ms +[2025-08-22 23:23:30] [Rank 0] step:601/10000 train_time:54012ms step_avg:89.87ms +[2025-08-22 23:23:32] [Rank 0] step:621/10000 train_time:55807ms step_avg:89.87ms +[2025-08-22 23:23:32] [Rank 0] step:621/10000 train_time:55807ms step_avg:89.87ms +[2025-08-22 23:23:33] [Rank 0] step:641/10000 train_time:57601ms step_avg:89.86ms +[2025-08-22 23:23:33] [Rank 0] step:641/10000 
train_time:57601ms step_avg:89.86ms +[2025-08-22 23:23:35] [Rank 0] step:661/10000 train_time:59396ms step_avg:89.86ms +[2025-08-22 23:23:35] [Rank 0] step:661/10000 train_time:59396ms step_avg:89.86ms +[2025-08-22 23:23:37] [Rank 0] step:681/10000 train_time:61195ms step_avg:89.86ms +[2025-08-22 23:23:37] [Rank 0] step:681/10000 train_time:61195ms step_avg:89.86ms +[2025-08-22 23:23:39] [Rank 0] step:701/10000 train_time:62994ms step_avg:89.86ms +[2025-08-22 23:23:39] [Rank 0] step:701/10000 train_time:62994ms step_avg:89.86ms +[2025-08-22 23:23:41] [Rank 0] step:721/10000 train_time:64793ms step_avg:89.87ms +[2025-08-22 23:23:41] [Rank 0] step:721/10000 train_time:64793ms step_avg:89.87ms +[2025-08-22 23:23:42] [Rank 0] step:741/10000 train_time:66592ms step_avg:89.87ms +[2025-08-22 23:23:42] [Rank 0] step:741/10000 train_time:66592ms step_avg:89.87ms +[2025-08-22 23:23:44] [Rank 0] step:761/10000 train_time:68406ms step_avg:89.89ms +[2025-08-22 23:23:44] [Rank 0] step:761/10000 train_time:68406ms step_avg:89.89ms +[2025-08-22 23:23:46] [Rank 0] step:781/10000 train_time:70220ms step_avg:89.91ms +[2025-08-22 23:23:46] [Rank 0] step:781/10000 train_time:70220ms step_avg:89.91ms +[2025-08-22 23:23:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:23:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:24:01] [Rank 0] PRINT: step:800/10000 val_loss:4.7459 svd_entropy: attn_qk:H=0.4769,top10E=0.68,eRank=45.5,q75/q25=59.90 attn_vo:H=0.7110,top10E=0.36,eRank=184.9,q75/q25=23.08 mlp_w1:H=0.9305,top10E=0.08,eRank=494.4,q75/q25=4.90 mlp_w2:H=0.9224,top10E=0.09,eRank=473.9,q75/q25=6.45 vo_prod:H=0.5025,top10E=0.65,eRank=54.2,q75/q25=254.02 train_time:72041ms step_avg:90.05ms +[2025-08-22 23:24:01] [Rank 0] PRINT: step:800/10000 val_loss:4.7459 svd_entropy: attn_qk:H=0.4769,top10E=0.68,eRank=45.5,q75/q25=59.90 attn_vo:H=0.7110,top10E=0.36,eRank=184.9,q75/q25=23.08 mlp_w1:H=0.9305,top10E=0.08,eRank=494.4,q75/q25=4.90 mlp_w2:H=0.9224,top10E=0.09,eRank=473.9,q75/q25=6.45 vo_prod:H=0.5025,top10E=0.65,eRank=54.2,q75/q25=254.02 train_time:72041ms step_avg:90.05ms +[2025-08-22 23:24:02] [Rank 0] step:801/10000 train_time:72061ms step_avg:89.96ms +[2025-08-22 23:24:02] [Rank 0] step:801/10000 train_time:72061ms step_avg:89.96ms +[2025-08-22 23:24:03] [Rank 0] step:821/10000 train_time:73876ms step_avg:89.98ms +[2025-08-22 23:24:03] [Rank 0] step:821/10000 train_time:73876ms step_avg:89.98ms +[2025-08-22 23:24:05] [Rank 0] step:841/10000 train_time:75683ms step_avg:89.99ms +[2025-08-22 23:24:05] [Rank 0] step:841/10000 train_time:75683ms step_avg:89.99ms +[2025-08-22 23:24:07] [Rank 0] step:861/10000 train_time:77491ms step_avg:90.00ms +[2025-08-22 23:24:07] [Rank 0] step:861/10000 train_time:77491ms step_avg:90.00ms +[2025-08-22 23:24:09] [Rank 0] step:881/10000 train_time:79301ms step_avg:90.01ms +[2025-08-22 23:24:09] [Rank 0] step:881/10000 train_time:79301ms step_avg:90.01ms +[2025-08-22 23:24:11] [Rank 0] step:901/10000 train_time:81110ms step_avg:90.02ms +[2025-08-22 23:24:11] [Rank 0] step:901/10000 train_time:81110ms step_avg:90.02ms +[2025-08-22 23:24:12] [Rank 0] step:921/10000 train_time:82920ms step_avg:90.03ms +[2025-08-22 23:24:12] [Rank 0] step:921/10000 train_time:82920ms step_avg:90.03ms +[2025-08-22 23:24:14] [Rank 0] step:941/10000 
train_time:84729ms step_avg:90.04ms +[2025-08-22 23:24:14] [Rank 0] step:941/10000 train_time:84729ms step_avg:90.04ms +[2025-08-22 23:24:16] [Rank 0] step:961/10000 train_time:86541ms step_avg:90.05ms +[2025-08-22 23:24:16] [Rank 0] step:961/10000 train_time:86541ms step_avg:90.05ms +[2025-08-22 23:24:18] [Rank 0] step:981/10000 train_time:88353ms step_avg:90.06ms +[2025-08-22 23:24:18] [Rank 0] step:981/10000 train_time:88353ms step_avg:90.06ms +[2025-08-22 23:24:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:24:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:24:33] [Rank 0] PRINT: step:1000/10000 val_loss:4.6155 svd_entropy: attn_qk:H=0.4942,top10E=0.67,eRank=49.0,q75/q25=69.16 attn_vo:H=0.7096,top10E=0.36,eRank=184.6,q75/q25=23.10 mlp_w1:H=0.9277,top10E=0.08,eRank=486.7,q75/q25=4.91 mlp_w2:H=0.9190,top10E=0.09,eRank=465.7,q75/q25=6.43 vo_prod:H=0.4960,top10E=0.66,eRank=54.3,q75/q25=253.79 train_time:90170ms step_avg:90.17ms +[2025-08-22 23:24:33] [Rank 0] PRINT: step:1000/10000 val_loss:4.6155 svd_entropy: attn_qk:H=0.4942,top10E=0.67,eRank=49.0,q75/q25=69.16 attn_vo:H=0.7096,top10E=0.36,eRank=184.6,q75/q25=23.10 mlp_w1:H=0.9277,top10E=0.08,eRank=486.7,q75/q25=4.91 mlp_w2:H=0.9190,top10E=0.09,eRank=465.7,q75/q25=6.43 vo_prod:H=0.4960,top10E=0.66,eRank=54.3,q75/q25=253.79 train_time:90170ms step_avg:90.17ms +[2025-08-22 23:24:34] [Rank 0] step:1001/10000 train_time:90191ms step_avg:90.10ms +[2025-08-22 23:24:34] [Rank 0] step:1001/10000 train_time:90191ms step_avg:90.10ms +[2025-08-22 23:24:35] [Rank 0] step:1021/10000 train_time:92008ms step_avg:90.12ms +[2025-08-22 23:24:35] [Rank 0] step:1021/10000 train_time:92008ms step_avg:90.12ms +[2025-08-22 23:24:37] [Rank 0] step:1041/10000 train_time:93817ms step_avg:90.12ms +[2025-08-22 23:24:37] [Rank 0] step:1041/10000 
train_time:93817ms step_avg:90.12ms +[2025-08-22 23:24:39] [Rank 0] step:1061/10000 train_time:95626ms step_avg:90.13ms +[2025-08-22 23:24:39] [Rank 0] step:1061/10000 train_time:95626ms step_avg:90.13ms +[2025-08-22 23:24:41] [Rank 0] step:1081/10000 train_time:97437ms step_avg:90.14ms +[2025-08-22 23:24:41] [Rank 0] step:1081/10000 train_time:97437ms step_avg:90.14ms +[2025-08-22 23:24:43] [Rank 0] step:1101/10000 train_time:99248ms step_avg:90.14ms +[2025-08-22 23:24:43] [Rank 0] step:1101/10000 train_time:99248ms step_avg:90.14ms +[2025-08-22 23:24:44] [Rank 0] step:1121/10000 train_time:101058ms step_avg:90.15ms +[2025-08-22 23:24:44] [Rank 0] step:1121/10000 train_time:101058ms step_avg:90.15ms +[2025-08-22 23:24:46] [Rank 0] step:1141/10000 train_time:102869ms step_avg:90.16ms +[2025-08-22 23:24:46] [Rank 0] step:1141/10000 train_time:102869ms step_avg:90.16ms +[2025-08-22 23:24:48] [Rank 0] step:1161/10000 train_time:104680ms step_avg:90.16ms +[2025-08-22 23:24:48] [Rank 0] step:1161/10000 train_time:104680ms step_avg:90.16ms +[2025-08-22 23:24:50] [Rank 0] step:1181/10000 train_time:106494ms step_avg:90.17ms +[2025-08-22 23:24:50] [Rank 0] step:1181/10000 train_time:106494ms step_avg:90.17ms +[2025-08-22 23:24:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:24:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:25:05] [Rank 0] PRINT: step:1200/10000 val_loss:4.5179 svd_entropy: attn_qk:H=0.4976,top10E=0.66,eRank=50.0,q75/q25=75.15 attn_vo:H=0.7081,top10E=0.36,eRank=182.7,q75/q25=23.56 mlp_w1:H=0.9237,top10E=0.09,eRank=476.0,q75/q25=4.93 mlp_w2:H=0.9148,top10E=0.10,eRank=455.7,q75/q25=6.45 vo_prod:H=0.4932,top10E=0.67,eRank=56.0,q75/q25=263.48 train_time:108313ms step_avg:90.26ms +[2025-08-22 23:25:05] [Rank 0] PRINT: step:1200/10000 val_loss:4.5179 svd_entropy: attn_qk:H=0.4976,top10E=0.66,eRank=50.0,q75/q25=75.15 attn_vo:H=0.7081,top10E=0.36,eRank=182.7,q75/q25=23.56 mlp_w1:H=0.9237,top10E=0.09,eRank=476.0,q75/q25=4.93 mlp_w2:H=0.9148,top10E=0.10,eRank=455.7,q75/q25=6.45 vo_prod:H=0.4932,top10E=0.67,eRank=56.0,q75/q25=263.48 train_time:108313ms step_avg:90.26ms +[2025-08-22 23:25:06] [Rank 0] step:1201/10000 train_time:108334ms step_avg:90.20ms +[2025-08-22 23:25:06] [Rank 0] step:1201/10000 train_time:108334ms step_avg:90.20ms +[2025-08-22 23:25:07] [Rank 0] step:1221/10000 train_time:110136ms step_avg:90.20ms +[2025-08-22 23:25:07] [Rank 0] step:1221/10000 train_time:110136ms step_avg:90.20ms +[2025-08-22 23:25:09] [Rank 0] step:1241/10000 train_time:111947ms step_avg:90.21ms +[2025-08-22 23:25:09] [Rank 0] step:1241/10000 train_time:111947ms step_avg:90.21ms +[2025-08-22 23:25:11] [Rank 0] step:1261/10000 train_time:113759ms step_avg:90.21ms +[2025-08-22 23:25:11] [Rank 0] step:1261/10000 train_time:113759ms step_avg:90.21ms +[2025-08-22 23:25:13] [Rank 0] step:1281/10000 train_time:115571ms step_avg:90.22ms +[2025-08-22 23:25:13] [Rank 0] step:1281/10000 train_time:115571ms step_avg:90.22ms +[2025-08-22 23:25:15] [Rank 0] step:1301/10000 train_time:117385ms step_avg:90.23ms +[2025-08-22 23:25:15] [Rank 0] step:1301/10000 train_time:117385ms step_avg:90.23ms +[2025-08-22 23:25:16] [Rank 0] step:1321/10000 train_time:119203ms step_avg:90.24ms +[2025-08-22 23:25:16] [Rank 0] step:1321/10000 train_time:119203ms step_avg:90.24ms +[2025-08-22 23:25:18] 
[Rank 0] step:1341/10000 train_time:121020ms step_avg:90.25ms +[2025-08-22 23:25:18] [Rank 0] step:1341/10000 train_time:121020ms step_avg:90.25ms +[2025-08-22 23:25:20] [Rank 0] step:1361/10000 train_time:122838ms step_avg:90.26ms +[2025-08-22 23:25:20] [Rank 0] step:1361/10000 train_time:122838ms step_avg:90.26ms +[2025-08-22 23:25:22] [Rank 0] step:1381/10000 train_time:124656ms step_avg:90.27ms +[2025-08-22 23:25:22] [Rank 0] step:1381/10000 train_time:124656ms step_avg:90.27ms +[2025-08-22 23:25:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:25:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:25:37] [Rank 0] PRINT: step:1400/10000 val_loss:4.5320 svd_entropy: attn_qk:H=0.4891,top10E=0.67,eRank=50.1,q75/q25=72.18 attn_vo:H=0.7070,top10E=0.36,eRank=182.6,q75/q25=23.80 mlp_w1:H=0.9217,top10E=0.10,eRank=471.0,q75/q25=4.96 mlp_w2:H=0.9127,top10E=0.11,eRank=450.9,q75/q25=6.45 vo_prod:H=0.4859,top10E=0.67,eRank=55.7,q75/q25=264.72 train_time:126480ms step_avg:90.34ms +[2025-08-22 23:25:37] [Rank 0] PRINT: step:1400/10000 val_loss:4.5320 svd_entropy: attn_qk:H=0.4891,top10E=0.67,eRank=50.1,q75/q25=72.18 attn_vo:H=0.7070,top10E=0.36,eRank=182.6,q75/q25=23.80 mlp_w1:H=0.9217,top10E=0.10,eRank=471.0,q75/q25=4.96 mlp_w2:H=0.9127,top10E=0.11,eRank=450.9,q75/q25=6.45 vo_prod:H=0.4859,top10E=0.67,eRank=55.7,q75/q25=264.72 train_time:126480ms step_avg:90.34ms +[2025-08-22 23:25:37] [Rank 0] step:1401/10000 train_time:126500ms step_avg:90.29ms +[2025-08-22 23:25:37] [Rank 0] step:1401/10000 train_time:126500ms step_avg:90.29ms +[2025-08-22 23:25:39] [Rank 0] step:1421/10000 train_time:128317ms step_avg:90.30ms +[2025-08-22 23:25:39] [Rank 0] step:1421/10000 train_time:128317ms step_avg:90.30ms +[2025-08-22 23:25:41] [Rank 0] step:1441/10000 train_time:130124ms step_avg:90.30ms 
+[2025-08-22 23:25:41] [Rank 0] step:1441/10000 train_time:130124ms step_avg:90.30ms +[2025-08-22 23:25:43] [Rank 0] step:1461/10000 train_time:131931ms step_avg:90.30ms +[2025-08-22 23:25:43] [Rank 0] step:1461/10000 train_time:131931ms step_avg:90.30ms +[2025-08-22 23:25:45] [Rank 0] step:1481/10000 train_time:133741ms step_avg:90.30ms +[2025-08-22 23:25:45] [Rank 0] step:1481/10000 train_time:133741ms step_avg:90.30ms +[2025-08-22 23:25:47] [Rank 0] step:1501/10000 train_time:135561ms step_avg:90.31ms +[2025-08-22 23:25:47] [Rank 0] step:1501/10000 train_time:135561ms step_avg:90.31ms +[2025-08-22 23:25:48] [Rank 0] step:1521/10000 train_time:137382ms step_avg:90.32ms +[2025-08-22 23:25:48] [Rank 0] step:1521/10000 train_time:137382ms step_avg:90.32ms +[2025-08-22 23:25:50] [Rank 0] step:1541/10000 train_time:139203ms step_avg:90.33ms +[2025-08-22 23:25:50] [Rank 0] step:1541/10000 train_time:139203ms step_avg:90.33ms +[2025-08-22 23:25:52] [Rank 0] step:1561/10000 train_time:141027ms step_avg:90.34ms +[2025-08-22 23:25:52] [Rank 0] step:1561/10000 train_time:141027ms step_avg:90.34ms +[2025-08-22 23:25:54] [Rank 0] step:1581/10000 train_time:142852ms step_avg:90.36ms +[2025-08-22 23:25:54] [Rank 0] step:1581/10000 train_time:142852ms step_avg:90.36ms +[2025-08-22 23:25:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:25:56] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:26:09] [Rank 0] PRINT: step:1600/10000 val_loss:4.3783 svd_entropy: attn_qk:H=0.4980,top10E=0.66,eRank=51.8,q75/q25=72.20 attn_vo:H=0.7062,top10E=0.37,eRank=181.8,q75/q25=23.92 mlp_w1:H=0.9162,top10E=0.11,eRank=456.5,q75/q25=4.99 mlp_w2:H=0.9067,top10E=0.12,eRank=437.6,q75/q25=6.49 vo_prod:H=0.4640,top10E=0.68,eRank=55.8,q75/q25=267.74 train_time:144682ms step_avg:90.43ms +[2025-08-22 23:26:09] [Rank 0] PRINT: step:1600/10000 val_loss:4.3783 svd_entropy: attn_qk:H=0.4980,top10E=0.66,eRank=51.8,q75/q25=72.20 attn_vo:H=0.7062,top10E=0.37,eRank=181.8,q75/q25=23.92 mlp_w1:H=0.9162,top10E=0.11,eRank=456.5,q75/q25=4.99 mlp_w2:H=0.9067,top10E=0.12,eRank=437.6,q75/q25=6.49 vo_prod:H=0.4640,top10E=0.68,eRank=55.8,q75/q25=267.74 train_time:144682ms step_avg:90.43ms +[2025-08-22 23:26:09] [Rank 0] step:1601/10000 train_time:144702ms step_avg:90.38ms +[2025-08-22 23:26:09] [Rank 0] step:1601/10000 train_time:144702ms step_avg:90.38ms +[2025-08-22 23:26:11] [Rank 0] step:1621/10000 train_time:146513ms step_avg:90.38ms +[2025-08-22 23:26:11] [Rank 0] step:1621/10000 train_time:146513ms step_avg:90.38ms +[2025-08-22 23:26:13] [Rank 0] step:1641/10000 train_time:148331ms step_avg:90.39ms +[2025-08-22 23:26:13] [Rank 0] step:1641/10000 train_time:148331ms step_avg:90.39ms +[2025-08-22 23:26:15] [Rank 0] step:1661/10000 train_time:150150ms step_avg:90.40ms +[2025-08-22 23:26:15] [Rank 0] step:1661/10000 train_time:150150ms step_avg:90.40ms +[2025-08-22 23:26:17] [Rank 0] step:1681/10000 train_time:151972ms step_avg:90.41ms +[2025-08-22 23:26:17] [Rank 0] step:1681/10000 train_time:151972ms step_avg:90.41ms +[2025-08-22 23:26:18] [Rank 0] step:1701/10000 train_time:153793ms step_avg:90.41ms +[2025-08-22 23:26:18] [Rank 0] step:1701/10000 train_time:153793ms step_avg:90.41ms +[2025-08-22 23:26:20] [Rank 0] step:1721/10000 train_time:155616ms step_avg:90.42ms +[2025-08-22 23:26:20] [Rank 0] step:1721/10000 train_time:155616ms step_avg:90.42ms +[2025-08-22 23:26:22] 
[Rank 0] step:1741/10000 train_time:157438ms step_avg:90.43ms +[2025-08-22 23:26:22] [Rank 0] step:1741/10000 train_time:157438ms step_avg:90.43ms +[2025-08-22 23:26:24] [Rank 0] step:1761/10000 train_time:159261ms step_avg:90.44ms +[2025-08-22 23:26:24] [Rank 0] step:1761/10000 train_time:159261ms step_avg:90.44ms +[2025-08-22 23:26:26] [Rank 0] step:1781/10000 train_time:161084ms step_avg:90.45ms +[2025-08-22 23:26:26] [Rank 0] step:1781/10000 train_time:161084ms step_avg:90.45ms +[2025-08-22 23:26:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:26:27] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:26:41] [Rank 0] PRINT: step:1800/10000 val_loss:4.3245 svd_entropy: attn_qk:H=0.5004,top10E=0.66,eRank=52.6,q75/q25=76.51 attn_vo:H=0.7056,top10E=0.37,eRank=183.1,q75/q25=24.31 mlp_w1:H=0.9151,top10E=0.11,eRank=453.5,q75/q25=5.04 mlp_w2:H=0.9049,top10E=0.12,eRank=433.6,q75/q25=6.51 vo_prod:H=0.4617,top10E=0.68,eRank=56.9,q75/q25=268.44 train_time:162910ms step_avg:90.51ms +[2025-08-22 23:26:41] [Rank 0] PRINT: step:1800/10000 val_loss:4.3245 svd_entropy: attn_qk:H=0.5004,top10E=0.66,eRank=52.6,q75/q25=76.51 attn_vo:H=0.7056,top10E=0.37,eRank=183.1,q75/q25=24.31 mlp_w1:H=0.9151,top10E=0.11,eRank=453.5,q75/q25=5.04 mlp_w2:H=0.9049,top10E=0.12,eRank=433.6,q75/q25=6.51 vo_prod:H=0.4617,top10E=0.68,eRank=56.9,q75/q25=268.44 train_time:162910ms step_avg:90.51ms +[2025-08-22 23:26:41] [Rank 0] step:1801/10000 train_time:162930ms step_avg:90.47ms +[2025-08-22 23:26:41] [Rank 0] step:1801/10000 train_time:162930ms step_avg:90.47ms +[2025-08-22 23:26:43] [Rank 0] step:1821/10000 train_time:164750ms step_avg:90.47ms +[2025-08-22 23:26:43] [Rank 0] step:1821/10000 train_time:164750ms step_avg:90.47ms +[2025-08-22 23:26:45] [Rank 0] step:1841/10000 train_time:166569ms step_avg:90.48ms 
+[2025-08-22 23:26:45] [Rank 0] step:1841/10000 train_time:166569ms step_avg:90.48ms +[2025-08-22 23:26:47] [Rank 0] step:1861/10000 train_time:168388ms step_avg:90.48ms +[2025-08-22 23:26:47] [Rank 0] step:1861/10000 train_time:168388ms step_avg:90.48ms +[2025-08-22 23:26:49] [Rank 0] step:1881/10000 train_time:170206ms step_avg:90.49ms +[2025-08-22 23:26:49] [Rank 0] step:1881/10000 train_time:170206ms step_avg:90.49ms +[2025-08-22 23:26:50] [Rank 0] step:1901/10000 train_time:172026ms step_avg:90.49ms +[2025-08-22 23:26:50] [Rank 0] step:1901/10000 train_time:172026ms step_avg:90.49ms +[2025-08-22 23:26:52] [Rank 0] step:1921/10000 train_time:173847ms step_avg:90.50ms +[2025-08-22 23:26:52] [Rank 0] step:1921/10000 train_time:173847ms step_avg:90.50ms +[2025-08-22 23:26:54] [Rank 0] step:1941/10000 train_time:175668ms step_avg:90.50ms +[2025-08-22 23:26:54] [Rank 0] step:1941/10000 train_time:175668ms step_avg:90.50ms +[2025-08-22 23:26:56] [Rank 0] step:1961/10000 train_time:177490ms step_avg:90.51ms +[2025-08-22 23:26:56] [Rank 0] step:1961/10000 train_time:177490ms step_avg:90.51ms +[2025-08-22 23:26:58] [Rank 0] step:1981/10000 train_time:179311ms step_avg:90.52ms +[2025-08-22 23:26:58] [Rank 0] step:1981/10000 train_time:179311ms step_avg:90.52ms +[2025-08-22 23:26:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:26:59] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:27:13] [Rank 0] PRINT: step:2000/10000 val_loss:4.2950 svd_entropy: attn_qk:H=0.5009,top10E=0.66,eRank=53.2,q75/q25=79.53 attn_vo:H=0.7049,top10E=0.37,eRank=183.8,q75/q25=24.45 mlp_w1:H=0.9135,top10E=0.11,eRank=449.7,q75/q25=5.07 mlp_w2:H=0.9030,top10E=0.13,eRank=429.3,q75/q25=6.57 vo_prod:H=0.4684,top10E=0.68,eRank=57.8,q75/q25=276.17 train_time:181139ms step_avg:90.57ms +[2025-08-22 23:27:13] [Rank 0] PRINT: step:2000/10000 val_loss:4.2950 svd_entropy: attn_qk:H=0.5009,top10E=0.66,eRank=53.2,q75/q25=79.53 attn_vo:H=0.7049,top10E=0.37,eRank=183.8,q75/q25=24.45 mlp_w1:H=0.9135,top10E=0.11,eRank=449.7,q75/q25=5.07 mlp_w2:H=0.9030,top10E=0.13,eRank=429.3,q75/q25=6.57 vo_prod:H=0.4684,top10E=0.68,eRank=57.8,q75/q25=276.17 train_time:181139ms step_avg:90.57ms +[2025-08-22 23:27:13] [Rank 0] step:2001/10000 train_time:181160ms step_avg:90.53ms +[2025-08-22 23:27:13] [Rank 0] step:2001/10000 train_time:181160ms step_avg:90.53ms +[2025-08-22 23:27:15] [Rank 0] step:2021/10000 train_time:182966ms step_avg:90.53ms +[2025-08-22 23:27:15] [Rank 0] step:2021/10000 train_time:182966ms step_avg:90.53ms +[2025-08-22 23:27:18] [Rank 0] step:2041/10000 train_time:185448ms step_avg:90.86ms +[2025-08-22 23:27:18] [Rank 0] step:2041/10000 train_time:185448ms step_avg:90.86ms +[2025-08-22 23:27:19] [Rank 0] step:2061/10000 train_time:187269ms step_avg:90.86ms +[2025-08-22 23:27:19] [Rank 0] step:2061/10000 train_time:187269ms step_avg:90.86ms +[2025-08-22 23:27:21] [Rank 0] step:2081/10000 train_time:189094ms step_avg:90.87ms +[2025-08-22 23:27:21] [Rank 0] step:2081/10000 train_time:189094ms step_avg:90.87ms +[2025-08-22 23:27:23] [Rank 0] step:2101/10000 train_time:190919ms step_avg:90.87ms +[2025-08-22 23:27:23] [Rank 0] step:2101/10000 train_time:190919ms step_avg:90.87ms +[2025-08-22 23:27:25] [Rank 0] step:2121/10000 train_time:192745ms step_avg:90.87ms +[2025-08-22 23:27:25] [Rank 0] step:2121/10000 train_time:192745ms step_avg:90.87ms +[2025-08-22 23:27:27] 
[Rank 0] step:2141/10000 train_time:194571ms step_avg:90.88ms +[2025-08-22 23:27:27] [Rank 0] step:2141/10000 train_time:194571ms step_avg:90.88ms +[2025-08-22 23:27:29] [Rank 0] step:2161/10000 train_time:196401ms step_avg:90.88ms +[2025-08-22 23:27:29] [Rank 0] step:2161/10000 train_time:196401ms step_avg:90.88ms +[2025-08-22 23:27:30] [Rank 0] step:2181/10000 train_time:198229ms step_avg:90.89ms +[2025-08-22 23:27:30] [Rank 0] step:2181/10000 train_time:198229ms step_avg:90.89ms +[2025-08-22 23:27:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:27:32] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:27:46] [Rank 0] PRINT: step:2200/10000 val_loss:4.2544 svd_entropy: attn_qk:H=0.4980,top10E=0.66,eRank=53.4,q75/q25=81.93 attn_vo:H=0.7051,top10E=0.37,eRank=183.8,q75/q25=24.65 mlp_w1:H=0.9122,top10E=0.11,eRank=446.6,q75/q25=5.08 mlp_w2:H=0.9009,top10E=0.13,eRank=425.0,q75/q25=6.61 vo_prod:H=0.4753,top10E=0.67,eRank=58.1,q75/q25=272.45 train_time:200064ms step_avg:90.94ms +[2025-08-22 23:27:46] [Rank 0] PRINT: step:2200/10000 val_loss:4.2544 svd_entropy: attn_qk:H=0.4980,top10E=0.66,eRank=53.4,q75/q25=81.93 attn_vo:H=0.7051,top10E=0.37,eRank=183.8,q75/q25=24.65 mlp_w1:H=0.9122,top10E=0.11,eRank=446.6,q75/q25=5.08 mlp_w2:H=0.9009,top10E=0.13,eRank=425.0,q75/q25=6.61 vo_prod:H=0.4753,top10E=0.67,eRank=58.1,q75/q25=272.45 train_time:200064ms step_avg:90.94ms +[2025-08-22 23:27:46] [Rank 0] step:2201/10000 train_time:200085ms step_avg:90.91ms +[2025-08-22 23:27:46] [Rank 0] step:2201/10000 train_time:200085ms step_avg:90.91ms +[2025-08-22 23:27:48] [Rank 0] step:2221/10000 train_time:201906ms step_avg:90.91ms +[2025-08-22 23:27:48] [Rank 0] step:2221/10000 train_time:201906ms step_avg:90.91ms +[2025-08-22 23:27:50] [Rank 0] step:2241/10000 train_time:203762ms step_avg:90.92ms 
+[2025-08-22 23:27:50] [Rank 0] step:2241/10000 train_time:203762ms step_avg:90.92ms +[2025-08-22 23:27:52] [Rank 0] step:2261/10000 train_time:205636ms step_avg:90.95ms +[2025-08-22 23:27:52] [Rank 0] step:2261/10000 train_time:205636ms step_avg:90.95ms +[2025-08-22 23:27:54] [Rank 0] step:2281/10000 train_time:207500ms step_avg:90.97ms +[2025-08-22 23:27:54] [Rank 0] step:2281/10000 train_time:207500ms step_avg:90.97ms +[2025-08-22 23:27:55] [Rank 0] step:2301/10000 train_time:209366ms step_avg:90.99ms +[2025-08-22 23:27:55] [Rank 0] step:2301/10000 train_time:209366ms step_avg:90.99ms +[2025-08-22 23:27:57] [Rank 0] step:2321/10000 train_time:211233ms step_avg:91.01ms +[2025-08-22 23:27:57] [Rank 0] step:2321/10000 train_time:211233ms step_avg:91.01ms +[2025-08-22 23:27:59] [Rank 0] step:2341/10000 train_time:213100ms step_avg:91.03ms +[2025-08-22 23:27:59] [Rank 0] step:2341/10000 train_time:213100ms step_avg:91.03ms +[2025-08-22 23:28:01] [Rank 0] step:2361/10000 train_time:214967ms step_avg:91.05ms +[2025-08-22 23:28:01] [Rank 0] step:2361/10000 train_time:214967ms step_avg:91.05ms +[2025-08-22 23:28:03] [Rank 0] step:2381/10000 train_time:216834ms step_avg:91.07ms +[2025-08-22 23:28:03] [Rank 0] step:2381/10000 train_time:216834ms step_avg:91.07ms +[2025-08-22 23:28:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:28:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:28:18] [Rank 0] PRINT: step:2400/10000 val_loss:4.3729 svd_entropy: attn_qk:H=0.4950,top10E=0.66,eRank=53.8,q75/q25=81.09 attn_vo:H=0.7041,top10E=0.37,eRank=183.8,q75/q25=24.61 mlp_w1:H=0.9103,top10E=0.12,eRank=442.2,q75/q25=5.12 mlp_w2:H=0.8987,top10E=0.13,eRank=420.3,q75/q25=6.62 vo_prod:H=0.4726,top10E=0.67,eRank=58.6,q75/q25=274.59 train_time:218707ms step_avg:91.13ms +[2025-08-22 23:28:18] [Rank 0] PRINT: step:2400/10000 val_loss:4.3729 svd_entropy: attn_qk:H=0.4950,top10E=0.66,eRank=53.8,q75/q25=81.09 attn_vo:H=0.7041,top10E=0.37,eRank=183.8,q75/q25=24.61 mlp_w1:H=0.9103,top10E=0.12,eRank=442.2,q75/q25=5.12 mlp_w2:H=0.8987,top10E=0.13,eRank=420.3,q75/q25=6.62 vo_prod:H=0.4726,top10E=0.67,eRank=58.6,q75/q25=274.59 train_time:218707ms step_avg:91.13ms +[2025-08-22 23:28:19] [Rank 0] step:2401/10000 train_time:218728ms step_avg:91.10ms +[2025-08-22 23:28:19] [Rank 0] step:2401/10000 train_time:218728ms step_avg:91.10ms +[2025-08-22 23:28:20] [Rank 0] step:2421/10000 train_time:220591ms step_avg:91.12ms +[2025-08-22 23:28:20] [Rank 0] step:2421/10000 train_time:220591ms step_avg:91.12ms +[2025-08-22 23:28:22] [Rank 0] step:2441/10000 train_time:222452ms step_avg:91.13ms +[2025-08-22 23:28:22] [Rank 0] step:2441/10000 train_time:222452ms step_avg:91.13ms +[2025-08-22 23:28:24] [Rank 0] step:2461/10000 train_time:224317ms step_avg:91.15ms +[2025-08-22 23:28:24] [Rank 0] step:2461/10000 train_time:224317ms step_avg:91.15ms +[2025-08-22 23:28:26] [Rank 0] step:2481/10000 train_time:226181ms step_avg:91.17ms +[2025-08-22 23:28:26] [Rank 0] step:2481/10000 train_time:226181ms step_avg:91.17ms +[2025-08-22 23:28:28] [Rank 0] step:2501/10000 train_time:228045ms step_avg:91.18ms +[2025-08-22 23:28:28] [Rank 0] step:2501/10000 train_time:228045ms step_avg:91.18ms +[2025-08-22 23:28:30] [Rank 0] step:2521/10000 train_time:229909ms step_avg:91.20ms +[2025-08-22 23:28:30] [Rank 0] step:2521/10000 train_time:229909ms step_avg:91.20ms +[2025-08-22 23:28:32] 
[Rank 0] step:2541/10000 train_time:231774ms step_avg:91.21ms +[2025-08-22 23:28:32] [Rank 0] step:2541/10000 train_time:231774ms step_avg:91.21ms +[2025-08-22 23:28:34] [Rank 0] step:2561/10000 train_time:233638ms step_avg:91.23ms +[2025-08-22 23:28:34] [Rank 0] step:2561/10000 train_time:233638ms step_avg:91.23ms +[2025-08-22 23:28:35] [Rank 0] step:2581/10000 train_time:235504ms step_avg:91.25ms +[2025-08-22 23:28:35] [Rank 0] step:2581/10000 train_time:235504ms step_avg:91.25ms +[2025-08-22 23:28:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:28:37] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:28:51] [Rank 0] PRINT: step:2600/10000 val_loss:4.1694 svd_entropy: attn_qk:H=0.4984,top10E=0.65,eRank=54.5,q75/q25=77.53 attn_vo:H=0.7033,top10E=0.37,eRank=182.5,q75/q25=24.64 mlp_w1:H=0.9043,top10E=0.13,eRank=428.3,q75/q25=5.19 mlp_w2:H=0.8920,top10E=0.14,eRank=407.6,q75/q25=6.69 vo_prod:H=0.4621,top10E=0.68,eRank=58.5,q75/q25=278.75 train_time:237377ms step_avg:91.30ms +[2025-08-22 23:28:51] [Rank 0] PRINT: step:2600/10000 val_loss:4.1694 svd_entropy: attn_qk:H=0.4984,top10E=0.65,eRank=54.5,q75/q25=77.53 attn_vo:H=0.7033,top10E=0.37,eRank=182.5,q75/q25=24.64 mlp_w1:H=0.9043,top10E=0.13,eRank=428.3,q75/q25=5.19 mlp_w2:H=0.8920,top10E=0.14,eRank=407.6,q75/q25=6.69 vo_prod:H=0.4621,top10E=0.68,eRank=58.5,q75/q25=278.75 train_time:237377ms step_avg:91.30ms +[2025-08-22 23:28:51] [Rank 0] step:2601/10000 train_time:237397ms step_avg:91.27ms +[2025-08-22 23:28:51] [Rank 0] step:2601/10000 train_time:237397ms step_avg:91.27ms +[2025-08-22 23:28:53] [Rank 0] step:2621/10000 train_time:239250ms step_avg:91.28ms +[2025-08-22 23:28:53] [Rank 0] step:2621/10000 train_time:239250ms step_avg:91.28ms +[2025-08-22 23:28:55] [Rank 0] step:2641/10000 train_time:241110ms step_avg:91.30ms 
+[2025-08-22 23:28:55] [Rank 0] step:2641/10000 train_time:241110ms step_avg:91.30ms +[2025-08-22 23:28:57] [Rank 0] step:2661/10000 train_time:242973ms step_avg:91.31ms +[2025-08-22 23:28:57] [Rank 0] step:2661/10000 train_time:242973ms step_avg:91.31ms +[2025-08-22 23:28:59] [Rank 0] step:2681/10000 train_time:244837ms step_avg:91.32ms +[2025-08-22 23:28:59] [Rank 0] step:2681/10000 train_time:244837ms step_avg:91.32ms +[2025-08-22 23:29:00] [Rank 0] step:2701/10000 train_time:246702ms step_avg:91.34ms +[2025-08-22 23:29:00] [Rank 0] step:2701/10000 train_time:246702ms step_avg:91.34ms +[2025-08-22 23:29:02] [Rank 0] step:2721/10000 train_time:248566ms step_avg:91.35ms +[2025-08-22 23:29:02] [Rank 0] step:2721/10000 train_time:248566ms step_avg:91.35ms +[2025-08-22 23:29:04] [Rank 0] step:2741/10000 train_time:250430ms step_avg:91.36ms +[2025-08-22 23:29:04] [Rank 0] step:2741/10000 train_time:250430ms step_avg:91.36ms +[2025-08-22 23:29:06] [Rank 0] step:2761/10000 train_time:252297ms step_avg:91.38ms +[2025-08-22 23:29:06] [Rank 0] step:2761/10000 train_time:252297ms step_avg:91.38ms +[2025-08-22 23:29:08] [Rank 0] step:2781/10000 train_time:254167ms step_avg:91.39ms +[2025-08-22 23:29:08] [Rank 0] step:2781/10000 train_time:254167ms step_avg:91.39ms +[2025-08-22 23:29:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:29:10] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:29:23] [Rank 0] PRINT: step:2800/10000 val_loss:4.1514 svd_entropy: attn_qk:H=0.5000,top10E=0.65,eRank=55.0,q75/q25=79.75 attn_vo:H=0.7055,top10E=0.37,eRank=183.4,q75/q25=24.70 mlp_w1:H=0.9035,top10E=0.13,eRank=426.3,q75/q25=5.22 mlp_w2:H=0.8905,top10E=0.15,eRank=404.6,q75/q25=6.78 vo_prod:H=0.4699,top10E=0.67,eRank=59.2,q75/q25=281.17 train_time:256117ms step_avg:91.47ms +[2025-08-22 23:29:23] [Rank 0] PRINT: step:2800/10000 val_loss:4.1514 svd_entropy: attn_qk:H=0.5000,top10E=0.65,eRank=55.0,q75/q25=79.75 attn_vo:H=0.7055,top10E=0.37,eRank=183.4,q75/q25=24.70 mlp_w1:H=0.9035,top10E=0.13,eRank=426.3,q75/q25=5.22 mlp_w2:H=0.8905,top10E=0.15,eRank=404.6,q75/q25=6.78 vo_prod:H=0.4699,top10E=0.67,eRank=59.2,q75/q25=281.17 train_time:256117ms step_avg:91.47ms +[2025-08-22 23:29:24] [Rank 0] step:2801/10000 train_time:256136ms step_avg:91.44ms +[2025-08-22 23:29:24] [Rank 0] step:2801/10000 train_time:256136ms step_avg:91.44ms +[2025-08-22 23:29:26] [Rank 0] step:2821/10000 train_time:257998ms step_avg:91.46ms +[2025-08-22 23:29:26] [Rank 0] step:2821/10000 train_time:257998ms step_avg:91.46ms +[2025-08-22 23:29:27] [Rank 0] step:2841/10000 train_time:259863ms step_avg:91.47ms +[2025-08-22 23:29:27] [Rank 0] step:2841/10000 train_time:259863ms step_avg:91.47ms +[2025-08-22 23:29:29] [Rank 0] step:2861/10000 train_time:261731ms step_avg:91.48ms +[2025-08-22 23:29:29] [Rank 0] step:2861/10000 train_time:261731ms step_avg:91.48ms +[2025-08-22 23:29:31] [Rank 0] step:2881/10000 train_time:263599ms step_avg:91.50ms +[2025-08-22 23:29:31] [Rank 0] step:2881/10000 train_time:263599ms step_avg:91.50ms +[2025-08-22 23:29:33] [Rank 0] step:2901/10000 train_time:265468ms step_avg:91.51ms +[2025-08-22 23:29:33] [Rank 0] step:2901/10000 train_time:265468ms step_avg:91.51ms +[2025-08-22 23:29:35] [Rank 0] step:2921/10000 train_time:267337ms step_avg:91.52ms +[2025-08-22 23:29:35] [Rank 0] step:2921/10000 train_time:267337ms step_avg:91.52ms +[2025-08-22 23:29:37] 
[Rank 0] step:2941/10000 train_time:269209ms step_avg:91.54ms +[2025-08-22 23:29:37] [Rank 0] step:2941/10000 train_time:269209ms step_avg:91.54ms +[2025-08-22 23:29:39] [Rank 0] step:2961/10000 train_time:271081ms step_avg:91.55ms +[2025-08-22 23:29:39] [Rank 0] step:2961/10000 train_time:271081ms step_avg:91.55ms +[2025-08-22 23:29:40] [Rank 0] step:2981/10000 train_time:272960ms step_avg:91.57ms +[2025-08-22 23:29:40] [Rank 0] step:2981/10000 train_time:272960ms step_avg:91.57ms +[2025-08-22 23:29:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:29:42] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:29:56] [Rank 0] PRINT: step:3000/10000 val_loss:4.1218 svd_entropy: attn_qk:H=0.5003,top10E=0.65,eRank=55.3,q75/q25=77.00 attn_vo:H=0.7082,top10E=0.36,eRank=184.2,q75/q25=24.63 mlp_w1:H=0.9026,top10E=0.13,eRank=424.5,q75/q25=5.24 mlp_w2:H=0.8892,top10E=0.15,eRank=401.9,q75/q25=6.79 vo_prod:H=0.4793,top10E=0.67,eRank=60.0,q75/q25=275.01 train_time:274846ms step_avg:91.62ms +[2025-08-22 23:29:56] [Rank 0] PRINT: step:3000/10000 val_loss:4.1218 svd_entropy: attn_qk:H=0.5003,top10E=0.65,eRank=55.3,q75/q25=77.00 attn_vo:H=0.7082,top10E=0.36,eRank=184.2,q75/q25=24.63 mlp_w1:H=0.9026,top10E=0.13,eRank=424.5,q75/q25=5.24 mlp_w2:H=0.8892,top10E=0.15,eRank=401.9,q75/q25=6.79 vo_prod:H=0.4793,top10E=0.67,eRank=60.0,q75/q25=275.01 train_time:274846ms step_avg:91.62ms +[2025-08-22 23:29:56] [Rank 0] step:3001/10000 train_time:274867ms step_avg:91.59ms +[2025-08-22 23:29:56] [Rank 0] step:3001/10000 train_time:274867ms step_avg:91.59ms +[2025-08-22 23:29:58] [Rank 0] step:3021/10000 train_time:276743ms step_avg:91.61ms +[2025-08-22 23:29:58] [Rank 0] step:3021/10000 train_time:276743ms step_avg:91.61ms +[2025-08-22 23:30:00] [Rank 0] step:3041/10000 train_time:278614ms step_avg:91.62ms 
+[2025-08-22 23:30:00] [Rank 0] step:3041/10000 train_time:278614ms step_avg:91.62ms +[2025-08-22 23:30:02] [Rank 0] step:3061/10000 train_time:280490ms step_avg:91.63ms +[2025-08-22 23:30:02] [Rank 0] step:3061/10000 train_time:280490ms step_avg:91.63ms +[2025-08-22 23:30:04] [Rank 0] step:3081/10000 train_time:282371ms step_avg:91.65ms +[2025-08-22 23:30:04] [Rank 0] step:3081/10000 train_time:282371ms step_avg:91.65ms +[2025-08-22 23:30:06] [Rank 0] step:3101/10000 train_time:284247ms step_avg:91.66ms +[2025-08-22 23:30:06] [Rank 0] step:3101/10000 train_time:284247ms step_avg:91.66ms +[2025-08-22 23:30:08] [Rank 0] step:3121/10000 train_time:286120ms step_avg:91.68ms +[2025-08-22 23:30:08] [Rank 0] step:3121/10000 train_time:286120ms step_avg:91.68ms +[2025-08-22 23:30:09] [Rank 0] step:3141/10000 train_time:287995ms step_avg:91.69ms +[2025-08-22 23:30:09] [Rank 0] step:3141/10000 train_time:287995ms step_avg:91.69ms +[2025-08-22 23:30:11] [Rank 0] step:3161/10000 train_time:289871ms step_avg:91.70ms +[2025-08-22 23:30:11] [Rank 0] step:3161/10000 train_time:289871ms step_avg:91.70ms +[2025-08-22 23:30:13] [Rank 0] step:3181/10000 train_time:291795ms step_avg:91.73ms +[2025-08-22 23:30:13] [Rank 0] step:3181/10000 train_time:291795ms step_avg:91.73ms +[2025-08-22 23:30:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:30:15] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:30:29] [Rank 0] PRINT: step:3200/10000 val_loss:4.1365 svd_entropy: attn_qk:H=0.5053,top10E=0.65,eRank=56.3,q75/q25=75.31 attn_vo:H=0.7078,top10E=0.37,eRank=183.6,q75/q25=24.59 mlp_w1:H=0.9006,top10E=0.13,eRank=420.5,q75/q25=5.27 mlp_w2:H=0.8873,top10E=0.15,eRank=398.8,q75/q25=6.79 vo_prod:H=0.4783,top10E=0.67,eRank=59.9,q75/q25=271.79 train_time:293737ms step_avg:91.79ms +[2025-08-22 23:30:29] [Rank 0] PRINT: step:3200/10000 val_loss:4.1365 svd_entropy: attn_qk:H=0.5053,top10E=0.65,eRank=56.3,q75/q25=75.31 attn_vo:H=0.7078,top10E=0.37,eRank=183.6,q75/q25=24.59 mlp_w1:H=0.9006,top10E=0.13,eRank=420.5,q75/q25=5.27 mlp_w2:H=0.8873,top10E=0.15,eRank=398.8,q75/q25=6.79 vo_prod:H=0.4783,top10E=0.67,eRank=59.9,q75/q25=271.79 train_time:293737ms step_avg:91.79ms +[2025-08-22 23:30:29] [Rank 0] step:3201/10000 train_time:293758ms step_avg:91.77ms +[2025-08-22 23:30:29] [Rank 0] step:3201/10000 train_time:293758ms step_avg:91.77ms +[2025-08-22 23:30:31] [Rank 0] step:3221/10000 train_time:295624ms step_avg:91.78ms +[2025-08-22 23:30:31] [Rank 0] step:3221/10000 train_time:295624ms step_avg:91.78ms +[2025-08-22 23:30:33] [Rank 0] step:3241/10000 train_time:297493ms step_avg:91.79ms +[2025-08-22 23:30:33] [Rank 0] step:3241/10000 train_time:297493ms step_avg:91.79ms +[2025-08-22 23:30:35] [Rank 0] step:3261/10000 train_time:299362ms step_avg:91.80ms +[2025-08-22 23:30:35] [Rank 0] step:3261/10000 train_time:299362ms step_avg:91.80ms +[2025-08-22 23:30:37] [Rank 0] step:3281/10000 train_time:301232ms step_avg:91.81ms +[2025-08-22 23:30:37] [Rank 0] step:3281/10000 train_time:301232ms step_avg:91.81ms +[2025-08-22 23:30:38] [Rank 0] step:3301/10000 train_time:303103ms step_avg:91.82ms +[2025-08-22 23:30:38] [Rank 0] step:3301/10000 train_time:303103ms step_avg:91.82ms +[2025-08-22 23:30:40] [Rank 0] step:3321/10000 train_time:304975ms step_avg:91.83ms +[2025-08-22 23:30:40] [Rank 0] step:3321/10000 train_time:304975ms step_avg:91.83ms +[2025-08-22 23:30:42] 
[Rank 0] step:3341/10000 train_time:306849ms step_avg:91.84ms +[2025-08-22 23:30:42] [Rank 0] step:3341/10000 train_time:306849ms step_avg:91.84ms +[2025-08-22 23:30:44] [Rank 0] step:3361/10000 train_time:308724ms step_avg:91.85ms +[2025-08-22 23:30:44] [Rank 0] step:3361/10000 train_time:308724ms step_avg:91.85ms +[2025-08-22 23:30:46] [Rank 0] step:3381/10000 train_time:310598ms step_avg:91.87ms +[2025-08-22 23:30:46] [Rank 0] step:3381/10000 train_time:310598ms step_avg:91.87ms +[2025-08-22 23:30:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:30:48] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:31:01] [Rank 0] PRINT: step:3400/10000 val_loss:4.0791 svd_entropy: attn_qk:H=0.5080,top10E=0.64,eRank=56.8,q75/q25=75.40 attn_vo:H=0.7080,top10E=0.37,eRank=182.9,q75/q25=24.50 mlp_w1:H=0.8964,top10E=0.13,eRank=411.7,q75/q25=5.34 mlp_w2:H=0.8839,top10E=0.15,eRank=392.5,q75/q25=6.84 vo_prod:H=0.4779,top10E=0.67,eRank=60.5,q75/q25=271.50 train_time:312481ms step_avg:91.91ms +[2025-08-22 23:31:01] [Rank 0] PRINT: step:3400/10000 val_loss:4.0791 svd_entropy: attn_qk:H=0.5080,top10E=0.64,eRank=56.8,q75/q25=75.40 attn_vo:H=0.7080,top10E=0.37,eRank=182.9,q75/q25=24.50 mlp_w1:H=0.8964,top10E=0.13,eRank=411.7,q75/q25=5.34 mlp_w2:H=0.8839,top10E=0.15,eRank=392.5,q75/q25=6.84 vo_prod:H=0.4779,top10E=0.67,eRank=60.5,q75/q25=271.50 train_time:312481ms step_avg:91.91ms +[2025-08-22 23:31:02] [Rank 0] step:3401/10000 train_time:312501ms step_avg:91.89ms +[2025-08-22 23:31:02] [Rank 0] step:3401/10000 train_time:312501ms step_avg:91.89ms +[2025-08-22 23:31:03] [Rank 0] step:3421/10000 train_time:314358ms step_avg:91.89ms +[2025-08-22 23:31:03] [Rank 0] step:3421/10000 train_time:314358ms step_avg:91.89ms +[2025-08-22 23:31:05] [Rank 0] step:3441/10000 train_time:316226ms step_avg:91.90ms 
+[2025-08-22 23:31:05] [Rank 0] step:3441/10000 train_time:316226ms step_avg:91.90ms +[2025-08-22 23:31:07] [Rank 0] step:3461/10000 train_time:318097ms step_avg:91.91ms +[2025-08-22 23:31:07] [Rank 0] step:3461/10000 train_time:318097ms step_avg:91.91ms +[2025-08-22 23:31:09] [Rank 0] step:3481/10000 train_time:319968ms step_avg:91.92ms +[2025-08-22 23:31:09] [Rank 0] step:3481/10000 train_time:319968ms step_avg:91.92ms +[2025-08-22 23:31:11] [Rank 0] step:3501/10000 train_time:321842ms step_avg:91.93ms +[2025-08-22 23:31:11] [Rank 0] step:3501/10000 train_time:321842ms step_avg:91.93ms +[2025-08-22 23:31:13] [Rank 0] step:3521/10000 train_time:323719ms step_avg:91.94ms +[2025-08-22 23:31:13] [Rank 0] step:3521/10000 train_time:323719ms step_avg:91.94ms +[2025-08-22 23:31:15] [Rank 0] step:3541/10000 train_time:325593ms step_avg:91.95ms +[2025-08-22 23:31:15] [Rank 0] step:3541/10000 train_time:325593ms step_avg:91.95ms +[2025-08-22 23:31:17] [Rank 0] step:3561/10000 train_time:327530ms step_avg:91.98ms +[2025-08-22 23:31:17] [Rank 0] step:3561/10000 train_time:327530ms step_avg:91.98ms +[2025-08-22 23:31:19] [Rank 0] step:3581/10000 train_time:329492ms step_avg:92.01ms +[2025-08-22 23:31:19] [Rank 0] step:3581/10000 train_time:329492ms step_avg:92.01ms +[2025-08-22 23:31:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:31:20] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:31:34] [Rank 0] PRINT: step:3600/10000 val_loss:4.0768 svd_entropy: attn_qk:H=0.5112,top10E=0.64,eRank=57.6,q75/q25=75.06 attn_vo:H=0.7082,top10E=0.37,eRank=182.5,q75/q25=24.53 mlp_w1:H=0.8944,top10E=0.14,eRank=407.6,q75/q25=5.38 mlp_w2:H=0.8817,top10E=0.16,eRank=388.6,q75/q25=6.87 vo_prod:H=0.4757,top10E=0.67,eRank=60.8,q75/q25=273.32 train_time:331375ms step_avg:92.05ms +[2025-08-22 23:31:34] [Rank 0] PRINT: step:3600/10000 val_loss:4.0768 svd_entropy: attn_qk:H=0.5112,top10E=0.64,eRank=57.6,q75/q25=75.06 attn_vo:H=0.7082,top10E=0.37,eRank=182.5,q75/q25=24.53 mlp_w1:H=0.8944,top10E=0.14,eRank=407.6,q75/q25=5.38 mlp_w2:H=0.8817,top10E=0.16,eRank=388.6,q75/q25=6.87 vo_prod:H=0.4757,top10E=0.67,eRank=60.8,q75/q25=273.32 train_time:331375ms step_avg:92.05ms +[2025-08-22 23:31:34] [Rank 0] step:3601/10000 train_time:331394ms step_avg:92.03ms +[2025-08-22 23:31:34] [Rank 0] step:3601/10000 train_time:331394ms step_avg:92.03ms +[2025-08-22 23:31:36] [Rank 0] step:3621/10000 train_time:333285ms step_avg:92.04ms +[2025-08-22 23:31:36] [Rank 0] step:3621/10000 train_time:333285ms step_avg:92.04ms +[2025-08-22 23:31:38] [Rank 0] step:3641/10000 train_time:335158ms step_avg:92.05ms +[2025-08-22 23:31:38] [Rank 0] step:3641/10000 train_time:335158ms step_avg:92.05ms +[2025-08-22 23:31:40] [Rank 0] step:3661/10000 train_time:337033ms step_avg:92.06ms +[2025-08-22 23:31:40] [Rank 0] step:3661/10000 train_time:337033ms step_avg:92.06ms +[2025-08-22 23:31:42] [Rank 0] step:3681/10000 train_time:338910ms step_avg:92.07ms +[2025-08-22 23:31:42] [Rank 0] step:3681/10000 train_time:338910ms step_avg:92.07ms +[2025-08-22 23:31:44] [Rank 0] step:3701/10000 train_time:340789ms step_avg:92.08ms +[2025-08-22 23:31:44] [Rank 0] step:3701/10000 train_time:340789ms step_avg:92.08ms +[2025-08-22 23:31:46] [Rank 0] step:3721/10000 train_time:342697ms step_avg:92.10ms +[2025-08-22 23:31:46] [Rank 0] step:3721/10000 train_time:342697ms step_avg:92.10ms +[2025-08-22 23:31:48] 
[Rank 0] step:3741/10000 train_time:344614ms step_avg:92.12ms +[2025-08-22 23:31:48] [Rank 0] step:3741/10000 train_time:344614ms step_avg:92.12ms +[2025-08-22 23:31:50] [Rank 0] step:3761/10000 train_time:346531ms step_avg:92.14ms +[2025-08-22 23:31:50] [Rank 0] step:3761/10000 train_time:346531ms step_avg:92.14ms +[2025-08-22 23:31:51] [Rank 0] step:3781/10000 train_time:348450ms step_avg:92.16ms +[2025-08-22 23:31:51] [Rank 0] step:3781/10000 train_time:348450ms step_avg:92.16ms +[2025-08-22 23:31:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:31:53] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:32:07] [Rank 0] PRINT: step:3800/10000 val_loss:4.0467 svd_entropy: attn_qk:H=0.5155,top10E=0.63,eRank=58.3,q75/q25=71.03 attn_vo:H=0.7090,top10E=0.37,eRank=182.4,q75/q25=24.40 mlp_w1:H=0.8927,top10E=0.14,eRank=404.1,q75/q25=5.40 mlp_w2:H=0.8801,top10E=0.16,eRank=385.9,q75/q25=6.92 vo_prod:H=0.4753,top10E=0.67,eRank=60.6,q75/q25=268.96 train_time:350375ms step_avg:92.20ms +[2025-08-22 23:32:07] [Rank 0] PRINT: step:3800/10000 val_loss:4.0467 svd_entropy: attn_qk:H=0.5155,top10E=0.63,eRank=58.3,q75/q25=71.03 attn_vo:H=0.7090,top10E=0.37,eRank=182.4,q75/q25=24.40 mlp_w1:H=0.8927,top10E=0.14,eRank=404.1,q75/q25=5.40 mlp_w2:H=0.8801,top10E=0.16,eRank=385.9,q75/q25=6.92 vo_prod:H=0.4753,top10E=0.67,eRank=60.6,q75/q25=268.96 train_time:350375ms step_avg:92.20ms +[2025-08-22 23:32:07] [Rank 0] step:3801/10000 train_time:350395ms step_avg:92.18ms +[2025-08-22 23:32:07] [Rank 0] step:3801/10000 train_time:350395ms step_avg:92.18ms +[2025-08-22 23:32:09] [Rank 0] step:3821/10000 train_time:352315ms step_avg:92.20ms +[2025-08-22 23:32:09] [Rank 0] step:3821/10000 train_time:352315ms step_avg:92.20ms +[2025-08-22 23:32:11] [Rank 0] step:3841/10000 train_time:354225ms step_avg:92.22ms 
+[2025-08-22 23:32:11] [Rank 0] step:3841/10000 train_time:354225ms step_avg:92.22ms +[2025-08-22 23:32:13] [Rank 0] step:3861/10000 train_time:356137ms step_avg:92.24ms +[2025-08-22 23:32:13] [Rank 0] step:3861/10000 train_time:356137ms step_avg:92.24ms +[2025-08-22 23:32:15] [Rank 0] step:3881/10000 train_time:358047ms step_avg:92.26ms +[2025-08-22 23:32:15] [Rank 0] step:3881/10000 train_time:358047ms step_avg:92.26ms +[2025-08-22 23:32:17] [Rank 0] step:3901/10000 train_time:359958ms step_avg:92.27ms +[2025-08-22 23:32:17] [Rank 0] step:3901/10000 train_time:359958ms step_avg:92.27ms +[2025-08-22 23:32:19] [Rank 0] step:3921/10000 train_time:361876ms step_avg:92.29ms +[2025-08-22 23:32:19] [Rank 0] step:3921/10000 train_time:361876ms step_avg:92.29ms +[2025-08-22 23:32:21] [Rank 0] step:3941/10000 train_time:363853ms step_avg:92.33ms +[2025-08-22 23:32:21] [Rank 0] step:3941/10000 train_time:363853ms step_avg:92.33ms +[2025-08-22 23:32:23] [Rank 0] step:3961/10000 train_time:365826ms step_avg:92.36ms +[2025-08-22 23:32:23] [Rank 0] step:3961/10000 train_time:365826ms step_avg:92.36ms +[2025-08-22 23:32:25] [Rank 0] step:3981/10000 train_time:367738ms step_avg:92.37ms +[2025-08-22 23:32:25] [Rank 0] step:3981/10000 train_time:367738ms step_avg:92.37ms +[2025-08-22 23:32:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:32:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:32:40] [Rank 0] PRINT: step:4000/10000 val_loss:4.0255 svd_entropy: attn_qk:H=0.5194,top10E=0.63,eRank=59.0,q75/q25=71.52 attn_vo:H=0.7082,top10E=0.37,eRank=181.8,q75/q25=24.42 mlp_w1:H=0.8918,top10E=0.14,eRank=401.9,q75/q25=5.44 mlp_w2:H=0.8791,top10E=0.16,eRank=383.9,q75/q25=6.97 vo_prod:H=0.4768,top10E=0.67,eRank=60.8,q75/q25=269.08 train_time:369655ms step_avg:92.41ms +[2025-08-22 23:32:40] [Rank 0] PRINT: step:4000/10000 val_loss:4.0255 svd_entropy: attn_qk:H=0.5194,top10E=0.63,eRank=59.0,q75/q25=71.52 attn_vo:H=0.7082,top10E=0.37,eRank=181.8,q75/q25=24.42 mlp_w1:H=0.8918,top10E=0.14,eRank=401.9,q75/q25=5.44 mlp_w2:H=0.8791,top10E=0.16,eRank=383.9,q75/q25=6.97 vo_prod:H=0.4768,top10E=0.67,eRank=60.8,q75/q25=269.08 train_time:369655ms step_avg:92.41ms +[2025-08-22 23:32:40] [Rank 0] step:4001/10000 train_time:369675ms step_avg:92.40ms +[2025-08-22 23:32:40] [Rank 0] step:4001/10000 train_time:369675ms step_avg:92.40ms +[2025-08-22 23:32:42] [Rank 0] step:4021/10000 train_time:371586ms step_avg:92.41ms +[2025-08-22 23:32:42] [Rank 0] step:4021/10000 train_time:371586ms step_avg:92.41ms +[2025-08-22 23:32:44] [Rank 0] step:4041/10000 train_time:373494ms step_avg:92.43ms +[2025-08-22 23:32:44] [Rank 0] step:4041/10000 train_time:373494ms step_avg:92.43ms +[2025-08-22 23:32:46] [Rank 0] step:4061/10000 train_time:375399ms step_avg:92.44ms +[2025-08-22 23:32:46] [Rank 0] step:4061/10000 train_time:375399ms step_avg:92.44ms +[2025-08-22 23:32:48] [Rank 0] step:4081/10000 train_time:377548ms step_avg:92.51ms +[2025-08-22 23:32:48] [Rank 0] step:4081/10000 train_time:377548ms step_avg:92.51ms +[2025-08-22 23:32:50] [Rank 0] step:4101/10000 train_time:379456ms step_avg:92.53ms +[2025-08-22 23:32:50] [Rank 0] step:4101/10000 train_time:379456ms step_avg:92.53ms +[2025-08-22 23:32:52] [Rank 0] step:4121/10000 train_time:381365ms step_avg:92.54ms +[2025-08-22 23:32:52] [Rank 0] step:4121/10000 train_time:381365ms step_avg:92.54ms +[2025-08-22 23:32:54] 
[Rank 0] step:4141/10000 train_time:383276ms step_avg:92.56ms +[2025-08-22 23:32:54] [Rank 0] step:4141/10000 train_time:383276ms step_avg:92.56ms +[2025-08-22 23:32:56] [Rank 0] step:4161/10000 train_time:385185ms step_avg:92.57ms +[2025-08-22 23:32:56] [Rank 0] step:4161/10000 train_time:385185ms step_avg:92.57ms +[2025-08-22 23:32:58] [Rank 0] step:4181/10000 train_time:387098ms step_avg:92.59ms +[2025-08-22 23:32:58] [Rank 0] step:4181/10000 train_time:387098ms step_avg:92.59ms +[2025-08-22 23:33:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:33:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:33:13] [Rank 0] PRINT: step:4200/10000 val_loss:4.0148 svd_entropy: attn_qk:H=0.5205,top10E=0.63,eRank=59.3,q75/q25=72.69 attn_vo:H=0.7092,top10E=0.37,eRank=182.4,q75/q25=24.45 mlp_w1:H=0.8915,top10E=0.14,eRank=401.2,q75/q25=5.47 mlp_w2:H=0.8787,top10E=0.16,eRank=382.8,q75/q25=7.00 vo_prod:H=0.4788,top10E=0.67,eRank=61.0,q75/q25=268.29 train_time:389016ms step_avg:92.62ms +[2025-08-22 23:33:13] [Rank 0] PRINT: step:4200/10000 val_loss:4.0148 svd_entropy: attn_qk:H=0.5205,top10E=0.63,eRank=59.3,q75/q25=72.69 attn_vo:H=0.7092,top10E=0.37,eRank=182.4,q75/q25=24.45 mlp_w1:H=0.8915,top10E=0.14,eRank=401.2,q75/q25=5.47 mlp_w2:H=0.8787,top10E=0.16,eRank=382.8,q75/q25=7.00 vo_prod:H=0.4788,top10E=0.67,eRank=61.0,q75/q25=268.29 train_time:389016ms step_avg:92.62ms +[2025-08-22 23:33:13] [Rank 0] step:4201/10000 train_time:389037ms step_avg:92.61ms +[2025-08-22 23:33:13] [Rank 0] step:4201/10000 train_time:389037ms step_avg:92.61ms +[2025-08-22 23:33:15] [Rank 0] step:4221/10000 train_time:390932ms step_avg:92.62ms +[2025-08-22 23:33:15] [Rank 0] step:4221/10000 train_time:390932ms step_avg:92.62ms +[2025-08-22 23:33:17] [Rank 0] step:4241/10000 train_time:392843ms step_avg:92.63ms 
+[2025-08-22 23:33:17] [Rank 0] step:4241/10000 train_time:392843ms step_avg:92.63ms +[2025-08-22 23:33:19] [Rank 0] step:4261/10000 train_time:394750ms step_avg:92.64ms +[2025-08-22 23:33:19] [Rank 0] step:4261/10000 train_time:394750ms step_avg:92.64ms +[2025-08-22 23:33:21] [Rank 0] step:4281/10000 train_time:396661ms step_avg:92.66ms +[2025-08-22 23:33:21] [Rank 0] step:4281/10000 train_time:396661ms step_avg:92.66ms +[2025-08-22 23:33:23] [Rank 0] step:4301/10000 train_time:398650ms step_avg:92.69ms +[2025-08-22 23:33:23] [Rank 0] step:4301/10000 train_time:398650ms step_avg:92.69ms +[2025-08-22 23:33:25] [Rank 0] step:4321/10000 train_time:400634ms step_avg:92.72ms +[2025-08-22 23:33:25] [Rank 0] step:4321/10000 train_time:400634ms step_avg:92.72ms +[2025-08-22 23:33:27] [Rank 0] step:4341/10000 train_time:402543ms step_avg:92.73ms +[2025-08-22 23:33:27] [Rank 0] step:4341/10000 train_time:402543ms step_avg:92.73ms +[2025-08-22 23:33:29] [Rank 0] step:4361/10000 train_time:404455ms step_avg:92.74ms +[2025-08-22 23:33:29] [Rank 0] step:4361/10000 train_time:404455ms step_avg:92.74ms +[2025-08-22 23:33:31] [Rank 0] step:4381/10000 train_time:406364ms step_avg:92.76ms +[2025-08-22 23:33:31] [Rank 0] step:4381/10000 train_time:406364ms step_avg:92.76ms +[2025-08-22 23:33:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:33:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:33:46] [Rank 0] PRINT: step:4400/10000 val_loss:4.0019 svd_entropy: attn_qk:H=0.5230,top10E=0.63,eRank=60.0,q75/q25=70.23 attn_vo:H=0.7088,top10E=0.37,eRank=181.2,q75/q25=24.27 mlp_w1:H=0.8873,top10E=0.15,eRank=393.5,q75/q25=5.51 mlp_w2:H=0.8756,top10E=0.17,eRank=377.7,q75/q25=7.03 vo_prod:H=0.4766,top10E=0.67,eRank=61.4,q75/q25=265.48 train_time:408282ms step_avg:92.79ms +[2025-08-22 23:33:46] [Rank 0] PRINT: step:4400/10000 val_loss:4.0019 svd_entropy: attn_qk:H=0.5230,top10E=0.63,eRank=60.0,q75/q25=70.23 attn_vo:H=0.7088,top10E=0.37,eRank=181.2,q75/q25=24.27 mlp_w1:H=0.8873,top10E=0.15,eRank=393.5,q75/q25=5.51 mlp_w2:H=0.8756,top10E=0.17,eRank=377.7,q75/q25=7.03 vo_prod:H=0.4766,top10E=0.67,eRank=61.4,q75/q25=265.48 train_time:408282ms step_avg:92.79ms +[2025-08-22 23:33:47] [Rank 0] step:4401/10000 train_time:408304ms step_avg:92.78ms +[2025-08-22 23:33:47] [Rank 0] step:4401/10000 train_time:408304ms step_avg:92.78ms +[2025-08-22 23:33:48] [Rank 0] step:4421/10000 train_time:410226ms step_avg:92.79ms +[2025-08-22 23:33:48] [Rank 0] step:4421/10000 train_time:410226ms step_avg:92.79ms +[2025-08-22 23:33:50] [Rank 0] step:4441/10000 train_time:412137ms step_avg:92.80ms +[2025-08-22 23:33:50] [Rank 0] step:4441/10000 train_time:412137ms step_avg:92.80ms +[2025-08-22 23:33:52] [Rank 0] step:4461/10000 train_time:414055ms step_avg:92.82ms +[2025-08-22 23:33:52] [Rank 0] step:4461/10000 train_time:414055ms step_avg:92.82ms +[2025-08-22 23:33:54] [Rank 0] step:4481/10000 train_time:415974ms step_avg:92.83ms +[2025-08-22 23:33:54] [Rank 0] step:4481/10000 train_time:415974ms step_avg:92.83ms +[2025-08-22 23:33:56] [Rank 0] step:4501/10000 train_time:417893ms step_avg:92.84ms +[2025-08-22 23:33:56] [Rank 0] step:4501/10000 train_time:417893ms step_avg:92.84ms +[2025-08-22 23:33:58] [Rank 0] step:4521/10000 train_time:419814ms step_avg:92.86ms +[2025-08-22 23:33:58] [Rank 0] step:4521/10000 train_time:419814ms step_avg:92.86ms +[2025-08-22 23:34:00] 
[Rank 0] step:4541/10000 train_time:421737ms step_avg:92.87ms +[2025-08-22 23:34:00] [Rank 0] step:4541/10000 train_time:421737ms step_avg:92.87ms +[2025-08-22 23:34:02] [Rank 0] step:4561/10000 train_time:423659ms step_avg:92.89ms +[2025-08-22 23:34:02] [Rank 0] step:4561/10000 train_time:423659ms step_avg:92.89ms +[2025-08-22 23:34:04] [Rank 0] step:4581/10000 train_time:425586ms step_avg:92.90ms +[2025-08-22 23:34:04] [Rank 0] step:4581/10000 train_time:425586ms step_avg:92.90ms +[2025-08-22 23:34:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:34:06] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:34:19] [Rank 0] PRINT: step:4600/10000 val_loss:3.9849 svd_entropy: attn_qk:H=0.5235,top10E=0.63,eRank=60.3,q75/q25=70.83 attn_vo:H=0.7087,top10E=0.37,eRank=181.2,q75/q25=24.31 mlp_w1:H=0.8857,top10E=0.15,eRank=390.7,q75/q25=5.55 mlp_w2:H=0.8746,top10E=0.17,eRank=375.7,q75/q25=7.06 vo_prod:H=0.4804,top10E=0.67,eRank=61.8,q75/q25=265.99 train_time:427517ms step_avg:92.94ms +[2025-08-22 23:34:19] [Rank 0] PRINT: step:4600/10000 val_loss:3.9849 svd_entropy: attn_qk:H=0.5235,top10E=0.63,eRank=60.3,q75/q25=70.83 attn_vo:H=0.7087,top10E=0.37,eRank=181.2,q75/q25=24.31 mlp_w1:H=0.8857,top10E=0.15,eRank=390.7,q75/q25=5.55 mlp_w2:H=0.8746,top10E=0.17,eRank=375.7,q75/q25=7.06 vo_prod:H=0.4804,top10E=0.67,eRank=61.8,q75/q25=265.99 train_time:427517ms step_avg:92.94ms +[2025-08-22 23:34:19] [Rank 0] step:4601/10000 train_time:427538ms step_avg:92.92ms +[2025-08-22 23:34:19] [Rank 0] step:4601/10000 train_time:427538ms step_avg:92.92ms +[2025-08-22 23:34:21] [Rank 0] step:4621/10000 train_time:429442ms step_avg:92.93ms +[2025-08-22 23:34:21] [Rank 0] step:4621/10000 train_time:429442ms step_avg:92.93ms +[2025-08-22 23:34:23] [Rank 0] step:4641/10000 train_time:431359ms step_avg:92.95ms 
+[2025-08-22 23:34:23] [Rank 0] step:4641/10000 train_time:431359ms step_avg:92.95ms +[2025-08-22 23:34:25] [Rank 0] step:4661/10000 train_time:433274ms step_avg:92.96ms +[2025-08-22 23:34:25] [Rank 0] step:4661/10000 train_time:433274ms step_avg:92.96ms +[2025-08-22 23:34:27] [Rank 0] step:4681/10000 train_time:435252ms step_avg:92.98ms +[2025-08-22 23:34:27] [Rank 0] step:4681/10000 train_time:435252ms step_avg:92.98ms +[2025-08-22 23:34:29] [Rank 0] step:4701/10000 train_time:437253ms step_avg:93.01ms +[2025-08-22 23:34:29] [Rank 0] step:4701/10000 train_time:437253ms step_avg:93.01ms +[2025-08-22 23:34:31] [Rank 0] step:4721/10000 train_time:439168ms step_avg:93.02ms +[2025-08-22 23:34:31] [Rank 0] step:4721/10000 train_time:439168ms step_avg:93.02ms +[2025-08-22 23:34:33] [Rank 0] step:4741/10000 train_time:441085ms step_avg:93.04ms +[2025-08-22 23:34:33] [Rank 0] step:4741/10000 train_time:441085ms step_avg:93.04ms +[2025-08-22 23:34:35] [Rank 0] step:4761/10000 train_time:443000ms step_avg:93.05ms +[2025-08-22 23:34:35] [Rank 0] step:4761/10000 train_time:443000ms step_avg:93.05ms +[2025-08-22 23:34:37] [Rank 0] step:4781/10000 train_time:444916ms step_avg:93.06ms +[2025-08-22 23:34:37] [Rank 0] step:4781/10000 train_time:444916ms step_avg:93.06ms +[2025-08-22 23:34:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:34:39] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:34:52] [Rank 0] PRINT: step:4800/10000 val_loss:3.9768 svd_entropy: attn_qk:H=0.5219,top10E=0.63,eRank=60.4,q75/q25=71.58 attn_vo:H=0.7092,top10E=0.37,eRank=181.5,q75/q25=24.29 mlp_w1:H=0.8850,top10E=0.15,eRank=389.4,q75/q25=5.57 mlp_w2:H=0.8738,top10E=0.17,eRank=374.2,q75/q25=7.08 vo_prod:H=0.4852,top10E=0.66,eRank=62.3,q75/q25=262.25 train_time:446838ms step_avg:93.09ms +[2025-08-22 23:34:52] [Rank 0] PRINT: step:4800/10000 val_loss:3.9768 svd_entropy: attn_qk:H=0.5219,top10E=0.63,eRank=60.4,q75/q25=71.58 attn_vo:H=0.7092,top10E=0.37,eRank=181.5,q75/q25=24.29 mlp_w1:H=0.8850,top10E=0.15,eRank=389.4,q75/q25=5.57 mlp_w2:H=0.8738,top10E=0.17,eRank=374.2,q75/q25=7.08 vo_prod:H=0.4852,top10E=0.66,eRank=62.3,q75/q25=262.25 train_time:446838ms step_avg:93.09ms +[2025-08-22 23:34:53] [Rank 0] step:4801/10000 train_time:446858ms step_avg:93.08ms +[2025-08-22 23:34:53] [Rank 0] step:4801/10000 train_time:446858ms step_avg:93.08ms +[2025-08-22 23:34:55] [Rank 0] step:4821/10000 train_time:448782ms step_avg:93.09ms +[2025-08-22 23:34:55] [Rank 0] step:4821/10000 train_time:448782ms step_avg:93.09ms +[2025-08-22 23:34:56] [Rank 0] step:4841/10000 train_time:450693ms step_avg:93.10ms +[2025-08-22 23:34:56] [Rank 0] step:4841/10000 train_time:450693ms step_avg:93.10ms +[2025-08-22 23:34:58] [Rank 0] step:4861/10000 train_time:452605ms step_avg:93.11ms +[2025-08-22 23:34:58] [Rank 0] step:4861/10000 train_time:452605ms step_avg:93.11ms +[2025-08-22 23:35:00] [Rank 0] step:4881/10000 train_time:454517ms step_avg:93.12ms +[2025-08-22 23:35:00] [Rank 0] step:4881/10000 train_time:454517ms step_avg:93.12ms +[2025-08-22 23:35:02] [Rank 0] step:4901/10000 train_time:456430ms step_avg:93.13ms +[2025-08-22 23:35:02] [Rank 0] step:4901/10000 train_time:456430ms step_avg:93.13ms +[2025-08-22 23:35:04] [Rank 0] step:4921/10000 train_time:458347ms step_avg:93.14ms +[2025-08-22 23:35:04] [Rank 0] step:4921/10000 train_time:458347ms step_avg:93.14ms +[2025-08-22 23:35:06] 
[Rank 0] step:4941/10000 train_time:460264ms step_avg:93.15ms +[2025-08-22 23:35:06] [Rank 0] step:4941/10000 train_time:460264ms step_avg:93.15ms +[2025-08-22 23:35:08] [Rank 0] step:4961/10000 train_time:462178ms step_avg:93.16ms +[2025-08-22 23:35:08] [Rank 0] step:4961/10000 train_time:462178ms step_avg:93.16ms +[2025-08-22 23:35:10] [Rank 0] step:4981/10000 train_time:464097ms step_avg:93.17ms +[2025-08-22 23:35:10] [Rank 0] step:4981/10000 train_time:464097ms step_avg:93.17ms +[2025-08-22 23:35:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:35:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:35:25] [Rank 0] PRINT: step:5000/10000 val_loss:3.9648 svd_entropy: attn_qk:H=0.5223,top10E=0.63,eRank=60.7,q75/q25=73.42 attn_vo:H=0.7094,top10E=0.37,eRank=181.7,q75/q25=24.33 mlp_w1:H=0.8845,top10E=0.15,eRank=388.4,q75/q25=5.61 mlp_w2:H=0.8729,top10E=0.17,eRank=372.6,q75/q25=7.10 vo_prod:H=0.4861,top10E=0.66,eRank=62.7,q75/q25=261.34 train_time:466020ms step_avg:93.20ms +[2025-08-22 23:35:25] [Rank 0] PRINT: step:5000/10000 val_loss:3.9648 svd_entropy: attn_qk:H=0.5223,top10E=0.63,eRank=60.7,q75/q25=73.42 attn_vo:H=0.7094,top10E=0.37,eRank=181.7,q75/q25=24.33 mlp_w1:H=0.8845,top10E=0.15,eRank=388.4,q75/q25=5.61 mlp_w2:H=0.8729,top10E=0.17,eRank=372.6,q75/q25=7.10 vo_prod:H=0.4861,top10E=0.66,eRank=62.7,q75/q25=261.34 train_time:466020ms step_avg:93.20ms +[2025-08-22 23:35:25] [Rank 0] step:5001/10000 train_time:466041ms step_avg:93.19ms +[2025-08-22 23:35:25] [Rank 0] step:5001/10000 train_time:466041ms step_avg:93.19ms +[2025-08-22 23:35:27] [Rank 0] step:5021/10000 train_time:467951ms step_avg:93.20ms +[2025-08-22 23:35:27] [Rank 0] step:5021/10000 train_time:467951ms step_avg:93.20ms +[2025-08-22 23:35:29] [Rank 0] step:5041/10000 train_time:469944ms step_avg:93.22ms 
+[2025-08-22 23:35:29] [Rank 0] step:5041/10000 train_time:469944ms step_avg:93.22ms +[2025-08-22 23:35:31] [Rank 0] step:5061/10000 train_time:471936ms step_avg:93.25ms +[2025-08-22 23:35:31] [Rank 0] step:5061/10000 train_time:471936ms step_avg:93.25ms +[2025-08-22 23:35:33] [Rank 0] step:5081/10000 train_time:473849ms step_avg:93.26ms +[2025-08-22 23:35:33] [Rank 0] step:5081/10000 train_time:473849ms step_avg:93.26ms +[2025-08-22 23:35:35] [Rank 0] step:5101/10000 train_time:475761ms step_avg:93.27ms +[2025-08-22 23:35:35] [Rank 0] step:5101/10000 train_time:475761ms step_avg:93.27ms +[2025-08-22 23:35:37] [Rank 0] step:5121/10000 train_time:477674ms step_avg:93.28ms +[2025-08-22 23:35:37] [Rank 0] step:5121/10000 train_time:477674ms step_avg:93.28ms +[2025-08-22 23:35:39] [Rank 0] step:5141/10000 train_time:479592ms step_avg:93.29ms +[2025-08-22 23:35:39] [Rank 0] step:5141/10000 train_time:479592ms step_avg:93.29ms +[2025-08-22 23:35:41] [Rank 0] step:5161/10000 train_time:481505ms step_avg:93.30ms +[2025-08-22 23:35:41] [Rank 0] step:5161/10000 train_time:481505ms step_avg:93.30ms +[2025-08-22 23:35:43] [Rank 0] step:5181/10000 train_time:483424ms step_avg:93.31ms +[2025-08-22 23:35:43] [Rank 0] step:5181/10000 train_time:483424ms step_avg:93.31ms +[2025-08-22 23:35:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:35:44] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:35:58] [Rank 0] PRINT: step:5200/10000 val_loss:3.9553 svd_entropy: attn_qk:H=0.5206,top10E=0.63,eRank=60.6,q75/q25=74.21 attn_vo:H=0.7097,top10E=0.37,eRank=181.7,q75/q25=24.30 mlp_w1:H=0.8839,top10E=0.15,eRank=387.3,q75/q25=5.63 mlp_w2:H=0.8720,top10E=0.17,eRank=370.9,q75/q25=7.11 vo_prod:H=0.4881,top10E=0.66,eRank=62.9,q75/q25=261.94 train_time:485372ms step_avg:93.34ms +[2025-08-22 23:35:58] [Rank 0] PRINT: step:5200/10000 val_loss:3.9553 svd_entropy: attn_qk:H=0.5206,top10E=0.63,eRank=60.6,q75/q25=74.21 attn_vo:H=0.7097,top10E=0.37,eRank=181.7,q75/q25=24.30 mlp_w1:H=0.8839,top10E=0.15,eRank=387.3,q75/q25=5.63 mlp_w2:H=0.8720,top10E=0.17,eRank=370.9,q75/q25=7.11 vo_prod:H=0.4881,top10E=0.66,eRank=62.9,q75/q25=261.94 train_time:485372ms step_avg:93.34ms +[2025-08-22 23:35:58] [Rank 0] step:5201/10000 train_time:485393ms step_avg:93.33ms +[2025-08-22 23:35:58] [Rank 0] step:5201/10000 train_time:485393ms step_avg:93.33ms +[2025-08-22 23:36:00] [Rank 0] step:5221/10000 train_time:487353ms step_avg:93.34ms +[2025-08-22 23:36:00] [Rank 0] step:5221/10000 train_time:487353ms step_avg:93.34ms +[2025-08-22 23:36:02] [Rank 0] step:5241/10000 train_time:489300ms step_avg:93.36ms +[2025-08-22 23:36:02] [Rank 0] step:5241/10000 train_time:489300ms step_avg:93.36ms +[2025-08-22 23:36:04] [Rank 0] step:5261/10000 train_time:491248ms step_avg:93.38ms +[2025-08-22 23:36:04] [Rank 0] step:5261/10000 train_time:491248ms step_avg:93.38ms +[2025-08-22 23:36:06] [Rank 0] step:5281/10000 train_time:493198ms step_avg:93.39ms +[2025-08-22 23:36:06] [Rank 0] step:5281/10000 train_time:493198ms step_avg:93.39ms +[2025-08-22 23:36:08] [Rank 0] step:5301/10000 train_time:495157ms step_avg:93.41ms +[2025-08-22 23:36:08] [Rank 0] step:5301/10000 train_time:495157ms step_avg:93.41ms +[2025-08-22 23:36:10] [Rank 0] step:5321/10000 train_time:497113ms step_avg:93.42ms +[2025-08-22 23:36:10] [Rank 0] step:5321/10000 train_time:497113ms step_avg:93.42ms +[2025-08-22 23:36:12] 
[Rank 0] step:5341/10000 train_time:499065ms step_avg:93.44ms +[2025-08-22 23:36:12] [Rank 0] step:5341/10000 train_time:499065ms step_avg:93.44ms +[2025-08-22 23:36:14] [Rank 0] step:5361/10000 train_time:501021ms step_avg:93.46ms +[2025-08-22 23:36:14] [Rank 0] step:5361/10000 train_time:501021ms step_avg:93.46ms +[2025-08-22 23:36:16] [Rank 0] step:5381/10000 train_time:502975ms step_avg:93.47ms +[2025-08-22 23:36:16] [Rank 0] step:5381/10000 train_time:502975ms step_avg:93.47ms +[2025-08-22 23:36:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:36:18] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:36:31] [Rank 0] PRINT: step:5400/10000 val_loss:3.9446 svd_entropy: attn_qk:H=0.5196,top10E=0.63,eRank=60.8,q75/q25=75.06 attn_vo:H=0.7112,top10E=0.36,eRank=182.3,q75/q25=24.39 mlp_w1:H=0.8832,top10E=0.15,eRank=386.0,q75/q25=5.65 mlp_w2:H=0.8708,top10E=0.17,eRank=369.0,q75/q25=7.15 vo_prod:H=0.4935,top10E=0.66,eRank=63.7,q75/q25=261.35 train_time:504932ms step_avg:93.51ms +[2025-08-22 23:36:31] [Rank 0] PRINT: step:5400/10000 val_loss:3.9446 svd_entropy: attn_qk:H=0.5196,top10E=0.63,eRank=60.8,q75/q25=75.06 attn_vo:H=0.7112,top10E=0.36,eRank=182.3,q75/q25=24.39 mlp_w1:H=0.8832,top10E=0.15,eRank=386.0,q75/q25=5.65 mlp_w2:H=0.8708,top10E=0.17,eRank=369.0,q75/q25=7.15 vo_prod:H=0.4935,top10E=0.66,eRank=63.7,q75/q25=261.35 train_time:504932ms step_avg:93.51ms +[2025-08-22 23:36:32] [Rank 0] step:5401/10000 train_time:504953ms step_avg:93.49ms +[2025-08-22 23:36:32] [Rank 0] step:5401/10000 train_time:504953ms step_avg:93.49ms +[2025-08-22 23:36:34] [Rank 0] step:5421/10000 train_time:506983ms step_avg:93.52ms +[2025-08-22 23:36:34] [Rank 0] step:5421/10000 train_time:506983ms step_avg:93.52ms +[2025-08-22 23:36:36] [Rank 0] step:5441/10000 train_time:508998ms step_avg:93.55ms 
+[2025-08-22 23:36:36] [Rank 0] step:5441/10000 train_time:508998ms step_avg:93.55ms +[2025-08-22 23:36:38] [Rank 0] step:5461/10000 train_time:510947ms step_avg:93.56ms +[2025-08-22 23:36:38] [Rank 0] step:5461/10000 train_time:510947ms step_avg:93.56ms +[2025-08-22 23:36:40] [Rank 0] step:5481/10000 train_time:512893ms step_avg:93.58ms +[2025-08-22 23:36:40] [Rank 0] step:5481/10000 train_time:512893ms step_avg:93.58ms +[2025-08-22 23:36:41] [Rank 0] step:5501/10000 train_time:514847ms step_avg:93.59ms +[2025-08-22 23:36:41] [Rank 0] step:5501/10000 train_time:514847ms step_avg:93.59ms +[2025-08-22 23:36:43] [Rank 0] step:5521/10000 train_time:516801ms step_avg:93.61ms +[2025-08-22 23:36:43] [Rank 0] step:5521/10000 train_time:516801ms step_avg:93.61ms +[2025-08-22 23:36:45] [Rank 0] step:5541/10000 train_time:518750ms step_avg:93.62ms +[2025-08-22 23:36:45] [Rank 0] step:5541/10000 train_time:518750ms step_avg:93.62ms +[2025-08-22 23:36:47] [Rank 0] step:5561/10000 train_time:520701ms step_avg:93.63ms +[2025-08-22 23:36:47] [Rank 0] step:5561/10000 train_time:520701ms step_avg:93.63ms +[2025-08-22 23:36:49] [Rank 0] step:5581/10000 train_time:522651ms step_avg:93.65ms +[2025-08-22 23:36:49] [Rank 0] step:5581/10000 train_time:522651ms step_avg:93.65ms +[2025-08-22 23:36:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:36:51] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:37:05] [Rank 0] PRINT: step:5600/10000 val_loss:3.9390 svd_entropy: attn_qk:H=0.5176,top10E=0.63,eRank=60.8,q75/q25=76.03 attn_vo:H=0.7118,top10E=0.36,eRank=182.8,q75/q25=24.47 mlp_w1:H=0.8823,top10E=0.15,eRank=384.5,q75/q25=5.68 mlp_w2:H=0.8698,top10E=0.17,eRank=367.2,q75/q25=7.13 vo_prod:H=0.4952,top10E=0.66,eRank=63.7,q75/q25=259.28 train_time:524609ms step_avg:93.68ms +[2025-08-22 23:37:05] [Rank 0] PRINT: step:5600/10000 val_loss:3.9390 svd_entropy: attn_qk:H=0.5176,top10E=0.63,eRank=60.8,q75/q25=76.03 attn_vo:H=0.7118,top10E=0.36,eRank=182.8,q75/q25=24.47 mlp_w1:H=0.8823,top10E=0.15,eRank=384.5,q75/q25=5.68 mlp_w2:H=0.8698,top10E=0.17,eRank=367.2,q75/q25=7.13 vo_prod:H=0.4952,top10E=0.66,eRank=63.7,q75/q25=259.28 train_time:524609ms step_avg:93.68ms +[2025-08-22 23:37:05] [Rank 0] step:5601/10000 train_time:524629ms step_avg:93.67ms +[2025-08-22 23:37:05] [Rank 0] step:5601/10000 train_time:524629ms step_avg:93.67ms +[2025-08-22 23:37:07] [Rank 0] step:5621/10000 train_time:526572ms step_avg:93.68ms +[2025-08-22 23:37:07] [Rank 0] step:5621/10000 train_time:526572ms step_avg:93.68ms +[2025-08-22 23:37:09] [Rank 0] step:5641/10000 train_time:528517ms step_avg:93.69ms +[2025-08-22 23:37:09] [Rank 0] step:5641/10000 train_time:528517ms step_avg:93.69ms +[2025-08-22 23:37:11] [Rank 0] step:5661/10000 train_time:530459ms step_avg:93.70ms +[2025-08-22 23:37:11] [Rank 0] step:5661/10000 train_time:530459ms step_avg:93.70ms +[2025-08-22 23:37:13] [Rank 0] step:5681/10000 train_time:532409ms step_avg:93.72ms +[2025-08-22 23:37:13] [Rank 0] step:5681/10000 train_time:532409ms step_avg:93.72ms +[2025-08-22 23:37:15] [Rank 0] step:5701/10000 train_time:534355ms step_avg:93.73ms +[2025-08-22 23:37:15] [Rank 0] step:5701/10000 train_time:534355ms step_avg:93.73ms +[2025-08-22 23:37:17] [Rank 0] step:5721/10000 train_time:536307ms step_avg:93.74ms +[2025-08-22 23:37:17] [Rank 0] step:5721/10000 train_time:536307ms step_avg:93.74ms +[2025-08-22 23:37:19] 
[Rank 0] step:5741/10000 train_time:538253ms step_avg:93.76ms +[2025-08-22 23:37:19] [Rank 0] step:5741/10000 train_time:538253ms step_avg:93.76ms +[2025-08-22 23:37:20] [Rank 0] step:5761/10000 train_time:540204ms step_avg:93.77ms +[2025-08-22 23:37:20] [Rank 0] step:5761/10000 train_time:540204ms step_avg:93.77ms +[2025-08-22 23:37:22] [Rank 0] step:5781/10000 train_time:542152ms step_avg:93.78ms +[2025-08-22 23:37:22] [Rank 0] step:5781/10000 train_time:542152ms step_avg:93.78ms +[2025-08-22 23:37:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:37:24] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:37:38] [Rank 0] PRINT: step:5800/10000 val_loss:3.9358 svd_entropy: attn_qk:H=0.5160,top10E=0.63,eRank=60.7,q75/q25=77.43 attn_vo:H=0.7121,top10E=0.36,eRank=183.0,q75/q25=24.46 mlp_w1:H=0.8817,top10E=0.15,eRank=383.4,q75/q25=5.69 mlp_w2:H=0.8687,top10E=0.17,eRank=365.5,q75/q25=7.16 vo_prod:H=0.4956,top10E=0.66,eRank=63.7,q75/q25=259.86 train_time:544110ms step_avg:93.81ms +[2025-08-22 23:37:38] [Rank 0] PRINT: step:5800/10000 val_loss:3.9358 svd_entropy: attn_qk:H=0.5160,top10E=0.63,eRank=60.7,q75/q25=77.43 attn_vo:H=0.7121,top10E=0.36,eRank=183.0,q75/q25=24.46 mlp_w1:H=0.8817,top10E=0.15,eRank=383.4,q75/q25=5.69 mlp_w2:H=0.8687,top10E=0.17,eRank=365.5,q75/q25=7.16 vo_prod:H=0.4956,top10E=0.66,eRank=63.7,q75/q25=259.86 train_time:544110ms step_avg:93.81ms +[2025-08-22 23:37:38] [Rank 0] step:5801/10000 train_time:544129ms step_avg:93.80ms +[2025-08-22 23:37:38] [Rank 0] step:5801/10000 train_time:544129ms step_avg:93.80ms +[2025-08-22 23:37:40] [Rank 0] step:5821/10000 train_time:546066ms step_avg:93.81ms +[2025-08-22 23:37:40] [Rank 0] step:5821/10000 train_time:546066ms step_avg:93.81ms +[2025-08-22 23:37:42] [Rank 0] step:5841/10000 train_time:548011ms step_avg:93.82ms 
+[2025-08-22 23:37:42] [Rank 0] step:5841/10000 train_time:548011ms step_avg:93.82ms +[2025-08-22 23:37:44] [Rank 0] step:5861/10000 train_time:549962ms step_avg:93.83ms +[2025-08-22 23:37:44] [Rank 0] step:5861/10000 train_time:549962ms step_avg:93.83ms +[2025-08-22 23:37:46] [Rank 0] step:5881/10000 train_time:551910ms step_avg:93.85ms +[2025-08-22 23:37:46] [Rank 0] step:5881/10000 train_time:551910ms step_avg:93.85ms +[2025-08-22 23:37:48] [Rank 0] step:5901/10000 train_time:553859ms step_avg:93.86ms +[2025-08-22 23:37:48] [Rank 0] step:5901/10000 train_time:553859ms step_avg:93.86ms +[2025-08-22 23:37:50] [Rank 0] step:5921/10000 train_time:555808ms step_avg:93.87ms +[2025-08-22 23:37:50] [Rank 0] step:5921/10000 train_time:555808ms step_avg:93.87ms +[2025-08-22 23:37:52] [Rank 0] step:5941/10000 train_time:557764ms step_avg:93.88ms +[2025-08-22 23:37:52] [Rank 0] step:5941/10000 train_time:557764ms step_avg:93.88ms +[2025-08-22 23:37:54] [Rank 0] step:5961/10000 train_time:559716ms step_avg:93.90ms +[2025-08-22 23:37:54] [Rank 0] step:5961/10000 train_time:559716ms step_avg:93.90ms +[2025-08-22 23:37:56] [Rank 0] step:5981/10000 train_time:561671ms step_avg:93.91ms +[2025-08-22 23:37:56] [Rank 0] step:5981/10000 train_time:561671ms step_avg:93.91ms +[2025-08-22 23:37:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:37:57] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:38:11] [Rank 0] PRINT: step:6000/10000 val_loss:3.9163 svd_entropy: attn_qk:H=0.5142,top10E=0.63,eRank=60.8,q75/q25=78.33 attn_vo:H=0.7125,top10E=0.36,eRank=183.3,q75/q25=24.56 mlp_w1:H=0.8806,top10E=0.15,eRank=381.7,q75/q25=5.73 mlp_w2:H=0.8675,top10E=0.18,eRank=363.6,q75/q25=7.18 vo_prod:H=0.4972,top10E=0.65,eRank=64.2,q75/q25=259.13 train_time:563630ms step_avg:93.94ms +[2025-08-22 23:38:11] [Rank 0] PRINT: step:6000/10000 val_loss:3.9163 svd_entropy: attn_qk:H=0.5142,top10E=0.63,eRank=60.8,q75/q25=78.33 attn_vo:H=0.7125,top10E=0.36,eRank=183.3,q75/q25=24.56 mlp_w1:H=0.8806,top10E=0.15,eRank=381.7,q75/q25=5.73 mlp_w2:H=0.8675,top10E=0.18,eRank=363.6,q75/q25=7.18 vo_prod:H=0.4972,top10E=0.65,eRank=64.2,q75/q25=259.13 train_time:563630ms step_avg:93.94ms +[2025-08-22 23:38:11] [Rank 0] step:6001/10000 train_time:563652ms step_avg:93.93ms +[2025-08-22 23:38:11] [Rank 0] step:6001/10000 train_time:563652ms step_avg:93.93ms +[2025-08-22 23:38:13] [Rank 0] step:6021/10000 train_time:565608ms step_avg:93.94ms +[2025-08-22 23:38:13] [Rank 0] step:6021/10000 train_time:565608ms step_avg:93.94ms +[2025-08-22 23:38:15] [Rank 0] step:6041/10000 train_time:567562ms step_avg:93.95ms +[2025-08-22 23:38:15] [Rank 0] step:6041/10000 train_time:567562ms step_avg:93.95ms +[2025-08-22 23:38:17] [Rank 0] step:6061/10000 train_time:569520ms step_avg:93.96ms +[2025-08-22 23:38:17] [Rank 0] step:6061/10000 train_time:569520ms step_avg:93.96ms +[2025-08-22 23:38:19] [Rank 0] step:6081/10000 train_time:571473ms step_avg:93.98ms +[2025-08-22 23:38:19] [Rank 0] step:6081/10000 train_time:571473ms step_avg:93.98ms +[2025-08-22 23:38:21] [Rank 0] step:6101/10000 train_time:573436ms step_avg:93.99ms +[2025-08-22 23:38:21] [Rank 0] step:6101/10000 train_time:573436ms step_avg:93.99ms +[2025-08-22 23:38:23] [Rank 0] step:6121/10000 train_time:575655ms step_avg:94.05ms +[2025-08-22 23:38:23] [Rank 0] step:6121/10000 train_time:575655ms step_avg:94.05ms +[2025-08-22 23:38:25] 
[Rank 0] step:6141/10000 train_time:577625ms step_avg:94.06ms +[2025-08-22 23:38:25] [Rank 0] step:6141/10000 train_time:577625ms step_avg:94.06ms +[2025-08-22 23:38:27] [Rank 0] step:6161/10000 train_time:579583ms step_avg:94.07ms +[2025-08-22 23:38:27] [Rank 0] step:6161/10000 train_time:579583ms step_avg:94.07ms +[2025-08-22 23:38:29] [Rank 0] step:6181/10000 train_time:581543ms step_avg:94.09ms +[2025-08-22 23:38:29] [Rank 0] step:6181/10000 train_time:581543ms step_avg:94.09ms +[2025-08-22 23:38:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:38:31] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:38:45] [Rank 0] PRINT: step:6200/10000 val_loss:3.9053 svd_entropy: attn_qk:H=0.5140,top10E=0.63,eRank=60.8,q75/q25=79.75 attn_vo:H=0.7128,top10E=0.36,eRank=183.4,q75/q25=24.63 mlp_w1:H=0.8797,top10E=0.16,eRank=380.2,q75/q25=5.74 mlp_w2:H=0.8661,top10E=0.18,eRank=361.5,q75/q25=7.20 vo_prod:H=0.4993,top10E=0.65,eRank=64.4,q75/q25=257.41 train_time:583509ms step_avg:94.11ms +[2025-08-22 23:38:45] [Rank 0] PRINT: step:6200/10000 val_loss:3.9053 svd_entropy: attn_qk:H=0.5140,top10E=0.63,eRank=60.8,q75/q25=79.75 attn_vo:H=0.7128,top10E=0.36,eRank=183.4,q75/q25=24.63 mlp_w1:H=0.8797,top10E=0.16,eRank=380.2,q75/q25=5.74 mlp_w2:H=0.8661,top10E=0.18,eRank=361.5,q75/q25=7.20 vo_prod:H=0.4993,top10E=0.65,eRank=64.4,q75/q25=257.41 train_time:583509ms step_avg:94.11ms +[2025-08-22 23:38:45] [Rank 0] step:6201/10000 train_time:583531ms step_avg:94.10ms +[2025-08-22 23:38:45] [Rank 0] step:6201/10000 train_time:583531ms step_avg:94.10ms +[2025-08-22 23:38:47] [Rank 0] step:6221/10000 train_time:585485ms step_avg:94.11ms +[2025-08-22 23:38:47] [Rank 0] step:6221/10000 train_time:585485ms step_avg:94.11ms +[2025-08-22 23:38:49] [Rank 0] step:6241/10000 train_time:587432ms step_avg:94.12ms 
+[2025-08-22 23:38:49] [Rank 0] step:6241/10000 train_time:587432ms step_avg:94.12ms +[2025-08-22 23:38:51] [Rank 0] step:6261/10000 train_time:589384ms step_avg:94.14ms +[2025-08-22 23:38:51] [Rank 0] step:6261/10000 train_time:589384ms step_avg:94.14ms +[2025-08-22 23:38:53] [Rank 0] step:6281/10000 train_time:591341ms step_avg:94.15ms +[2025-08-22 23:38:53] [Rank 0] step:6281/10000 train_time:591341ms step_avg:94.15ms +[2025-08-22 23:38:55] [Rank 0] step:6301/10000 train_time:593295ms step_avg:94.16ms +[2025-08-22 23:38:55] [Rank 0] step:6301/10000 train_time:593295ms step_avg:94.16ms +[2025-08-22 23:38:57] [Rank 0] step:6321/10000 train_time:595251ms step_avg:94.17ms +[2025-08-22 23:38:57] [Rank 0] step:6321/10000 train_time:595251ms step_avg:94.17ms +[2025-08-22 23:38:59] [Rank 0] step:6341/10000 train_time:597209ms step_avg:94.18ms +[2025-08-22 23:38:59] [Rank 0] step:6341/10000 train_time:597209ms step_avg:94.18ms +[2025-08-22 23:39:01] [Rank 0] step:6361/10000 train_time:599170ms step_avg:94.19ms +[2025-08-22 23:39:01] [Rank 0] step:6361/10000 train_time:599170ms step_avg:94.19ms +[2025-08-22 23:39:03] [Rank 0] step:6381/10000 train_time:601127ms step_avg:94.21ms +[2025-08-22 23:39:03] [Rank 0] step:6381/10000 train_time:601127ms step_avg:94.21ms +[2025-08-22 23:39:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:39:05] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:39:19] [Rank 0] PRINT: step:6400/10000 val_loss:3.8937 svd_entropy: attn_qk:H=0.5125,top10E=0.63,eRank=60.9,q75/q25=80.34 attn_vo:H=0.7131,top10E=0.36,eRank=183.5,q75/q25=24.71 mlp_w1:H=0.8790,top10E=0.16,eRank=379.0,q75/q25=5.75 mlp_w2:H=0.8650,top10E=0.18,eRank=359.7,q75/q25=7.25 vo_prod:H=0.5012,top10E=0.65,eRank=64.5,q75/q25=259.92 train_time:603086ms step_avg:94.23ms +[2025-08-22 23:39:19] [Rank 0] PRINT: step:6400/10000 val_loss:3.8937 svd_entropy: attn_qk:H=0.5125,top10E=0.63,eRank=60.9,q75/q25=80.34 attn_vo:H=0.7131,top10E=0.36,eRank=183.5,q75/q25=24.71 mlp_w1:H=0.8790,top10E=0.16,eRank=379.0,q75/q25=5.75 mlp_w2:H=0.8650,top10E=0.18,eRank=359.7,q75/q25=7.25 vo_prod:H=0.5012,top10E=0.65,eRank=64.5,q75/q25=259.92 train_time:603086ms step_avg:94.23ms +[2025-08-22 23:39:19] [Rank 0] step:6401/10000 train_time:603108ms step_avg:94.22ms +[2025-08-22 23:39:19] [Rank 0] step:6401/10000 train_time:603108ms step_avg:94.22ms +[2025-08-22 23:39:21] [Rank 0] step:6421/10000 train_time:605047ms step_avg:94.23ms +[2025-08-22 23:39:21] [Rank 0] step:6421/10000 train_time:605047ms step_avg:94.23ms +[2025-08-22 23:39:23] [Rank 0] step:6441/10000 train_time:606998ms step_avg:94.24ms +[2025-08-22 23:39:23] [Rank 0] step:6441/10000 train_time:606998ms step_avg:94.24ms +[2025-08-22 23:39:24] [Rank 0] step:6461/10000 train_time:608950ms step_avg:94.25ms +[2025-08-22 23:39:24] [Rank 0] step:6461/10000 train_time:608950ms step_avg:94.25ms +[2025-08-22 23:39:26] [Rank 0] step:6481/10000 train_time:610910ms step_avg:94.26ms +[2025-08-22 23:39:26] [Rank 0] step:6481/10000 train_time:610910ms step_avg:94.26ms +[2025-08-22 23:39:28] [Rank 0] step:6501/10000 train_time:612858ms step_avg:94.27ms +[2025-08-22 23:39:28] [Rank 0] step:6501/10000 train_time:612858ms step_avg:94.27ms +[2025-08-22 23:39:30] [Rank 0] step:6521/10000 train_time:614810ms step_avg:94.28ms +[2025-08-22 23:39:30] [Rank 0] step:6521/10000 train_time:614810ms step_avg:94.28ms +[2025-08-22 23:39:32] 
[Rank 0] step:6541/10000 train_time:616766ms step_avg:94.29ms +[2025-08-22 23:39:32] [Rank 0] step:6541/10000 train_time:616766ms step_avg:94.29ms +[2025-08-22 23:39:34] [Rank 0] step:6561/10000 train_time:618722ms step_avg:94.30ms +[2025-08-22 23:39:34] [Rank 0] step:6561/10000 train_time:618722ms step_avg:94.30ms +[2025-08-22 23:39:36] [Rank 0] step:6581/10000 train_time:620672ms step_avg:94.31ms +[2025-08-22 23:39:36] [Rank 0] step:6581/10000 train_time:620672ms step_avg:94.31ms +[2025-08-22 23:39:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:39:38] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:39:52] [Rank 0] PRINT: step:6600/10000 val_loss:3.8829 svd_entropy: attn_qk:H=0.5114,top10E=0.63,eRank=61.0,q75/q25=81.79 attn_vo:H=0.7138,top10E=0.36,eRank=183.9,q75/q25=24.60 mlp_w1:H=0.8781,top10E=0.16,eRank=377.6,q75/q25=5.77 mlp_w2:H=0.8640,top10E=0.18,eRank=358.3,q75/q25=7.26 vo_prod:H=0.5030,top10E=0.65,eRank=64.8,q75/q25=259.21 train_time:622634ms step_avg:94.34ms +[2025-08-22 23:39:52] [Rank 0] PRINT: step:6600/10000 val_loss:3.8829 svd_entropy: attn_qk:H=0.5114,top10E=0.63,eRank=61.0,q75/q25=81.79 attn_vo:H=0.7138,top10E=0.36,eRank=183.9,q75/q25=24.60 mlp_w1:H=0.8781,top10E=0.16,eRank=377.6,q75/q25=5.77 mlp_w2:H=0.8640,top10E=0.18,eRank=358.3,q75/q25=7.26 vo_prod:H=0.5030,top10E=0.65,eRank=64.8,q75/q25=259.21 train_time:622634ms step_avg:94.34ms +[2025-08-22 23:39:52] [Rank 0] step:6601/10000 train_time:622654ms step_avg:94.33ms +[2025-08-22 23:39:52] [Rank 0] step:6601/10000 train_time:622654ms step_avg:94.33ms +[2025-08-22 23:39:54] [Rank 0] step:6621/10000 train_time:624603ms step_avg:94.34ms +[2025-08-22 23:39:54] [Rank 0] step:6621/10000 train_time:624603ms step_avg:94.34ms +[2025-08-22 23:39:56] [Rank 0] step:6641/10000 train_time:626563ms step_avg:94.35ms 
+[2025-08-22 23:39:56] [Rank 0] step:6641/10000 train_time:626563ms step_avg:94.35ms +[2025-08-22 23:39:58] [Rank 0] step:6661/10000 train_time:628516ms step_avg:94.36ms +[2025-08-22 23:39:58] [Rank 0] step:6661/10000 train_time:628516ms step_avg:94.36ms +[2025-08-22 23:40:00] [Rank 0] step:6681/10000 train_time:630487ms step_avg:94.37ms +[2025-08-22 23:40:00] [Rank 0] step:6681/10000 train_time:630487ms step_avg:94.37ms +[2025-08-22 23:40:02] [Rank 0] step:6701/10000 train_time:632479ms step_avg:94.39ms +[2025-08-22 23:40:02] [Rank 0] step:6701/10000 train_time:632479ms step_avg:94.39ms +[2025-08-22 23:40:04] [Rank 0] step:6721/10000 train_time:634464ms step_avg:94.40ms +[2025-08-22 23:40:04] [Rank 0] step:6721/10000 train_time:634464ms step_avg:94.40ms +[2025-08-22 23:40:06] [Rank 0] step:6741/10000 train_time:636445ms step_avg:94.41ms +[2025-08-22 23:40:06] [Rank 0] step:6741/10000 train_time:636445ms step_avg:94.41ms +[2025-08-22 23:40:08] [Rank 0] step:6761/10000 train_time:638428ms step_avg:94.43ms +[2025-08-22 23:40:08] [Rank 0] step:6761/10000 train_time:638428ms step_avg:94.43ms +[2025-08-22 23:40:10] [Rank 0] step:6781/10000 train_time:640416ms step_avg:94.44ms +[2025-08-22 23:40:10] [Rank 0] step:6781/10000 train_time:640416ms step_avg:94.44ms +[2025-08-22 23:40:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:40:12] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:40:25] [Rank 0] PRINT: step:6800/10000 val_loss:3.8680 svd_entropy: attn_qk:H=0.5126,top10E=0.63,eRank=61.2,q75/q25=82.33 attn_vo:H=0.7143,top10E=0.36,eRank=184.3,q75/q25=24.60 mlp_w1:H=0.8777,top10E=0.16,eRank=376.9,q75/q25=5.80 mlp_w2:H=0.8634,top10E=0.18,eRank=357.3,q75/q25=7.28 vo_prod:H=0.5054,top10E=0.65,eRank=65.0,q75/q25=257.12 train_time:642411ms step_avg:94.47ms +[2025-08-22 23:40:25] [Rank 0] PRINT: step:6800/10000 val_loss:3.8680 svd_entropy: attn_qk:H=0.5126,top10E=0.63,eRank=61.2,q75/q25=82.33 attn_vo:H=0.7143,top10E=0.36,eRank=184.3,q75/q25=24.60 mlp_w1:H=0.8777,top10E=0.16,eRank=376.9,q75/q25=5.80 mlp_w2:H=0.8634,top10E=0.18,eRank=357.3,q75/q25=7.28 vo_prod:H=0.5054,top10E=0.65,eRank=65.0,q75/q25=257.12 train_time:642411ms step_avg:94.47ms +[2025-08-22 23:40:25] [Rank 0] step:6801/10000 train_time:642432ms step_avg:94.46ms +[2025-08-22 23:40:25] [Rank 0] step:6801/10000 train_time:642432ms step_avg:94.46ms +[2025-08-22 23:40:27] [Rank 0] step:6821/10000 train_time:644407ms step_avg:94.47ms +[2025-08-22 23:40:27] [Rank 0] step:6821/10000 train_time:644407ms step_avg:94.47ms +[2025-08-22 23:40:29] [Rank 0] step:6841/10000 train_time:646390ms step_avg:94.49ms +[2025-08-22 23:40:29] [Rank 0] step:6841/10000 train_time:646390ms step_avg:94.49ms +[2025-08-22 23:40:31] [Rank 0] step:6861/10000 train_time:648372ms step_avg:94.50ms +[2025-08-22 23:40:31] [Rank 0] step:6861/10000 train_time:648372ms step_avg:94.50ms +[2025-08-22 23:40:33] [Rank 0] step:6881/10000 train_time:650358ms step_avg:94.52ms +[2025-08-22 23:40:33] [Rank 0] step:6881/10000 train_time:650358ms step_avg:94.52ms +[2025-08-22 23:40:35] [Rank 0] step:6901/10000 train_time:652340ms step_avg:94.53ms +[2025-08-22 23:40:35] [Rank 0] step:6901/10000 train_time:652340ms step_avg:94.53ms +[2025-08-22 23:40:37] [Rank 0] step:6921/10000 train_time:654322ms step_avg:94.54ms +[2025-08-22 23:40:37] [Rank 0] step:6921/10000 train_time:654322ms step_avg:94.54ms +[2025-08-22 23:40:39] 
[Rank 0] step:6941/10000 train_time:656318ms step_avg:94.56ms +[2025-08-22 23:40:39] [Rank 0] step:6941/10000 train_time:656318ms step_avg:94.56ms +[2025-08-22 23:40:41] [Rank 0] step:6961/10000 train_time:658319ms step_avg:94.57ms +[2025-08-22 23:40:41] [Rank 0] step:6961/10000 train_time:658319ms step_avg:94.57ms +[2025-08-22 23:40:43] [Rank 0] step:6981/10000 train_time:660311ms step_avg:94.59ms +[2025-08-22 23:40:43] [Rank 0] step:6981/10000 train_time:660311ms step_avg:94.59ms +[2025-08-22 23:40:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:40:45] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:40:59] [Rank 0] PRINT: step:7000/10000 val_loss:3.8542 svd_entropy: attn_qk:H=0.5120,top10E=0.63,eRank=61.3,q75/q25=83.07 attn_vo:H=0.7153,top10E=0.36,eRank=184.8,q75/q25=24.65 mlp_w1:H=0.8771,top10E=0.16,eRank=376.1,q75/q25=5.80 mlp_w2:H=0.8627,top10E=0.18,eRank=356.3,q75/q25=7.29 vo_prod:H=0.5064,top10E=0.64,eRank=65.3,q75/q25=257.35 train_time:662352ms step_avg:94.62ms +[2025-08-22 23:40:59] [Rank 0] PRINT: step:7000/10000 val_loss:3.8542 svd_entropy: attn_qk:H=0.5120,top10E=0.63,eRank=61.3,q75/q25=83.07 attn_vo:H=0.7153,top10E=0.36,eRank=184.8,q75/q25=24.65 mlp_w1:H=0.8771,top10E=0.16,eRank=376.1,q75/q25=5.80 mlp_w2:H=0.8627,top10E=0.18,eRank=356.3,q75/q25=7.29 vo_prod:H=0.5064,top10E=0.64,eRank=65.3,q75/q25=257.35 train_time:662352ms step_avg:94.62ms +[2025-08-22 23:40:59] [Rank 0] step:7001/10000 train_time:662374ms step_avg:94.61ms +[2025-08-22 23:40:59] [Rank 0] step:7001/10000 train_time:662374ms step_avg:94.61ms +[2025-08-22 23:41:01] [Rank 0] step:7021/10000 train_time:664341ms step_avg:94.62ms +[2025-08-22 23:41:01] [Rank 0] step:7021/10000 train_time:664341ms step_avg:94.62ms +[2025-08-22 23:41:03] [Rank 0] step:7041/10000 train_time:666320ms step_avg:94.63ms 
+[2025-08-22 23:41:03] [Rank 0] step:7041/10000 train_time:666320ms step_avg:94.63ms +[2025-08-22 23:41:05] [Rank 0] step:7061/10000 train_time:668299ms step_avg:94.65ms +[2025-08-22 23:41:05] [Rank 0] step:7061/10000 train_time:668299ms step_avg:94.65ms +[2025-08-22 23:41:07] [Rank 0] step:7081/10000 train_time:670278ms step_avg:94.66ms +[2025-08-22 23:41:07] [Rank 0] step:7081/10000 train_time:670278ms step_avg:94.66ms +[2025-08-22 23:41:09] [Rank 0] step:7101/10000 train_time:672265ms step_avg:94.67ms +[2025-08-22 23:41:09] [Rank 0] step:7101/10000 train_time:672265ms step_avg:94.67ms +[2025-08-22 23:41:11] [Rank 0] step:7121/10000 train_time:674245ms step_avg:94.68ms +[2025-08-22 23:41:11] [Rank 0] step:7121/10000 train_time:674245ms step_avg:94.68ms +[2025-08-22 23:41:13] [Rank 0] step:7141/10000 train_time:676227ms step_avg:94.70ms +[2025-08-22 23:41:13] [Rank 0] step:7141/10000 train_time:676227ms step_avg:94.70ms +[2025-08-22 23:41:15] [Rank 0] step:7161/10000 train_time:678212ms step_avg:94.71ms +[2025-08-22 23:41:15] [Rank 0] step:7161/10000 train_time:678212ms step_avg:94.71ms +[2025-08-22 23:41:17] [Rank 0] step:7181/10000 train_time:680196ms step_avg:94.72ms +[2025-08-22 23:41:17] [Rank 0] step:7181/10000 train_time:680196ms step_avg:94.72ms +[2025-08-22 23:41:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:41:19] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:41:32] [Rank 0] PRINT: step:7200/10000 val_loss:3.8452 svd_entropy: attn_qk:H=0.5111,top10E=0.63,eRank=61.5,q75/q25=83.19 attn_vo:H=0.7160,top10E=0.36,eRank=185.3,q75/q25=24.60 mlp_w1:H=0.8766,top10E=0.16,eRank=375.3,q75/q25=5.82 mlp_w2:H=0.8622,top10E=0.18,eRank=355.6,q75/q25=7.33 vo_prod:H=0.5071,top10E=0.64,eRank=65.6,q75/q25=256.36 train_time:682189ms step_avg:94.75ms +[2025-08-22 23:41:32] [Rank 0] PRINT: step:7200/10000 val_loss:3.8452 svd_entropy: attn_qk:H=0.5111,top10E=0.63,eRank=61.5,q75/q25=83.19 attn_vo:H=0.7160,top10E=0.36,eRank=185.3,q75/q25=24.60 mlp_w1:H=0.8766,top10E=0.16,eRank=375.3,q75/q25=5.82 mlp_w2:H=0.8622,top10E=0.18,eRank=355.6,q75/q25=7.33 vo_prod:H=0.5071,top10E=0.64,eRank=65.6,q75/q25=256.36 train_time:682189ms step_avg:94.75ms +[2025-08-22 23:41:32] [Rank 0] step:7201/10000 train_time:682210ms step_avg:94.74ms +[2025-08-22 23:41:32] [Rank 0] step:7201/10000 train_time:682210ms step_avg:94.74ms +[2025-08-22 23:41:34] [Rank 0] step:7221/10000 train_time:684180ms step_avg:94.75ms +[2025-08-22 23:41:34] [Rank 0] step:7221/10000 train_time:684180ms step_avg:94.75ms +[2025-08-22 23:41:36] [Rank 0] step:7241/10000 train_time:686155ms step_avg:94.76ms +[2025-08-22 23:41:36] [Rank 0] step:7241/10000 train_time:686155ms step_avg:94.76ms +[2025-08-22 23:41:38] [Rank 0] step:7261/10000 train_time:688129ms step_avg:94.77ms +[2025-08-22 23:41:38] [Rank 0] step:7261/10000 train_time:688129ms step_avg:94.77ms +[2025-08-22 23:41:40] [Rank 0] step:7281/10000 train_time:690117ms step_avg:94.78ms +[2025-08-22 23:41:40] [Rank 0] step:7281/10000 train_time:690117ms step_avg:94.78ms +[2025-08-22 23:41:42] [Rank 0] step:7301/10000 train_time:692097ms step_avg:94.79ms +[2025-08-22 23:41:42] [Rank 0] step:7301/10000 train_time:692097ms step_avg:94.79ms +[2025-08-22 23:41:44] [Rank 0] step:7321/10000 train_time:694091ms step_avg:94.81ms +[2025-08-22 23:41:44] [Rank 0] step:7321/10000 train_time:694091ms step_avg:94.81ms +[2025-08-22 23:41:46] 
[Rank 0] step:7341/10000 train_time:696075ms step_avg:94.82ms +[2025-08-22 23:41:46] [Rank 0] step:7341/10000 train_time:696075ms step_avg:94.82ms +[2025-08-22 23:41:48] [Rank 0] step:7361/10000 train_time:698069ms step_avg:94.83ms +[2025-08-22 23:41:48] [Rank 0] step:7361/10000 train_time:698069ms step_avg:94.83ms +[2025-08-22 23:41:50] [Rank 0] step:7381/10000 train_time:700116ms step_avg:94.85ms +[2025-08-22 23:41:50] [Rank 0] step:7381/10000 train_time:700116ms step_avg:94.85ms +[2025-08-22 23:41:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:41:52] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:42:06] [Rank 0] PRINT: step:7400/10000 val_loss:3.8268 svd_entropy: attn_qk:H=0.5107,top10E=0.63,eRank=61.7,q75/q25=83.93 attn_vo:H=0.7164,top10E=0.36,eRank=185.5,q75/q25=24.66 mlp_w1:H=0.8760,top10E=0.16,eRank=374.4,q75/q25=5.83 mlp_w2:H=0.8617,top10E=0.18,eRank=354.8,q75/q25=7.37 vo_prod:H=0.5093,top10E=0.64,eRank=65.8,q75/q25=256.31 train_time:702227ms step_avg:94.90ms +[2025-08-22 23:42:06] [Rank 0] PRINT: step:7400/10000 val_loss:3.8268 svd_entropy: attn_qk:H=0.5107,top10E=0.63,eRank=61.7,q75/q25=83.93 attn_vo:H=0.7164,top10E=0.36,eRank=185.5,q75/q25=24.66 mlp_w1:H=0.8760,top10E=0.16,eRank=374.4,q75/q25=5.83 mlp_w2:H=0.8617,top10E=0.18,eRank=354.8,q75/q25=7.37 vo_prod:H=0.5093,top10E=0.64,eRank=65.8,q75/q25=256.31 train_time:702227ms step_avg:94.90ms +[2025-08-22 23:42:06] [Rank 0] step:7401/10000 train_time:702247ms step_avg:94.89ms +[2025-08-22 23:42:06] [Rank 0] step:7401/10000 train_time:702247ms step_avg:94.89ms +[2025-08-22 23:42:08] [Rank 0] step:7421/10000 train_time:704223ms step_avg:94.90ms +[2025-08-22 23:42:08] [Rank 0] step:7421/10000 train_time:704223ms step_avg:94.90ms +[2025-08-22 23:42:10] [Rank 0] step:7441/10000 train_time:706201ms step_avg:94.91ms 
+[2025-08-22 23:42:10] [Rank 0] step:7441/10000 train_time:706201ms step_avg:94.91ms +[2025-08-22 23:42:12] [Rank 0] step:7461/10000 train_time:708184ms step_avg:94.92ms +[2025-08-22 23:42:12] [Rank 0] step:7461/10000 train_time:708184ms step_avg:94.92ms +[2025-08-22 23:42:14] [Rank 0] step:7481/10000 train_time:710172ms step_avg:94.93ms +[2025-08-22 23:42:14] [Rank 0] step:7481/10000 train_time:710172ms step_avg:94.93ms +[2025-08-22 23:42:16] [Rank 0] step:7501/10000 train_time:712160ms step_avg:94.94ms +[2025-08-22 23:42:16] [Rank 0] step:7501/10000 train_time:712160ms step_avg:94.94ms +[2025-08-22 23:42:18] [Rank 0] step:7521/10000 train_time:714149ms step_avg:94.95ms +[2025-08-22 23:42:18] [Rank 0] step:7521/10000 train_time:714149ms step_avg:94.95ms +[2025-08-22 23:42:20] [Rank 0] step:7541/10000 train_time:716145ms step_avg:94.97ms +[2025-08-22 23:42:20] [Rank 0] step:7541/10000 train_time:716145ms step_avg:94.97ms +[2025-08-22 23:42:22] [Rank 0] step:7561/10000 train_time:718126ms step_avg:94.98ms +[2025-08-22 23:42:22] [Rank 0] step:7561/10000 train_time:718126ms step_avg:94.98ms +[2025-08-22 23:42:24] [Rank 0] step:7581/10000 train_time:720124ms step_avg:94.99ms +[2025-08-22 23:42:24] [Rank 0] step:7581/10000 train_time:720124ms step_avg:94.99ms +[2025-08-22 23:42:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:42:26] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:42:40] [Rank 0] PRINT: step:7600/10000 val_loss:3.8201 svd_entropy: attn_qk:H=0.5104,top10E=0.63,eRank=61.8,q75/q25=84.09 attn_vo:H=0.7171,top10E=0.36,eRank=185.9,q75/q25=24.56 mlp_w1:H=0.8754,top10E=0.16,eRank=373.6,q75/q25=5.86 mlp_w2:H=0.8613,top10E=0.18,eRank=354.3,q75/q25=7.38 vo_prod:H=0.5109,top10E=0.64,eRank=66.2,q75/q25=255.16 train_time:722128ms step_avg:95.02ms +[2025-08-22 23:42:40] [Rank 0] PRINT: step:7600/10000 val_loss:3.8201 svd_entropy: attn_qk:H=0.5104,top10E=0.63,eRank=61.8,q75/q25=84.09 attn_vo:H=0.7171,top10E=0.36,eRank=185.9,q75/q25=24.56 mlp_w1:H=0.8754,top10E=0.16,eRank=373.6,q75/q25=5.86 mlp_w2:H=0.8613,top10E=0.18,eRank=354.3,q75/q25=7.38 vo_prod:H=0.5109,top10E=0.64,eRank=66.2,q75/q25=255.16 train_time:722128ms step_avg:95.02ms +[2025-08-22 23:42:40] [Rank 0] step:7601/10000 train_time:722148ms step_avg:95.01ms +[2025-08-22 23:42:40] [Rank 0] step:7601/10000 train_time:722148ms step_avg:95.01ms +[2025-08-22 23:42:42] [Rank 0] step:7621/10000 train_time:724151ms step_avg:95.02ms +[2025-08-22 23:42:42] [Rank 0] step:7621/10000 train_time:724151ms step_avg:95.02ms +[2025-08-22 23:42:44] [Rank 0] step:7641/10000 train_time:726137ms step_avg:95.03ms +[2025-08-22 23:42:44] [Rank 0] step:7641/10000 train_time:726137ms step_avg:95.03ms +[2025-08-22 23:42:46] [Rank 0] step:7661/10000 train_time:728129ms step_avg:95.04ms +[2025-08-22 23:42:46] [Rank 0] step:7661/10000 train_time:728129ms step_avg:95.04ms +[2025-08-22 23:42:48] [Rank 0] step:7681/10000 train_time:730115ms step_avg:95.05ms +[2025-08-22 23:42:48] [Rank 0] step:7681/10000 train_time:730115ms step_avg:95.05ms +[2025-08-22 23:42:50] [Rank 0] step:7701/10000 train_time:732104ms step_avg:95.07ms +[2025-08-22 23:42:50] [Rank 0] step:7701/10000 train_time:732104ms step_avg:95.07ms +[2025-08-22 23:42:52] [Rank 0] step:7721/10000 train_time:734109ms step_avg:95.08ms +[2025-08-22 23:42:52] [Rank 0] step:7721/10000 train_time:734109ms step_avg:95.08ms +[2025-08-22 23:42:54] 
[Rank 0] step:7741/10000 train_time:736159ms step_avg:95.10ms +[2025-08-22 23:42:54] [Rank 0] step:7741/10000 train_time:736159ms step_avg:95.10ms +[2025-08-22 23:42:56] [Rank 0] step:7761/10000 train_time:738221ms step_avg:95.12ms +[2025-08-22 23:42:56] [Rank 0] step:7761/10000 train_time:738221ms step_avg:95.12ms +[2025-08-22 23:42:58] [Rank 0] step:7781/10000 train_time:740213ms step_avg:95.13ms +[2025-08-22 23:42:58] [Rank 0] step:7781/10000 train_time:740213ms step_avg:95.13ms +[2025-08-22 23:43:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:43:00] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:43:13] [Rank 0] PRINT: step:7800/10000 val_loss:3.8077 svd_entropy: attn_qk:H=0.5104,top10E=0.63,eRank=62.0,q75/q25=84.30 attn_vo:H=0.7178,top10E=0.35,eRank=186.2,q75/q25=24.66 mlp_w1:H=0.8748,top10E=0.16,eRank=372.6,q75/q25=5.87 mlp_w2:H=0.8610,top10E=0.18,eRank=353.8,q75/q25=7.40 vo_prod:H=0.5126,top10E=0.64,eRank=66.5,q75/q25=252.87 train_time:742224ms step_avg:95.16ms +[2025-08-22 23:43:13] [Rank 0] PRINT: step:7800/10000 val_loss:3.8077 svd_entropy: attn_qk:H=0.5104,top10E=0.63,eRank=62.0,q75/q25=84.30 attn_vo:H=0.7178,top10E=0.35,eRank=186.2,q75/q25=24.66 mlp_w1:H=0.8748,top10E=0.16,eRank=372.6,q75/q25=5.87 mlp_w2:H=0.8610,top10E=0.18,eRank=353.8,q75/q25=7.40 vo_prod:H=0.5126,top10E=0.64,eRank=66.5,q75/q25=252.87 train_time:742224ms step_avg:95.16ms +[2025-08-22 23:43:13] [Rank 0] step:7801/10000 train_time:742244ms step_avg:95.15ms +[2025-08-22 23:43:13] [Rank 0] step:7801/10000 train_time:742244ms step_avg:95.15ms +[2025-08-22 23:43:15] [Rank 0] step:7821/10000 train_time:744235ms step_avg:95.16ms +[2025-08-22 23:43:15] [Rank 0] step:7821/10000 train_time:744235ms step_avg:95.16ms +[2025-08-22 23:43:17] [Rank 0] step:7841/10000 train_time:746214ms step_avg:95.17ms 
+[2025-08-22 23:43:17] [Rank 0] step:7841/10000 train_time:746214ms step_avg:95.17ms +[2025-08-22 23:43:19] [Rank 0] step:7861/10000 train_time:748205ms step_avg:95.18ms +[2025-08-22 23:43:19] [Rank 0] step:7861/10000 train_time:748205ms step_avg:95.18ms +[2025-08-22 23:43:21] [Rank 0] step:7881/10000 train_time:750197ms step_avg:95.19ms +[2025-08-22 23:43:21] [Rank 0] step:7881/10000 train_time:750197ms step_avg:95.19ms +[2025-08-22 23:43:23] [Rank 0] step:7901/10000 train_time:752181ms step_avg:95.20ms +[2025-08-22 23:43:23] [Rank 0] step:7901/10000 train_time:752181ms step_avg:95.20ms +[2025-08-22 23:43:25] [Rank 0] step:7921/10000 train_time:754171ms step_avg:95.21ms +[2025-08-22 23:43:25] [Rank 0] step:7921/10000 train_time:754171ms step_avg:95.21ms +[2025-08-22 23:43:27] [Rank 0] step:7941/10000 train_time:756169ms step_avg:95.22ms +[2025-08-22 23:43:27] [Rank 0] step:7941/10000 train_time:756169ms step_avg:95.22ms +[2025-08-22 23:43:29] [Rank 0] step:7961/10000 train_time:758159ms step_avg:95.23ms +[2025-08-22 23:43:29] [Rank 0] step:7961/10000 train_time:758159ms step_avg:95.23ms +[2025-08-22 23:43:31] [Rank 0] step:7981/10000 train_time:760140ms step_avg:95.24ms +[2025-08-22 23:43:31] [Rank 0] step:7981/10000 train_time:760140ms step_avg:95.24ms +[2025-08-22 23:43:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. +[2025-08-22 23:43:33] [Rank 0] PRINT: Warning: val_tokens (1966080) not perfectly divisible by val_batch_size (262144). Some tokens might be missed. 
+[2025-08-22 23:43:47] [Rank 0] PRINT: step:8000/10000 val_loss:3.7909 svd_entropy: attn_qk:H=0.5099,top10E=0.63,eRank=62.1,q75/q25=84.77 attn_vo:H=0.7183,top10E=0.35,eRank=186.5,q75/q25=24.68 mlp_w1:H=0.8742,top10E=0.16,eRank=371.9,q75/q25=5.88 mlp_w2:H=0.8607,top10E=0.18,eRank=353.3,q75/q25=7.44 vo_prod:H=0.5130,top10E=0.64,eRank=66.5,q75/q25=253.14 train_time:762140ms step_avg:95.27ms +[2025-08-22 23:43:47] [Rank 0] PRINT: step:8000/10000 val_loss:3.7909 svd_entropy: attn_qk:H=0.5099,top10E=0.63,eRank=62.1,q75/q25=84.77 attn_vo:H=0.7183,top10E=0.35,eRank=186.5,q75/q25=24.68 mlp_w1:H=0.8742,top10E=0.16,eRank=371.9,q75/q25=5.88 mlp_w2:H=0.8607,top10E=0.18,eRank=353.3,q75/q25=7.44 vo_prod:H=0.5130,top10E=0.64,eRank=66.5,q75/q25=253.14 train_time:762140ms step_avg:95.27ms +[2025-08-22 23:43:47] [Rank 0] step:8001/10000 train_time:762161ms step_avg:95.26ms +[2025-08-22 23:43:47] [Rank 0] step:8001/10000 train_time:762161ms step_avg:95.26ms +[2025-08-22 23:43:49] [Rank 0] step:8021/10000 train_time:764140ms step_avg:95.27ms +[2025-08-22 23:43:49] [Rank 0] step:8021/10000 train_time:764140ms step_avg:95.27ms +[2025-08-22 23:43:51] [Rank 0] step:8041/10000 train_time:766133ms step_avg:95.28ms +[2025-08-22 23:43:51] [Rank 0] step:8041/10000 train_time:766133ms step_avg:95.28ms +[2025-08-22 23:43:53] [Rank 0] step:8061/10000 train_time:768120ms step_avg:95.29ms +[2025-08-22 23:43:53] [Rank 0] step:8061/10000 train_time:768120ms step_avg:95.29ms +[2025-08-22 23:43:55] [Rank 0] step:8081/10000 train_time:770253ms step_avg:95.32ms +[2025-08-22 23:43:55] [Rank 0] step:8081/10000 train_time:770253ms step_avg:95.32ms +[2025-08-22 23:43:57] [Rank 0] step:8101/10000 train_time:772141ms step_avg:95.31ms +[2025-08-22 23:43:57] [Rank 0] step:8101/10000 train_time:772141ms step_avg:95.31ms